From 8627108a7b09bd15b3a58d91b0dea5f8850ff891 Mon Sep 17 00:00:00 2001 From: Thomas Coratger Date: Fri, 20 Feb 2026 10:54:14 +0100 Subject: [PATCH] chore: rm duplicates --- .../testing/src/consensus_testing/keys.py | 4 - .../src/framework/pytest_plugins/filler.py | 62 ++---- src/lean_spec/subspecs/koalabear/__init__.py | 4 +- src/lean_spec/subspecs/koalabear/field.py | 56 ------ .../networking/client/event_source.py | 148 --------------- src/lean_spec/subspecs/networking/config.py | 9 - .../subspecs/networking/gossipsub/__init__.py | 2 - .../subspecs/networking/gossipsub/mcache.py | 8 - .../subspecs/networking/gossipsub/mesh.py | 11 -- .../subspecs/networking/gossipsub/message.py | 9 - .../subspecs/networking/gossipsub/rpc.py | 58 ------ .../subspecs/networking/gossipsub/topic.py | 43 ----- .../subspecs/networking/service/service.py | 9 - .../subspecs/networking/transport/peer_id.py | 48 ----- .../subspecs/networking/transport/quic/tls.py | 177 ------------------ src/lean_spec/subspecs/networking/types.py | 34 ---- src/lean_spec/subspecs/ssz/__init__.py | 3 - src/lean_spec/subspecs/ssz/merkleization.py | 24 --- src/lean_spec/subspecs/ssz/pack.py | 10 +- src/lean_spec/subspecs/storage/namespaces.py | 3 - src/lean_spec/types/base.py | 6 +- tests/interop/helpers/__init__.py | 6 - tests/interop/helpers/assertions.py | 117 ------------ tests/lean_spec/helpers/__init__.py | 9 +- tests/lean_spec/helpers/builders.py | 43 +---- tests/lean_spec/helpers/mocks.py | 51 ----- .../lean_spec/subspecs/forkchoice/conftest.py | 54 +----- .../subspecs/koalabear/test_field.py | 87 +-------- .../gossipsub/integration/conftest.py | 11 -- .../gossipsub/integration/network.py | 11 -- .../gossipsub/test_cache_edge_cases.py | 30 +-- .../networking/gossipsub/test_gossipsub.py | 44 +---- .../subspecs/networking/test_peer.py | 12 +- .../transport/identity/test_keypair.py | 2 +- .../networking/transport/test_peer_id.py | 11 +- .../subspecs/ssz/test_merkleization.py | 41 ---- tests/lean_spec/subspecs/ssz/test_pack.py | 25 --- tests/lean_spec/types/test_bitfields.py | 9 +- tests/lean_spec/types/test_collections.py | 7 +- 39 files changed, 49 insertions(+), 1249 deletions(-) diff --git a/packages/testing/src/consensus_testing/keys.py b/packages/testing/src/consensus_testing/keys.py index 9e91bc19..323ba870 100755 --- a/packages/testing/src/consensus_testing/keys.py +++ b/packages/testing/src/consensus_testing/keys.py @@ -289,10 +289,6 @@ def get_public_key(self, idx: ValidatorIndex) -> PublicKey: """Get a validator's public key.""" return self[idx].public - def get_all_public_keys(self) -> dict[ValidatorIndex, PublicKey]: - """Get all public keys (from base keys, not advanced state).""" - return {idx: kp.public for idx, kp in self.keys.items()} - def sign_attestation_data( self, validator_id: ValidatorIndex, diff --git a/packages/testing/src/framework/pytest_plugins/filler.py b/packages/testing/src/framework/pytest_plugins/filler.py index b782df0e..7f450a41 100644 --- a/packages/testing/src/framework/pytest_plugins/filler.py +++ b/packages/testing/src/framework/pytest_plugins/filler.py @@ -305,10 +305,13 @@ def pytest_collection_modifyitems(config: pytest.Config, items: List[pytest.Item config.hook.pytest_deselected(items=deselected) -def _is_test_item_valid_for_fork(item: pytest.Item, fork_class: Any, get_fork_by_name: Any) -> bool: - """Check if a test item is valid for the given fork based on validity markers.""" - markers = list(item.iter_markers()) +def _check_markers_valid_for_fork( + markers: list[Any], fork_class: 
Any, get_fork_by_name: Any +) -> bool: + """Check if test markers indicate validity for the given fork. + Shared logic for both collection-time and parametrization-time fork filtering. + """ has_valid_from = False has_valid_until = False has_valid_at = False @@ -354,6 +357,11 @@ def _is_test_item_valid_for_fork(item: pytest.Item, fork_class: Any, get_fork_by return from_valid and until_valid +def _is_test_item_valid_for_fork(item: pytest.Item, fork_class: Any, get_fork_by_name: Any) -> bool: + """Check if a test item is valid for the given fork based on validity markers.""" + return _check_markers_valid_for_fork(list(item.iter_markers()), fork_class, get_fork_by_name) + + def pytest_sessionfinish(session: pytest.Session, exitstatus: int) -> None: """Write all collected fixtures at the end of the session.""" if hasattr(session.config, "fixture_collector"): @@ -527,51 +535,9 @@ def _is_test_valid_for_fork( metafunc: pytest.Metafunc, fork_class: Any, get_fork_by_name: Any ) -> bool: """Check if a test is valid for the given fork based on validity markers.""" - markers = list(metafunc.definition.iter_markers()) - - has_valid_from = False - has_valid_until = False - has_valid_at = False - - valid_from_forks = [] - valid_until_forks = [] - valid_at_forks = [] - - for marker in markers: - if marker.name == "valid_from": - has_valid_from = True - for fork_name in marker.args: - target_fork = get_fork_by_name(fork_name) - if target_fork: - valid_from_forks.append(target_fork) - elif marker.name == "valid_until": - has_valid_until = True - for fork_name in marker.args: - target_fork = get_fork_by_name(fork_name) - if target_fork: - valid_until_forks.append(target_fork) - elif marker.name == "valid_at": - has_valid_at = True - for fork_name in marker.args: - target_fork = get_fork_by_name(fork_name) - if target_fork: - valid_at_forks.append(target_fork) - - if not (has_valid_from or has_valid_until or has_valid_at): - return True - - if has_valid_at: - return fork_class in valid_at_forks - - from_valid = True - if has_valid_from: - from_valid = any(fork_class >= from_fork for from_fork in valid_from_forks) - - until_valid = True - if has_valid_until: - until_valid = any(fork_class <= until_fork for until_fork in valid_until_forks) - - return from_valid and until_valid + return _check_markers_valid_for_fork( + list(metafunc.definition.iter_markers()), fork_class, get_fork_by_name + ) def _register_layer_fixtures(config: pytest.Config, layer: str) -> None: diff --git a/src/lean_spec/subspecs/koalabear/__init__.py b/src/lean_spec/subspecs/koalabear/__init__.py index bb704489..99418150 100644 --- a/src/lean_spec/subspecs/koalabear/__init__.py +++ b/src/lean_spec/subspecs/koalabear/__init__.py @@ -1,11 +1,9 @@ """Specifications for the KoalaBear finite field.""" -from .field import P_BITS, P_BYTES, TWO_ADICITY, Fp, P +from .field import P_BYTES, Fp, P __all__ = [ "P", - "P_BITS", "P_BYTES", - "TWO_ADICITY", "Fp", ] diff --git a/src/lean_spec/subspecs/koalabear/field.py b/src/lean_spec/subspecs/koalabear/field.py index 20f0052d..6f75b200 100644 --- a/src/lean_spec/subspecs/koalabear/field.py +++ b/src/lean_spec/subspecs/koalabear/field.py @@ -191,59 +191,3 @@ def __bytes__(self) -> bytes: 4-byte little-endian representation of the field element. """ return self.encode_bytes() - - @classmethod - def from_bytes(cls, data: bytes) -> Self: - """ - Deserialize a field element from bytes. - - This is the inverse of `__bytes__()` and follows Python's standard - deserialization pattern. 
- - Args: - data: 4-byte little-endian representation of a field element. - - Returns: - Deserialized field element. - - Raises: - ValueError: If data has incorrect length or represents an invalid field value. - """ - return cls.decode_bytes(data) - - @classmethod - def serialize_list(cls, elements: list[Self]) -> bytes: - """ - Serialize a list of field elements to bytes. - - This is a convenience method for serializing multiple field elements - at once, useful for container serialization. - - Args: - elements: List of field elements to serialize. - - Returns: - Concatenated bytes of all field elements. - """ - return b"".join(bytes(elem) for elem in elements) - - @classmethod - def deserialize_list(cls, data: bytes, count: int) -> list[Self]: - """ - Deserialize a fixed number of field elements from bytes. - - Args: - data: Raw bytes to deserialize. - count: Expected number of field elements. - - Returns: - List of deserialized field elements. - - Raises: - ValueError: If data length doesn't match expected count. - """ - expected_len = count * P_BYTES - if len(data) != expected_len: - raise ValueError(f"Expected {expected_len} bytes for {count} elements, got {len(data)}") - - return [cls.from_bytes(data[i : i + P_BYTES]) for i in range(0, len(data), P_BYTES)] diff --git a/src/lean_spec/subspecs/networking/client/event_source.py b/src/lean_spec/subspecs/networking/client/event_source.py index 8d27b3aa..e844bb63 100644 --- a/src/lean_spec/subspecs/networking/client/event_source.py +++ b/src/lean_spec/subspecs/networking/client/event_source.py @@ -154,7 +154,6 @@ from lean_spec.subspecs.networking.varint import ( VarintError, decode_varint, - encode_varint, ) from lean_spec.types.exceptions import SSZSerializationError @@ -1345,119 +1344,6 @@ async def setup_outbound_with_delay() -> None: # The connection will be cleaned up elsewhere. logger.warning("Stream acceptor error for %s: %s", peer_id, e) - async def _handle_gossip_stream(self, peer_id: PeerId, stream: InboundStreamProtocol) -> None: - """ - Handle an incoming gossip stream. - - Reads the gossip message, decodes it, and emits the appropriate event. - - Args: - peer_id: Peer that sent the message. - stream: QUIC stream containing the gossip message. - - - COMPLETE FLOW - ------------- - A gossip message goes through these stages: - - 1. Read raw bytes from QUIC stream. - 2. Parse topic string and data length (varints). - 3. Decompress Snappy-framed data. - 4. Decode SSZ bytes into typed object. - 5. Emit event to the sync layer. - - Any stage can fail. Failures are logged but don't crash the handler. - - - ERROR HANDLING STRATEGY - ----------------------- - Gossip is best-effort. A single bad message should not: - - - Crash the node. - - Disconnect the peer. - - Block other messages. - - We log errors and continue. Peer scoring (not implemented here) - would track repeated failures for reputation management. - - - RESOURCE CLEANUP - ---------------- - The stream MUST be closed in finally, even if errors occur. - Unclosed streams leak QUIC resources and can cause deadlocks. - """ - try: - # Step 1: Read the gossip message from the stream. - # - # This parses the varint-prefixed topic and data fields. - # May fail if the message is truncated or malformed. - topic_str, compressed_data = await read_gossip_message(stream) - - # Step 2: Decode the message. - # - # This performs: - # - Topic validation (correct prefix, encoding, fork). - # - Snappy decompression with CRC verification. - # - SSZ decoding into the appropriate type. 
- message = self._gossip_handler.decode_message(topic_str, compressed_data) - topic = self._gossip_handler.get_topic(topic_str) - - # Step 3: Emit the appropriate event based on message type. - # - # The topic determines the expected message type. - # We verify the decoded type matches to catch bugs. - match topic.kind: - case TopicKind.BLOCK: - if isinstance(message, SignedBlockWithAttestation): - await self._emit_gossip_block(message, peer_id) - else: - # Type mismatch indicates a bug in decode_message. - logger.warning("Block topic but got %s", type(message).__name__) - - case TopicKind.ATTESTATION_SUBNET: - if isinstance(message, SignedAttestation): - await self._emit_gossip_attestation(message, peer_id) - else: - # Type mismatch indicates a bug in decode_message. - logger.warning("Attestation topic but got %s", type(message).__name__) - - case TopicKind.AGGREGATED_ATTESTATION: - if isinstance(message, SignedAggregatedAttestation): - await self._emit_gossip_aggregated_attestation(message, peer_id) - else: - logger.warning( - "Aggregated attestation topic but got %s", - type(message).__name__, - ) - - logger.debug("Received gossip %s from %s", topic.kind.value, peer_id) - - except GossipMessageError as e: - # Expected error: malformed message, decompression failure, etc. - # - # This is not necessarily a bug. The peer may be misbehaving - # or there may be network corruption. Log and continue. - logger.warning("Gossip message error from %s: %s", peer_id, e) - - except Exception as e: - # Unexpected error: likely a bug in our code. - # - # Log as warning to aid debugging. Don't crash. - logger.warning("Unexpected error handling gossip from %s: %s", peer_id, e) - - finally: - # Always close the stream to release QUIC resources. - # - # Unclosed streams cause resource leaks and can deadlock - # the connection if too many accumulate. - # - # The try/except suppresses close errors. The stream may - # already be closed if the connection dropped. - try: - await stream.close() - except Exception: - pass - async def publish(self, topic: str, data: bytes) -> None: """ Broadcast a message to all connected peers on a topic. @@ -1482,37 +1368,3 @@ async def publish(self, topic: str, data: bytes) -> None: logger.debug("Published message to gossipsub topic %s", topic) except Exception as e: logger.warning("Failed to publish to gossipsub: %s", e) - - async def _send_gossip_message( - self, - conn: QuicConnection, - topic: str, - data: bytes, - ) -> None: - """ - Send a gossip message to a peer. - - Opens a new stream for the gossip message and sends the data. - - Args: - conn: QuicConnection to the peer. - topic: Topic string for the message. - data: Message bytes to send. - """ - # Open a new outbound stream for gossip protocol. - stream = await conn.open_stream(GOSSIPSUB_DEFAULT_PROTOCOL_ID) - - try: - # Format: topic length (varint) + topic + data length (varint) + data - topic_bytes = topic.encode("utf-8") - - # Write topic length and topic. - await stream.write(encode_varint(len(topic_bytes))) - await stream.write(topic_bytes) - - # Write data length and data. 
- await stream.write(encode_varint(len(data))) - await stream.write(data) - - finally: - await stream.close() diff --git a/src/lean_spec/subspecs/networking/config.py b/src/lean_spec/subspecs/networking/config.py index e09054f9..51e0cf36 100644 --- a/src/lean_spec/subspecs/networking/config.py +++ b/src/lean_spec/subspecs/networking/config.py @@ -46,9 +46,6 @@ # --- Gossipsub Protocol IDs --- -GOSSIPSUB_PROTOCOL_ID_V10: Final[str] = "/meshsub/1.0.0" -"""Gossipsub v1.0 protocol ID - basic mesh pubsub.""" - GOSSIPSUB_PROTOCOL_ID_V11: Final[str] = "/meshsub/1.1.0" """Gossipsub v1.1 protocol ID - peer scoring, extended validators. @@ -75,11 +72,5 @@ before attempting to re-graft. This prevents rapid mesh churn. """ -MESSAGE_ID_SIZE: Final[int] = 20 -"""Size of gossipsub message IDs in bytes. - -Per Ethereum spec, message IDs are the first 20 bytes of SHA256(domain + topic_len + topic + data). -""" - MAX_ERROR_MESSAGE_SIZE: Final[int] = 256 """Maximum error message size in bytes per Ethereum P2P spec (ErrorMessage: List[byte, 256]).""" diff --git a/src/lean_spec/subspecs/networking/gossipsub/__init__.py b/src/lean_spec/subspecs/networking/gossipsub/__init__.py index af4e7fac..c42b3716 100644 --- a/src/lean_spec/subspecs/networking/gossipsub/__init__.py +++ b/src/lean_spec/subspecs/networking/gossipsub/__init__.py @@ -42,7 +42,6 @@ ForkMismatchError, GossipTopic, TopicKind, - format_topic_string, parse_topic_string, ) from .types import ( @@ -59,7 +58,6 @@ # Topic (commonly needed for Ethereum) "GossipTopic", "TopicKind", - "format_topic_string", "parse_topic_string", "ForkMismatchError", # Types diff --git a/src/lean_spec/subspecs/networking/gossipsub/mcache.py b/src/lean_spec/subspecs/networking/gossipsub/mcache.py index d9544778..983f0143 100644 --- a/src/lean_spec/subspecs/networking/gossipsub/mcache.py +++ b/src/lean_spec/subspecs/networking/gossipsub/mcache.py @@ -236,10 +236,6 @@ def __len__(self) -> int: """Return the total number of cached messages.""" return len(self._by_id) - def __contains__(self, msg_id: MessageId) -> bool: - """Check if a message ID is in the cache.""" - return msg_id in self._by_id - @dataclass(slots=True) class SeenCache: @@ -328,7 +324,3 @@ def clear(self) -> None: def __len__(self) -> int: """Return the number of seen message IDs.""" return len(self._timestamps) - - def __contains__(self, msg_id: MessageId) -> bool: - """Check if a message ID has been seen.""" - return msg_id in self._timestamps diff --git a/src/lean_spec/subspecs/networking/gossipsub/mesh.py b/src/lean_spec/subspecs/networking/gossipsub/mesh.py index 43793851..d9d17b06 100644 --- a/src/lean_spec/subspecs/networking/gossipsub/mesh.py +++ b/src/lean_spec/subspecs/networking/gossipsub/mesh.py @@ -197,17 +197,6 @@ def unsubscribe(self, topic: TopicId) -> set[PeerId]: mesh = self._meshes.pop(topic, None) return mesh.peers if mesh else set() - def is_subscribed(self, topic: TopicId) -> bool: - """Check if subscribed to a topic. - - Args: - topic: Topic identifier to check. - - Returns: - True if subscribed. - """ - return topic in self._subscriptions - def get_mesh_peers(self, topic: TopicId) -> set[PeerId]: """Get mesh peers for a topic. 
diff --git a/src/lean_spec/subspecs/networking/gossipsub/message.py b/src/lean_spec/subspecs/networking/gossipsub/message.py index 9c5e5ac1..952a3fa9 100644 --- a/src/lean_spec/subspecs/networking/gossipsub/message.py +++ b/src/lean_spec/subspecs/networking/gossipsub/message.py @@ -174,15 +174,6 @@ def compute_id( return Bytes20(hashlib.sha256(preimage).digest()[:20]) - @property - def topic_str(self) -> str: - """Get the topic as a UTF-8 string. - - Returns: - Topic decoded from bytes to string. - """ - return self.topic.decode("utf-8") - def __hash__(self) -> int: """Hash based on message ID. diff --git a/src/lean_spec/subspecs/networking/gossipsub/rpc.py b/src/lean_spec/subspecs/networking/gossipsub/rpc.py index 76b4b22c..83b7a7fa 100644 --- a/src/lean_spec/subspecs/networking/gossipsub/rpc.py +++ b/src/lean_spec/subspecs/networking/gossipsub/rpc.py @@ -667,61 +667,3 @@ def create_graft_rpc(topics: list[str]) -> RPC: RPC ready to be encoded and sent. """ return RPC(control=ControlMessage(graft=[ControlGraft(topic_id=t) for t in topics])) - - -def create_prune_rpc(topics: list[str], backoff: int = 60) -> RPC: - """ - Create an RPC with PRUNE control messages. - - Args: - topics: List of topic IDs to notify mesh removal for. - backoff: Backoff duration in seconds before re-grafting. - - Returns: - RPC ready to be encoded and sent. - """ - return RPC( - control=ControlMessage(prune=[ControlPrune(topic_id=t, backoff=backoff) for t in topics]) - ) - - -def create_ihave_rpc(topic_id: str, message_ids: list[bytes]) -> RPC: - """ - Create an RPC with IHAVE control message. - - Args: - topic_id: Topic the messages belong to. - message_ids: Message IDs to advertise. - - Returns: - RPC ready to be encoded and sent. - """ - ihave = ControlIHave(topic_id=topic_id, message_ids=message_ids) - return RPC(control=ControlMessage(ihave=[ihave])) - - -def create_iwant_rpc(message_ids: list[bytes]) -> RPC: - """ - Create an RPC with IWANT control message. - - Args: - message_ids: Message IDs being requested. - - Returns: - RPC ready to be encoded and sent. - """ - return RPC(control=ControlMessage(iwant=[ControlIWant(message_ids=message_ids)])) - - -def create_publish_rpc(topic: str, data: bytes) -> RPC: - """ - Create an RPC with a published message. - - Args: - topic: Topic to publish to. - data: Message payload. - - Returns: - RPC ready to be encoded and sent. - """ - return RPC(publish=[Message(topic=topic, data=data)]) diff --git a/src/lean_spec/subspecs/networking/gossipsub/topic.py b/src/lean_spec/subspecs/networking/gossipsub/topic.py index 6b1c5560..32b6a866 100644 --- a/src/lean_spec/subspecs/networking/gossipsub/topic.py +++ b/src/lean_spec/subspecs/networking/gossipsub/topic.py @@ -170,14 +170,6 @@ def __str__(self) -> str: topic_name = str(self.kind) return f"/{TOPIC_PREFIX}/{self.fork_digest}/{topic_name}/{ENCODING_POSTFIX}" - def __bytes__(self) -> bytes: - """Return the topic string as UTF-8 bytes. - - Returns: - Topic string encoded as bytes. - """ - return str(self).encode("utf-8") - def validate_fork(self, expected_fork_digest: str) -> None: """ Validate that the topic's fork_digest matches expected. @@ -191,18 +183,6 @@ def validate_fork(self, expected_fork_digest: str) -> None: if self.fork_digest != expected_fork_digest: raise ForkMismatchError(expected_fork_digest, self.fork_digest) - def is_fork_compatible(self, expected_fork_digest: str) -> bool: - """ - Check if this topic is compatible with the expected fork. 
- - Args: - expected_fork_digest: Expected fork digest (0x-prefixed hex). - - Returns: - True if fork_digest matches, False otherwise. - """ - return self.fork_digest == expected_fork_digest - @classmethod def from_string(cls, topic_str: str) -> GossipTopic: """Parse a topic string into a GossipTopic. @@ -305,29 +285,6 @@ def attestation_subnet(cls, fork_digest: str, subnet_id: SubnetId) -> GossipTopi return cls(kind=TopicKind.ATTESTATION_SUBNET, fork_digest=fork_digest, subnet_id=subnet_id) -def format_topic_string( - topic_name: str, - fork_digest: str, - prefix: str = TOPIC_PREFIX, - encoding: str = ENCODING_POSTFIX, -) -> str: - """Format a complete gossip topic string. - - Low-level function for constructing topic strings. - Prefer the dataclass representation for most use cases. - - Args: - topic_name: Message type (e.g., "block", "attestation"). - fork_digest: Fork digest as 0x-prefixed hex string. - prefix: Network prefix (defaults to TOPIC_PREFIX). - encoding: Encoding suffix (defaults to ENCODING_POSTFIX). - - Returns: - Formatted topic string. - """ - return f"/{prefix}/{fork_digest}/{topic_name}/{encoding}" - - def parse_topic_string(topic_str: str) -> tuple[str, str, str, str]: """Parse a topic string into its components. diff --git a/src/lean_spec/subspecs/networking/service/service.py b/src/lean_spec/subspecs/networking/service/service.py index 3bcd5bbd..28fa9bbf 100644 --- a/src/lean_spec/subspecs/networking/service/service.py +++ b/src/lean_spec/subspecs/networking/service/service.py @@ -83,9 +83,6 @@ class NetworkService: _running: bool = field(default=False, repr=False) """Whether the event loop is running.""" - _events_processed: int = field(default=0, repr=False) - """Counter for processed events (for monitoring).""" - async def run(self) -> None: """ Main event loop - route events until stopped. @@ -109,7 +106,6 @@ async def run(self) -> None: break await self._handle_event(event) - self._events_processed += 1 except StopAsyncIteration: # Source exhausted - normal termination for finite event sources. @@ -201,11 +197,6 @@ def is_running(self) -> bool: """Check if the event loop is currently running.""" return self._running - @property - def events_processed(self) -> int: - """Total events processed since creation.""" - return self._events_processed - async def publish_block(self, block: SignedBlockWithAttestation) -> None: """ Publish a block to the gossip network. diff --git a/src/lean_spec/subspecs/networking/transport/peer_id.py b/src/lean_spec/subspecs/networking/transport/peer_id.py index 0d8ab557..05e6e277 100644 --- a/src/lean_spec/subspecs/networking/transport/peer_id.py +++ b/src/lean_spec/subspecs/networking/transport/peer_id.py @@ -374,26 +374,6 @@ def __repr__(self) -> str: """Return detailed representation.""" return f"PeerId({self!s})" - def to_base58(self) -> str: - """ - Return Base58-encoded PeerId string. - - This is the legacy format currently recommended by the libp2p spec. - - Returns: - Base58-encoded string suitable for display or serialization. - """ - return Base58.encode(self.multihash) - - def to_bytes(self) -> bytes: - """ - Return the raw multihash bytes. - - Returns: - Multihash bytes (can be used for binary protocols). - """ - return self.multihash - @classmethod def from_base58(cls, s: str) -> PeerId: """ @@ -410,19 +390,6 @@ def from_base58(cls, s: str) -> PeerId: """ return cls(multihash=Base58.decode(s)) - @classmethod - def from_bytes(cls, data: bytes) -> PeerId: - """ - Create PeerId from raw multihash bytes. 
- - Args: - data: Multihash bytes. - - Returns: - PeerId wrapping the multihash. - """ - return cls(multihash=data) - @classmethod def from_public_key(cls, public_key: PublicKeyProto) -> PeerId: """ @@ -465,18 +432,3 @@ def from_secp256k1(cls, public_key_bytes: bytes) -> PeerId: proto = PublicKeyProto(key_type=KeyType.SECP256K1, key_data=public_key_bytes) return cls.from_public_key(proto) - - @classmethod - def derive(cls, key_data: bytes, key_type: KeyType) -> PeerId: - """ - Derive PeerId from raw key bytes and type. - - Args: - key_data: Raw public key bytes. - key_type: Key algorithm type. - - Returns: - Derived PeerId. - """ - proto = PublicKeyProto(key_type=key_type, key_data=key_data) - return cls.from_public_key(proto) diff --git a/src/lean_spec/subspecs/networking/transport/quic/tls.py b/src/lean_spec/subspecs/networking/transport/quic/tls.py index 7a1b8827..a594376f 100644 --- a/src/lean_spec/subspecs/networking/transport/quic/tls.py +++ b/src/lean_spec/subspecs/networking/transport/quic/tls.py @@ -36,9 +36,6 @@ LIBP2P_EXTENSION_OID = x509.ObjectIdentifier("1.3.6.1.4.1.53594.1.1") """libp2p TLS extension OID (Protocol Labs assigned).""" -LIBP2P_TLS_ALPN = b"libp2p" -"""ALPN protocol identifier for libp2p QUIC/TLS.""" - SIGNATURE_PREFIX = b"libp2p-tls-handshake:" """ Prefix for the signed payload. @@ -143,53 +140,6 @@ def generate_libp2p_certificate( return private_pem, cert_pem, cert -def verify_libp2p_certificate(cert: x509.Certificate) -> bytes: - """ - Verify a libp2p TLS certificate and extract the peer's identity public key. - - Validates: - 1. Certificate has the libp2p extension - 2. Extension contains a valid identity signature - - Args: - cert: X.509 certificate to verify. - - Returns: - Peer's secp256k1 compressed public key (33 bytes). - - Raises: - ValueError: If certificate is invalid or verification fails. - """ - # Find the libp2p extension. - try: - ext = cert.extensions.get_extension_for_oid(LIBP2P_EXTENSION_OID) - except x509.ExtensionNotFound as e: - raise ValueError("Certificate missing libp2p extension") from e - - # Parse the extension payload. - # - # The UnrecognizedExtension stores raw bytes in the value attribute. - if not isinstance(ext.value, x509.UnrecognizedExtension): - raise ValueError("Invalid libp2p extension type") - - extension_data = ext.value.value - public_key_bytes, signature = _parse_extension_payload(extension_data) - - # Get TLS public key for signature verification. - tls_public_bytes = cert.public_key().public_bytes( - encoding=serialization.Encoding.DER, - format=serialization.PublicFormat.SubjectPublicKeyInfo, - ) - - # Verify the signature. - # - # The signature proves the peer owns the identity key and is binding it - # to this specific TLS key pair. - _verify_identity_signature(public_key_bytes, tls_public_bytes, signature) - - return public_key_bytes - - def _create_extension_payload( identity_key: IdentityKeypair, tls_public_bytes: bytes, @@ -276,130 +226,3 @@ def _encode_asn1_length(length: int) -> bytes: return bytes([0x81, length]) else: return bytes([0x82, length >> 8, length & 0xFF]) - - -def _parse_extension_payload(data: bytes) -> tuple[bytes, bytes]: - """ - Parse libp2p extension payload (ASN.1 DER format). - - Returns: - (public_key_bytes, signature) tuple. - - Raises: - ValueError: If payload is malformed. - """ - # Parse ASN.1 SEQUENCE. 
- if data[0] != 0x30: - raise ValueError("Expected ASN.1 SEQUENCE") - pos, seq_len = _parse_asn1_length(data, 1) - if pos + seq_len != len(data): - raise ValueError("Invalid SEQUENCE length") - - # Parse first OCTET STRING (public key protobuf). - if data[pos] != 0x04: - raise ValueError("Expected OCTET STRING for public key") - pos, octet1_len = _parse_asn1_length(data, pos + 1) - public_key_proto = data[pos : pos + octet1_len] - pos += octet1_len - - # Parse second OCTET STRING (signature). - if data[pos] != 0x04: - raise ValueError("Expected OCTET STRING for signature") - pos, octet2_len = _parse_asn1_length(data, pos + 1) - signature = data[pos : pos + octet2_len] - - # Parse protobuf to extract public key bytes. - public_key_bytes = _parse_public_key_message(public_key_proto) - - return public_key_bytes, signature - - -def _parse_asn1_length(data: bytes, pos: int) -> tuple[int, int]: - """ - Parse ASN.1 DER length. - - Returns: - (new_position, length) tuple. - """ - first_byte = data[pos] - if first_byte < 128: - return pos + 1, first_byte - elif first_byte == 0x81: - return pos + 2, data[pos + 1] - elif first_byte == 0x82: - return pos + 3, (data[pos + 1] << 8) | data[pos + 2] - else: - raise ValueError(f"Unsupported length encoding: {first_byte}") - - -def _parse_public_key_message(data: bytes) -> bytes: - """Parse PublicKey protobuf message to extract key bytes.""" - pos = 0 - key_type = None - key_data = None - - while pos < len(data): - tag = data[pos] - pos += 1 - - # Field 1: Type (tag=0x08, wire type=0=varint) - if tag == 0x08: - key_type = data[pos] - pos += 1 - - # Field 2: Data (tag=0x12, wire type=2=length-delimited) - elif tag == 0x12: - length = data[pos] - pos += 1 - key_data = data[pos : pos + length] - pos += length - else: - raise ValueError(f"Unknown public key field tag: {tag}") - - if key_type != KEY_TYPE_SECP256K1: - raise ValueError(f"Unsupported key type: {key_type}") - if key_data is None: - raise ValueError("Missing key data") - - return key_data - - -def _verify_identity_signature( - public_key_bytes: bytes, - tls_public_bytes: bytes, - signature: bytes, -) -> None: - """ - Verify the identity signature over TLS public key. - - Args: - public_key_bytes: secp256k1 compressed public key (33 bytes). - tls_public_bytes: TLS public key in SubjectPublicKeyInfo format. - signature: DER-encoded ECDSA signature. - - Raises: - ValueError: If signature is invalid. - """ - # Reconstruct the secp256k1 public key. - # - # Compressed public key is 33 bytes (02/03 prefix + 32 byte X coordinate). - try: - public_key = ec.EllipticCurvePublicKey.from_encoded_point( - ec.SECP256K1(), - public_key_bytes, - ) - except Exception as e: - raise ValueError(f"Invalid public key: {e}") from e - - # Verify DER-encoded signature. - # - # libp2p uses ECDSA with SHA-256 for secp256k1 signatures. - to_verify = SIGNATURE_PREFIX + tls_public_bytes - try: - public_key.verify( - signature, # DER-encoded - to_verify, - ec.ECDSA(hashes.SHA256()), - ) - except Exception as e: - raise ValueError(f"Signature verification failed: {e}") from e diff --git a/src/lean_spec/subspecs/networking/types.py b/src/lean_spec/subspecs/networking/types.py index eb3283bf..a7fb86b3 100644 --- a/src/lean_spec/subspecs/networking/types.py +++ b/src/lean_spec/subspecs/networking/types.py @@ -73,37 +73,3 @@ class ConnectionState(IntEnum): DISCONNECTING = auto() """Graceful shutdown in progress (Goodbye sent/received).""" - - -class GoodbyeReason(IntEnum): - """ - Reason codes for the Goodbye request/response message. 
- - Sent when gracefully disconnecting from a peer to indicate why - the connection is being closed. - - **Official codes (from spec):** - - +------+---------------------+ - | Code | Meaning | - +======+=====================+ - | 1 | Client shutdown | - +------+---------------------+ - | 2 | Irrelevant network | - +------+---------------------+ - | 3 | Fault/error | - +------+---------------------+ - - References: - ----------- - - Goodbye spec: https://github.com/ethereum/consensus-specs/blob/master/specs/phase0/p2p-interface.md#goodbye-v1 - """ - - CLIENT_SHUTDOWN = 1 - """Node is shutting down normally.""" - - IRRELEVANT_NETWORK = 2 - """Peer is on a different fork or network.""" - - FAULT_OR_ERROR = 3 - """Generic error detected in peer communication.""" diff --git a/src/lean_spec/subspecs/ssz/__init__.py b/src/lean_spec/subspecs/ssz/__init__.py index f0aedc4e..68915b94 100644 --- a/src/lean_spec/subspecs/ssz/__init__.py +++ b/src/lean_spec/subspecs/ssz/__init__.py @@ -1,10 +1,7 @@ """SSZ (Simple Serialize) implementation.""" -from lean_spec.types import ZERO_HASH - from .hash import hash_tree_root __all__ = [ "hash_tree_root", - "ZERO_HASH", ] diff --git a/src/lean_spec/subspecs/ssz/merkleization.py b/src/lean_spec/subspecs/ssz/merkleization.py index 68ee5dc5..53eb6b91 100644 --- a/src/lean_spec/subspecs/ssz/merkleization.py +++ b/src/lean_spec/subspecs/ssz/merkleization.py @@ -146,30 +146,6 @@ def _merkleize_efficient(chunks: list[Bytes32], width: int) -> Bytes32: return level[0] if level else _zero_tree_root(width) -def merkleize_progressive(chunks: Sequence[Bytes32], num_leaves: int = 1) -> Bytes32: - """Progressive Merkleization (per spec). - - Rare in practice; provided for completeness. Splits on `num_leaves`: - - right: merkleize the first up-to-`num_leaves` chunks using a fixed-width tree - - left: recurse on the remaining chunks, quadrupling the right's width at each step - """ - if len(chunks) == 0: - return ZERO_HASH - - # Right branch: fixed-width merkleization of the first `num_leaves` chunks - right = merkleize(chunks[:num_leaves], num_leaves) - - # Left branch: recursively collapse everything beyond `num_leaves` - left = ( - merkleize_progressive(chunks[num_leaves:], num_leaves * 4) - if len(chunks) > num_leaves - else ZERO_HASH - ) - - # Combine branches - return hash_nodes(left, right) - - def mix_in_length(root: Bytes32, length: int) -> Bytes32: """Mix the length (as uint256 little-endian) into a Merkle root.""" if length < 0: diff --git a/src/lean_spec/subspecs/ssz/pack.py b/src/lean_spec/subspecs/ssz/pack.py index c2bff7d1..edcb7f10 100644 --- a/src/lean_spec/subspecs/ssz/pack.py +++ b/src/lean_spec/subspecs/ssz/pack.py @@ -6,7 +6,7 @@ from __future__ import annotations -from typing import Iterable, Sequence +from typing import Sequence from lean_spec.subspecs.ssz.constants import BITS_PER_BYTE, BYTES_PER_CHUNK from lean_spec.types.byte_arrays import Bytes32 @@ -36,14 +36,6 @@ def _partition_chunks(b: bytes) -> list[Bytes32]: return [Bytes32(b[i : i + BYTES_PER_CHUNK]) for i in range(0, len(b), BYTES_PER_CHUNK)] -def pack_basic_serialized(serialized_basic_values: Iterable[bytes]) -> list[Bytes32]: - """Pack serialized basic values into chunks. - - Concatenates and right-pads to produce chunks ready for merkleization. - """ - return _partition_chunks(_right_pad_to_chunk(b"".join(serialized_basic_values))) - - def pack_bytes(data: bytes) -> list[Bytes32]: """Pack raw bytes (e.g. 
ByteVector/ByteList content) into 32-byte chunks.""" return _partition_chunks(_right_pad_to_chunk(data)) diff --git a/src/lean_spec/subspecs/storage/namespaces.py b/src/lean_spec/subspecs/storage/namespaces.py index d17b8545..608c0587 100644 --- a/src/lean_spec/subspecs/storage/namespaces.py +++ b/src/lean_spec/subspecs/storage/namespaces.py @@ -142,6 +142,3 @@ class SlotIndexNamespace: CHECKPOINTS = CheckpointNamespace() ATTESTATIONS = AttestationNamespace() SLOT_INDEX = SlotIndexNamespace() - -ALL_NAMESPACES = [BLOCKS, STATES, CHECKPOINTS, ATTESTATIONS, SLOT_INDEX] -"""All namespace definitions for schema initialization.""" diff --git a/src/lean_spec/types/base.py b/src/lean_spec/types/base.py index d4e71fbe..2fde9d90 100644 --- a/src/lean_spec/types/base.py +++ b/src/lean_spec/types/base.py @@ -1,6 +1,6 @@ """Reusable, strict base models for the specification.""" -from typing import Any, Self +from typing import Any from pydantic import BaseModel, ConfigDict from pydantic.alias_generators import to_camel @@ -23,10 +23,6 @@ class CamelModel(BaseModel): arbitrary_types_allowed=True, ) - def copy(self: Self, **kwargs: Any) -> Self: - """Create a copy of the model with the updated fields that are validated.""" - return self.__class__(**(self.model_dump(exclude_unset=True) | kwargs)) - def to_json(self, **kwargs: Any) -> dict[str, Any]: """Return json encodable representation of this model""" # remove these if user tries to pass them diff --git a/tests/interop/helpers/__init__.py b/tests/interop/helpers/__init__.py index 46e2502d..976497e5 100644 --- a/tests/interop/helpers/__init__.py +++ b/tests/interop/helpers/__init__.py @@ -1,12 +1,9 @@ """Helper utilities for interop tests.""" from .assertions import ( - assert_all_finalized_to, assert_checkpoint_monotonicity, - assert_head_descends_from, assert_heads_consistent, assert_peer_connections, - assert_same_finalized_checkpoint, ) from .diagnostics import PipelineDiagnostics from .node_runner import NodeCluster @@ -15,12 +12,9 @@ __all__ = [ # Assertions - "assert_all_finalized_to", "assert_checkpoint_monotonicity", - "assert_head_descends_from", "assert_heads_consistent", "assert_peer_connections", - "assert_same_finalized_checkpoint", # Diagnostics "PipelineDiagnostics", # Node management diff --git a/tests/interop/helpers/assertions.py b/tests/interop/helpers/assertions.py index 4a9258bf..f09a6dc5 100644 --- a/tests/interop/helpers/assertions.py +++ b/tests/interop/helpers/assertions.py @@ -11,7 +11,6 @@ import asyncio import logging import time -from typing import Literal from .diagnostics import PipelineDiagnostics from .node_runner import NodeCluster @@ -19,30 +18,6 @@ logger = logging.getLogger(__name__) -async def assert_all_finalized_to( - cluster: NodeCluster, - target_slot: int, - timeout: float = 120.0, -) -> None: - """ - Assert all nodes finalize to at least target_slot. - - Args: - cluster: Node cluster to check. - target_slot: Minimum finalized slot required. - timeout: Maximum wait time in seconds. - - Raises: - AssertionError: If timeout reached before finalization. 
- """ - success = await cluster.wait_for_finalization(target_slot, timeout) - if not success: - slots = [node.finalized_slot for node in cluster.nodes] - raise AssertionError( - f"Finalization timeout: expected slot >= {target_slot}, got finalized slots {slots}" - ) - - async def assert_heads_consistent( cluster: NodeCluster, max_slot_diff: int = 1, @@ -123,98 +98,6 @@ async def assert_peer_connections( ) -async def assert_same_finalized_checkpoint( - cluster: NodeCluster, - timeout: float = 30.0, -) -> None: - """ - Assert all nodes agree on the finalized checkpoint. - - Args: - cluster: Node cluster to check. - timeout: Maximum wait time. - - Raises: - AssertionError: If nodes disagree on finalized checkpoint. - """ - start = time.monotonic() - - while time.monotonic() - start < timeout: - # Compare (slot, root) tuples. - # Tuples are hashable, so deduplication via set detects disagreement. - checkpoints = [ - (node.node.store.latest_finalized.slot, node.node.store.latest_finalized.root) - for node in cluster.nodes - ] - - # All nodes agree when there is exactly one unique checkpoint. - if len(set(checkpoints)) == 1: - slot, root = checkpoints[0] - logger.debug( - "All nodes agree on finalized checkpoint: slot=%d, root=%s", - slot, - root.hex()[:8], - ) - return - - await asyncio.sleep(0.5) - - # Build a readable summary for the error message. - checkpoints_summary = [] - for node in cluster.nodes: - slot = int(node.node.store.latest_finalized.slot) - root_hex = node.node.store.latest_finalized.root.hex()[:8] - checkpoints_summary.append((slot, root_hex)) - raise AssertionError(f"Finalized checkpoint disagreement: {checkpoints_summary}") - - -def assert_head_descends_from( - cluster: NodeCluster, - checkpoint: Literal["finalized", "justified"], -) -> None: - """ - Verify the fork choice invariant: head must descend from a checkpoint. - - The fork choice algorithm starts from the checkpoint root and walks - forward. If head is not a descendant, the algorithm is broken. - - Walks backward from head toward genesis on each node. - The checkpoint root must appear on this path. - - Args: - cluster: Node cluster to check. - checkpoint: Which checkpoint to verify ancestry against. - - Raises: - AssertionError: If any node's head is not a descendant of the checkpoint. - """ - for node in cluster.nodes: - store = node._store - - cp = store.latest_finalized if checkpoint == "finalized" else store.latest_justified - cp_root = cp.root - cp_slot = int(cp.slot) - - # Walk backward from head toward genesis. - # The checkpoint root must appear on this path. - current_root = store.head - found = False - while current_root in store.blocks: - if current_root == cp_root: - found = True - break - block = store.blocks[current_root] - # Reached genesis without finding the checkpoint. 
- if int(block.slot) == 0: - break - current_root = block.parent_root - - assert found, ( - f"Node {node.index}: head {store.head.hex()[:8]} is not a descendant " - f"of {checkpoint} root {cp_root.hex()[:8]} at slot {cp_slot}" - ) - - def assert_checkpoint_monotonicity( checkpoint_history: list[list[PipelineDiagnostics]], ) -> None: diff --git a/tests/lean_spec/helpers/__init__.py b/tests/lean_spec/helpers/__init__.py index d3fc07c3..7bfe0068 100644 --- a/tests/lean_spec/helpers/__init__.py +++ b/tests/lean_spec/helpers/__init__.py @@ -21,8 +21,6 @@ make_genesis_state, make_keyed_genesis_state, make_mock_signature, - make_public_key_bytes, - make_signature, make_signed_attestation, make_signed_block, make_signed_block_from_store, @@ -33,9 +31,8 @@ make_test_status, make_validators, make_validators_from_key_manager, - make_validators_with_keys, ) -from .mocks import MockEventSource, MockForkchoiceStore, MockNetworkRequester, MockNoiseSession +from .mocks import MockEventSource, MockForkchoiceStore, MockNetworkRequester TEST_VALIDATOR_ID = ValidatorIndex(0) @@ -58,8 +55,6 @@ "make_genesis_state", "make_keyed_genesis_state", "make_mock_signature", - "make_public_key_bytes", - "make_signature", "make_signed_attestation", "make_signed_block", "make_signed_block_from_store", @@ -70,12 +65,10 @@ "make_test_status", "make_validators", "make_validators_from_key_manager", - "make_validators_with_keys", # Mocks "MockEventSource", "MockForkchoiceStore", "MockNetworkRequester", - "MockNoiseSession", # Constants "TEST_VALIDATOR_ID", ] diff --git a/tests/lean_spec/helpers/builders.py b/tests/lean_spec/helpers/builders.py index 2e3dd12d..c07becac 100644 --- a/tests/lean_spec/helpers/builders.py +++ b/tests/lean_spec/helpers/builders.py @@ -43,12 +43,10 @@ from lean_spec.subspecs.sync.service import SyncService from lean_spec.subspecs.xmss.aggregation import AggregatedSignatureProof, SignatureKey from lean_spec.subspecs.xmss.constants import PROD_CONFIG -from lean_spec.subspecs.xmss.containers import PublicKey, Signature +from lean_spec.subspecs.xmss.containers import Signature from lean_spec.subspecs.xmss.types import ( HashDigestList, - HashDigestVector, HashTreeOpening, - Parameter, Randomness, ) from lean_spec.types import Bytes32, Bytes52, Uint64 @@ -61,18 +59,6 @@ def make_bytes32(seed: int) -> Bytes32: return Bytes32(bytes([seed % 256]) * 32) -def make_public_key_bytes(seed: int) -> bytes: - """ - Encode a deterministic XMSS public key. - - Constructs valid root and parameter vectors seeded by the input. - """ - root = HashDigestVector(data=[Fp(seed + i) for i in range(HashDigestVector.LENGTH)]) - parameter = Parameter(data=[Fp(seed + 100 + i) for i in range(Parameter.LENGTH)]) - public_key = PublicKey(root=root, parameter=parameter) - return public_key.encode_bytes() - - def make_mock_signature() -> Signature: """ Create a minimal mock XMSS signature. @@ -86,20 +72,6 @@ def make_mock_signature() -> Signature: ) -def make_signature(seed: int) -> Signature: - """ - Create a deterministic XMSS signature from a seed. - - Produces unique randomness values based on the seed. - """ - randomness = Randomness(data=[Fp(seed + 200 + i) for i in range(Randomness.LENGTH)]) - return Signature( - path=HashTreeOpening(siblings=HashDigestList(data=[])), - rho=randomness, - hashes=HashDigestList(data=[]), - ) - - def make_validators(count: int) -> Validators: """ Build a validator registry with null public keys. 
@@ -112,19 +84,6 @@ def make_validators(count: int) -> Validators: return Validators(data=validators) -def make_validators_with_keys(count: int) -> Validators: - """ - Build a validator registry with deterministic XMSS public keys. - - Each validator gets a unique key derived from their index. - """ - validators = [ - Validator(pubkey=Bytes52(make_public_key_bytes(i)), index=ValidatorIndex(i)) - for i in range(count) - ] - return Validators(data=validators) - - def make_validators_from_key_manager(key_manager: XmssKeyManager, count: int) -> Validators: """Build a validator registry with real XMSS keys from a key manager.""" return Validators( diff --git a/tests/lean_spec/helpers/mocks.py b/tests/lean_spec/helpers/mocks.py index 0e25f906..9a6f69d5 100644 --- a/tests/lean_spec/helpers/mocks.py +++ b/tests/lean_spec/helpers/mocks.py @@ -18,57 +18,6 @@ from lean_spec.types import Bytes32 -class MockNoiseSession: - """ - Mock NoiseSession for testing yamux multiplexing. - - Tracks written data and provides configurable read responses. - Does not perform actual encryption or handshake. - """ - - def __init__(self) -> None: - """Initialize with empty buffers.""" - self._written: list[bytes] = [] - self._to_read: list[bytes] = [] - self._closed = False - - @property - def written(self) -> list[bytes]: - """Data written through this session.""" - return self._written - - @property - def is_closed(self) -> bool: - """Whether the session has been closed.""" - return self._closed - - def queue_read(self, data: bytes) -> None: - """ - Queue data to be returned by the next read call. - - Multiple calls queue data in FIFO order. - """ - self._to_read.append(data) - - async def write(self, plaintext: bytes) -> None: - """Record written data for later inspection.""" - self._written.append(plaintext) - - async def read(self) -> bytes: - """ - Return queued data or empty bytes. - - Consumes queued data in FIFO order. 
- """ - if self._to_read: - return self._to_read.pop(0) - return b"" - - async def close(self) -> None: - """Mark the session as closed.""" - self._closed = True - - class MockNetworkRequester: """Mock network that returns pre-configured blocks and tracks requests.""" diff --git a/tests/lean_spec/subspecs/forkchoice/conftest.py b/tests/lean_spec/subspecs/forkchoice/conftest.py index 9b11caf4..2cea7925 100644 --- a/tests/lean_spec/subspecs/forkchoice/conftest.py +++ b/tests/lean_spec/subspecs/forkchoice/conftest.py @@ -6,65 +6,13 @@ from __future__ import annotations -from typing import Type - import pytest -from lean_spec.subspecs.containers import BlockBody, Checkpoint, State -from lean_spec.subspecs.containers.block import AggregatedAttestations, BlockHeader -from lean_spec.subspecs.containers.config import Config -from lean_spec.subspecs.containers.slot import Slot -from lean_spec.subspecs.containers.state import Validators -from lean_spec.subspecs.containers.state.types import ( - HistoricalBlockHashes, - JustificationRoots, - JustificationValidators, - JustifiedSlots, -) -from lean_spec.subspecs.containers.validator import ValidatorIndex from lean_spec.subspecs.forkchoice import Store -from lean_spec.subspecs.ssz.hash import hash_tree_root -from lean_spec.types import Bytes32, Uint64 +from lean_spec.types import Uint64 from tests.lean_spec.helpers import TEST_VALIDATOR_ID, make_store -class MockState(State): - """Mock state with configurable latest_justified checkpoint.""" - - def __init__(self, latest_justified: Checkpoint) -> None: - """Initialize mock state with minimal defaults.""" - genesis_config = Config( - genesis_time=Uint64(0), - ) - - genesis_header = BlockHeader( - slot=Slot(0), - proposer_index=ValidatorIndex(0), - parent_root=Bytes32.zero(), - state_root=Bytes32.zero(), - body_root=hash_tree_root(BlockBody(attestations=AggregatedAttestations(data=[]))), - ) - - super().__init__( - config=genesis_config, - slot=Slot(0), - latest_block_header=genesis_header, - latest_justified=latest_justified, - latest_finalized=Checkpoint(root=Bytes32.zero(), slot=Slot(0)), - historical_block_hashes=HistoricalBlockHashes(data=[]), - justified_slots=JustifiedSlots(data=[]), - validators=Validators(data=[]), - justifications_roots=JustificationRoots(data=[]), - justifications_validators=JustificationValidators(data=[]), - ) - - -@pytest.fixture -def mock_state_factory() -> Type[MockState]: - """Factory fixture for creating MockState instances.""" - return MockState - - @pytest.fixture def pruning_store() -> Store: """Store with 3 validators for pruning tests.""" diff --git a/tests/lean_spec/subspecs/koalabear/test_field.py b/tests/lean_spec/subspecs/koalabear/test_field.py index 83dc40f8..d4b281b9 100644 --- a/tests/lean_spec/subspecs/koalabear/test_field.py +++ b/tests/lean_spec/subspecs/koalabear/test_field.py @@ -84,95 +84,26 @@ def test_bytes_protocol() -> None: assert isinstance(data, bytes) # Test deserialization - recovered = Fp.from_bytes(data) + recovered = Fp.decode_bytes(data) assert recovered == fp # Test round-trip for various values test_values = [0, 1, 42, 1000, P - 1] for value in test_values: fp = Fp(value=value) - assert Fp.from_bytes(bytes(fp)) == fp + assert Fp.decode_bytes(bytes(fp)) == fp # Test error handling for invalid data length with pytest.raises(ValueError, match="Expected 4 bytes for Fp, got 3"): - Fp.from_bytes(b"\x01\x02\x03") + Fp.decode_bytes(b"\x01\x02\x03") with pytest.raises(ValueError, match="Expected 4 bytes for Fp, got 5"): - 
Fp.from_bytes(b"\x01\x02\x03\x04\x05") + Fp.decode_bytes(b"\x01\x02\x03\x04\x05") # Test error handling for values exceeding the modulus invalid_data = P.to_bytes(4, byteorder="little") with pytest.raises(ValueError, match="exceeds field modulus"): - Fp.from_bytes(invalid_data) - - -def test_serialize_list() -> None: - """Test serialization of field element lists.""" - # Test empty list - elements: list[Fp] = [] - data = Fp.serialize_list(elements) - assert data == b"" - assert len(data) == 0 - - # Test single element - elements = [Fp(value=42)] - data = Fp.serialize_list(elements) - assert len(data) == 4 - assert data == bytes(Fp(value=42)) - - # Test multiple elements - elements = [Fp(value=1), Fp(value=2), Fp(value=3)] - data = Fp.serialize_list(elements) - assert len(data) == 12 # 3 * 4 bytes - assert data == bytes(Fp(value=1)) + bytes(Fp(value=2)) + bytes(Fp(value=3)) - - # Test round-trip - recovered = Fp.deserialize_list(data, 3) - assert recovered == elements - - -def test_deserialize_list() -> None: - """Test deserialization of field element lists.""" - # Test empty list - data = b"" - recovered = Fp.deserialize_list(data, 0) - assert recovered == [] - - # Test single element - elements = [Fp(value=42)] - data = Fp.serialize_list(elements) - recovered = Fp.deserialize_list(data, 1) - assert recovered == elements - - # Test multiple elements - elements = [Fp(value=10), Fp(value=20), Fp(value=30), Fp(value=40)] - data = Fp.serialize_list(elements) - recovered = Fp.deserialize_list(data, 4) - assert recovered == elements - - # Test error handling for incorrect length - with pytest.raises(ValueError, match="Expected 8 bytes for 2 elements, got 4"): - Fp.deserialize_list(b"\x01\x02\x03\x04", 2) - - with pytest.raises(ValueError, match="Expected 12 bytes for 3 elements, got 10"): - Fp.deserialize_list(b"\x01" * 10, 3) - - -def test_serialize_list_roundtrip_property() -> None: - """Property test: serialization round-trip should preserve values.""" - random.seed(42) - - for _ in range(100): - # Generate random list of field elements - count = random.randint(0, 20) - elements = [Fp(value=random.randint(0, P - 1)) for _ in range(count)] - - # Round-trip - data = Fp.serialize_list(elements) - recovered = Fp.deserialize_list(data, count) - - assert recovered == elements - assert len(data) == count * 4 + Fp.decode_bytes(invalid_data) def test_ssz_type_properties() -> None: @@ -264,11 +195,9 @@ def test_ssz_roundtrip() -> None: data2 = fp.encode_bytes() assert data1 == data2 - # Test all deserialization methods work - recovered1 = Fp.from_bytes(data1) - recovered2 = Fp.decode_bytes(data2) - assert recovered1 == fp - assert recovered2 == fp + # Test deserialization works + recovered = Fp.decode_bytes(data1) + assert recovered == fp def test_ssz_deterministic() -> None: diff --git a/tests/lean_spec/subspecs/networking/gossipsub/integration/conftest.py b/tests/lean_spec/subspecs/networking/gossipsub/integration/conftest.py index 2e4757e0..7093c4da 100644 --- a/tests/lean_spec/subspecs/networking/gossipsub/integration/conftest.py +++ b/tests/lean_spec/subspecs/networking/gossipsub/integration/conftest.py @@ -56,14 +56,3 @@ async def two_nodes( await network.start_all() await nodes[0].connect_to(nodes[1]) return nodes[0], nodes[1] - - -@pytest.fixture -async def three_nodes( - network: GossipsubTestNetwork, -) -> list[GossipsubTestNode]: - """Three fully connected nodes with fast parameters.""" - nodes = await network.create_nodes(3, fast_params()) - await network.start_all() - await 
network.connect_full() - return nodes diff --git a/tests/lean_spec/subspecs/networking/gossipsub/integration/network.py b/tests/lean_spec/subspecs/networking/gossipsub/integration/network.py index 9d5fe906..5ff6fa01 100644 --- a/tests/lean_spec/subspecs/networking/gossipsub/integration/network.py +++ b/tests/lean_spec/subspecs/networking/gossipsub/integration/network.py @@ -92,17 +92,6 @@ async def connect_full(self) -> None: for node_b in self.nodes[i + 1 :]: await node_a.connect_to(node_b) - async def connect_star(self, center: int = 0) -> None: - """Connect all nodes to a central hub node. - - All traffic flows through the hub. Useful for testing - single-point-of-failure and fan-out scenarios. - """ - hub = self.nodes[center] - for i, node in enumerate(self.nodes): - if i != center: - await hub.connect_to(node) - async def connect_chain(self) -> None: """Connect nodes in a linear chain: 0-1-2-...-N. diff --git a/tests/lean_spec/subspecs/networking/gossipsub/test_cache_edge_cases.py b/tests/lean_spec/subspecs/networking/gossipsub/test_cache_edge_cases.py index cd976eaa..b3d3cfa1 100644 --- a/tests/lean_spec/subspecs/networking/gossipsub/test_cache_edge_cases.py +++ b/tests/lean_spec/subspecs/networking/gossipsub/test_cache_edge_cases.py @@ -169,14 +169,14 @@ def test_put_duplicate_returns_false(self) -> None: assert cache.put("t", msg) is False assert len(cache) == 1 - def test_contains_operator(self) -> None: - """The __contains__ operator works for message IDs.""" + def test_has_method(self) -> None: + """The has() method works for message IDs.""" cache = MessageCache() msg = GossipsubMessage(topic=b"t", raw_data=b"data") cache.put("t", msg) - assert msg.id in cache - assert Bytes20(b"\x00" * 20) not in cache + assert cache.has(msg.id) + assert not cache.has(Bytes20(b"\x00" * 20)) class TestSeenCache: @@ -230,14 +230,14 @@ def test_clear_empties_all(self) -> None: seen.clear() assert len(seen) == 0 - def test_contains_operator(self) -> None: - """The __contains__ operator works for seen message IDs.""" + def test_has_method(self) -> None: + """The has() method works for seen message IDs.""" seen = SeenCache() msg_id = Bytes20(b"12345678901234567890") seen.add(msg_id, time.time()) - assert msg_id in seen - assert Bytes20(b"\x00" * 20) not in seen + assert seen.has(msg_id) + assert not seen.has(Bytes20(b"\x00" * 20)) class TestGossipsubMessageId: @@ -309,20 +309,6 @@ def bad_decompress(_: bytes) -> bytes: assert id_failed == id_none -class TestGossipsubMessageTopicStr: - """Tests for GossipsubMessage.topic_str property.""" - - def test_topic_str_decodes_utf8(self) -> None: - """topic_str decodes bytes to UTF-8 string.""" - msg = GossipsubMessage(topic=b"/eth2/topic", raw_data=b"data") - assert msg.topic_str == "/eth2/topic" - - def test_topic_str_empty(self) -> None: - """topic_str handles empty topic bytes.""" - msg = GossipsubMessage(topic=b"", raw_data=b"data") - assert msg.topic_str == "" - - class TestGossipsubMessageHash: """Tests for GossipsubMessage.__hash__.""" diff --git a/tests/lean_spec/subspecs/networking/gossipsub/test_gossipsub.py b/tests/lean_spec/subspecs/networking/gossipsub/test_gossipsub.py index fa19d20a..5b3da653 100644 --- a/tests/lean_spec/subspecs/networking/gossipsub/test_gossipsub.py +++ b/tests/lean_spec/subspecs/networking/gossipsub/test_gossipsub.py @@ -10,7 +10,6 @@ GossipsubParameters, GossipTopic, TopicKind, - format_topic_string, parse_topic_string, ) from lean_spec.subspecs.networking.gossipsub.mesh import FanoutEntry, MeshState, TopicMesh @@ -25,10 
+24,6 @@ Message, SubOpts, create_graft_rpc, - create_ihave_rpc, - create_iwant_rpc, - create_prune_rpc, - create_publish_rpc, create_subscription_rpc, ) @@ -76,16 +71,6 @@ def test_control_message_empty_check(self) -> None: class TestTopicForkValidation: """Test suite for topic fork compatibility validation.""" - def test_is_fork_compatible_matching(self) -> None: - """Test is_fork_compatible returns True for matching fork_digest.""" - topic = GossipTopic(kind=TopicKind.BLOCK, fork_digest="0x12345678") - assert topic.is_fork_compatible("0x12345678") - - def test_is_fork_compatible_mismatched(self) -> None: - """Test is_fork_compatible returns False for mismatched fork_digest.""" - topic = GossipTopic(kind=TopicKind.BLOCK, fork_digest="0x12345678") - assert not topic.is_fork_compatible("0xdeadbeef") - def test_validate_fork_success(self) -> None: """Test validate_fork passes for matching fork_digest.""" topic = GossipTopic(kind=TopicKind.BLOCK, fork_digest="0x12345678") @@ -145,11 +130,6 @@ def test_gossip_topic_factory_methods(self) -> None: kind=TopicKind.ATTESTATION_SUBNET, fork_digest="0xabcd1234", subnet_id=SubnetId(0) ) - def test_format_topic_string(self) -> None: - """Test topic string formatting.""" - result = format_topic_string("block", "0x12345678") - assert result == "/leanconsensus/0x12345678/block/ssz_snappy" - def test_parse_topic_string(self) -> None: """Test topic string parsing.""" assert parse_topic_string("/leanconsensus/0x12345678/block/ssz_snappy") == ( @@ -188,11 +168,11 @@ def test_subscribe_and_unsubscribe(self) -> None: mesh = MeshState(params=GossipsubParameters()) mesh.subscribe("topic1") - assert mesh.is_subscribed("topic1") - assert not mesh.is_subscribed("topic2") + assert "topic1" in mesh.subscriptions + assert "topic2" not in mesh.subscriptions peers = mesh.unsubscribe("topic1") - assert not mesh.is_subscribed("topic1") + assert "topic1" not in mesh.subscriptions assert peers == set() def test_add_remove_mesh_peers(self) -> None: @@ -507,24 +487,6 @@ def test_rpc_helper_functions(self) -> None: control=ControlMessage(graft=[ControlGraft(topic_id="/topic1")]) ) - assert create_prune_rpc(["/topic1"], backoff=120) == RPC( - control=ControlMessage(prune=[ControlPrune(topic_id="/topic1", backoff=120)]) - ) - - assert create_ihave_rpc("/topic1", [b"msg1", b"msg2"]) == RPC( - control=ControlMessage( - ihave=[ControlIHave(topic_id="/topic1", message_ids=[b"msg1", b"msg2"])] - ) - ) - - assert create_iwant_rpc([b"msg1"]) == RPC( - control=ControlMessage(iwant=[ControlIWant(message_ids=[b"msg1"])]) - ) - - assert create_publish_rpc("/topic1", b"data") == RPC( - publish=[Message(topic="/topic1", data=b"data")] - ) - def test_wire_format_compatibility(self) -> None: """Test wire format matches expected protobuf encoding. 
diff --git a/tests/lean_spec/subspecs/networking/test_peer.py b/tests/lean_spec/subspecs/networking/test_peer.py index 2edf5ea4..397de6b4 100644 --- a/tests/lean_spec/subspecs/networking/test_peer.py +++ b/tests/lean_spec/subspecs/networking/test_peer.py @@ -9,7 +9,7 @@ from lean_spec.subspecs.networking.enr.eth2 import FAR_FUTURE_EPOCH from lean_spec.subspecs.networking.peer import PeerInfo from lean_spec.subspecs.networking.reqresp import Status -from lean_spec.subspecs.networking.types import ConnectionState, Direction, GoodbyeReason, SeqNumber +from lean_spec.subspecs.networking.types import ConnectionState, Direction, SeqNumber from lean_spec.types import Bytes32, Bytes64 @@ -29,16 +29,6 @@ def test_state_values(self) -> None: assert ConnectionState.DISCONNECTING == 4 -class TestGoodbyeReason: - """Tests for GoodbyeReason codes.""" - - def test_official_codes(self) -> None: - """Official spec codes have correct values.""" - assert GoodbyeReason.CLIENT_SHUTDOWN == 1 - assert GoodbyeReason.IRRELEVANT_NETWORK == 2 - assert GoodbyeReason.FAULT_OR_ERROR == 3 - - class TestDirection: """Tests for Direction enum.""" diff --git a/tests/lean_spec/subspecs/networking/transport/identity/test_keypair.py b/tests/lean_spec/subspecs/networking/transport/identity/test_keypair.py index 18633347..baa7455f 100644 --- a/tests/lean_spec/subspecs/networking/transport/identity/test_keypair.py +++ b/tests/lean_spec/subspecs/networking/transport/identity/test_keypair.py @@ -101,7 +101,7 @@ def test_peer_id_uses_secp256k1_key_type(self) -> None: keypair = IdentityKeypair.generate() peer_id = keypair.to_peer_id() - multihash = peer_id.to_bytes() + multihash = peer_id.multihash assert multihash[2] == 0x08 assert multihash[3] == KeyType.SECP256K1 diff --git a/tests/lean_spec/subspecs/networking/transport/test_peer_id.py b/tests/lean_spec/subspecs/networking/transport/test_peer_id.py index 064a9872..a49002d8 100644 --- a/tests/lean_spec/subspecs/networking/transport/test_peer_id.py +++ b/tests/lean_spec/subspecs/networking/transport/test_peer_id.py @@ -273,15 +273,6 @@ def test_different_keys_different_peerids(self) -> None: assert str(peer_id1) != str(peer_id2) - def test_derive_general_function(self) -> None: - """PeerId.derive() works with key data and type.""" - key_data = bytes([0x02] + [0] * 32) # secp256k1 compressed format - peer_id = PeerId.derive(key_data, KeyType.SECP256K1) - - peer_id_str = str(peer_id) - assert len(peer_id_str) > 0 - assert all(c in Base58.ALPHABET for c in peer_id_str) - def test_from_secp256k1_invalid_length(self) -> None: """from_secp256k1 rejects invalid key lengths.""" with pytest.raises(ValueError, match="must be 33 bytes"): @@ -311,7 +302,7 @@ def test_peer_id_uses_sha256_for_large_keys(self) -> None: # Create a key type that produces > 42 bytes encoded # A 128-byte key should exceed the limit large_key = bytes(128) - peer_id = PeerId.derive(large_key, KeyType.RSA) + peer_id = PeerId.from_public_key(PublicKeyProto(key_type=KeyType.RSA, key_data=large_key)) decoded = Base58.decode(str(peer_id)) diff --git a/tests/lean_spec/subspecs/ssz/test_merkleization.py b/tests/lean_spec/subspecs/ssz/test_merkleization.py index ec9e512a..ddfc0b5e 100644 --- a/tests/lean_spec/subspecs/ssz/test_merkleization.py +++ b/tests/lean_spec/subspecs/ssz/test_merkleization.py @@ -7,7 +7,6 @@ from lean_spec.subspecs.ssz.merkleization import ( _zero_tree_root, merkleize, - merkleize_progressive, mix_in_length, mix_in_selector, ) @@ -150,43 +149,3 @@ def test_zero_tree_root_internal() -> None: assert 
_zero_tree_root(4) == Z[2] assert _zero_tree_root(8) == Z[3] assert _zero_tree_root(16) == Z[4] - - -def test_merkleize_progressive_empty() -> None: - """Tests progressive merkleization of an empty list.""" - assert merkleize_progressive([]) == ZERO_HASH - - -def test_merkleize_progressive_single_chunk() -> None: - """Tests progressive merkleization of a single chunk.""" - # right = merkleize([c[0]], 1) -> c[0] - # left = ZERO_HASH - expected = h(ZERO_HASH, c[0]) - assert merkleize_progressive([c[0]], num_leaves=1) == expected - - -def test_merkleize_progressive_five_chunks() -> None: - """ - Tests progressive merkleization with multiple recursive steps. - Calculates the expected root manually by tracing the spec's logic. - """ - chunks = c[0:5] - - # Manually trace the recursion for `merkleize_progressive(chunks, 1)`: - # Step 1 (num_leaves=1): - # right1 = merkleize([c0], 1) -> c0 - # left1 = merkleize_progressive([c1, c2, c3, c4], 4) - # - # To calculate left1, recurse... - # Step 2 (num_leaves=4): - # right2 = merkleize([c1, c2, c3, c4], 4) -> h(h(c1,c2), h(c3,c4)) - # left2 = ZERO_HASH (no more chunks) - # So, left1 = h(left2, right2) = h(Z[0], right2) - # - # Final result is h(left1, right1) - right2 = h(h(c[1], c[2]), h(c[3], c[4])) - left1 = h(Z[0], right2) - right1 = c[0] - expected = h(left1, right1) - - assert merkleize_progressive(chunks, num_leaves=1) == expected diff --git a/tests/lean_spec/subspecs/ssz/test_pack.py b/tests/lean_spec/subspecs/ssz/test_pack.py index d85947d5..0ee25e22 100644 --- a/tests/lean_spec/subspecs/ssz/test_pack.py +++ b/tests/lean_spec/subspecs/ssz/test_pack.py @@ -10,7 +10,6 @@ from lean_spec.subspecs.ssz.pack import ( _partition_chunks, _right_pad_to_chunk, - pack_basic_serialized, pack_bits, pack_bytes, ) @@ -101,30 +100,6 @@ def test_pack_bytes(payload_hex: str, expected_chunks_hex: PyList[str]) -> None: assert _hex_chunks(out) == expected_chunks_hex -def test_pack_basic_serialized_empty() -> None: - assert pack_basic_serialized([]) == [] - - -def test_pack_basic_serialized_small_values() -> None: - # Two serialized Uint16 (little-endian): 0x4567 -> 67 45, 0x0123 -> 23 01 - values = [b"\x67\x45", b"\x23\x01"] - out = pack_basic_serialized(values) - assert len(out) == 1 - assert out[0].hex() == _pad32_hex("67452301") - - -def test_pack_basic_serialized_multi_chunk() -> None: - # 40 bytes worth of already-serialized basic scalars (e.g., 40 x uint8) - values = [bytes([i]) for i in range(40)] - out = pack_basic_serialized(values) - assert len(out) == 2 - # first chunk: 0..31 - assert out[0].hex() == "".join(f"{i:02x}" for i in range(32)) - # second chunk: 32..39 then padded - tail_hex = "".join(f"{i:02x}" for i in range(32, 40)) - assert out[1].hex() == _pad32_hex(tail_hex) - - def test_pack_bits_empty() -> None: assert pack_bits(()) == [] diff --git a/tests/lean_spec/types/test_bitfields.py b/tests/lean_spec/types/test_bitfields.py index e30f0f37..3cfcef88 100644 --- a/tests/lean_spec/types/test_bitfields.py +++ b/tests/lean_spec/types/test_bitfields.py @@ -5,7 +5,6 @@ import pytest from pydantic import BaseModel, ValidationError -from typing_extensions import Tuple from lean_spec.types.bitfields import BaseBitlist, BaseBitvector from lean_spec.types.boolean import Boolean @@ -218,7 +217,7 @@ class TestBitfieldSerialization: ], ) def test_bitvector_serialization_deserialization( - self, length: int, value: Tuple[bool, ...], expected_hex: str + self, length: int, value: tuple[bool, ...], expected_hex: str ) -> None: """Tests the round trip of 
serializing and deserializing for Bitvector.""" @@ -247,7 +246,7 @@ class TestBitvector(BaseBitvector): ], ) def test_bitlist_serialization_deserialization( - self, limit: int, value: Tuple[bool, ...], expected_hex: str + self, limit: int, value: tuple[bool, ...], expected_hex: str ) -> None: """Tests the round trip of serializing and deserializing for Bitlist.""" @@ -339,7 +338,7 @@ class Bitlist16(BaseBitlist): ], ) def test_bitvector_encode_decode( - self, length: int, value: Tuple[int, ...], expected_hex: str + self, length: int, value: tuple[int, ...], expected_hex: str ) -> None: class TestBitvector(BaseBitvector): LENGTH = length @@ -375,7 +374,7 @@ class TestBitvector(BaseBitvector): ], ) def test_bitlist_encode_decode( - self, limit: int, value: Tuple[int, ...], expected_hex: str + self, limit: int, value: tuple[int, ...], expected_hex: str ) -> None: class TestBitlist(BaseBitlist): LIMIT = limit diff --git a/tests/lean_spec/types/test_collections.py b/tests/lean_spec/types/test_collections.py index 2b709419..c84659cb 100644 --- a/tests/lean_spec/types/test_collections.py +++ b/tests/lean_spec/types/test_collections.py @@ -1,10 +1,9 @@ """Tests for the SSZVector and List types.""" -from typing import Any, Tuple +from typing import Any import pytest from pydantic import BaseModel, ValidationError -from typing_extensions import Type from lean_spec.subspecs.koalabear import Fp from lean_spec.types.boolean import Boolean @@ -352,7 +351,7 @@ class TestSSZVectorSerialization: ], ) def test_fixed_size_element_vector_serialization( - self, vector_type: Type[SSZVector], value: Tuple[Any, ...], expected_hex: str + self, vector_type: type[SSZVector], value: tuple[Any, ...], expected_hex: str ) -> None: """Tests the serialization of vectors with fixed-size elements.""" instance = vector_type(data=value) @@ -423,7 +422,7 @@ class TestSSZListSerialization: ], ) def test_fixed_size_element_list_serialization( - self, list_type: Type[SSZList], value: Tuple[Any, ...], expected_hex: str + self, list_type: type[SSZList], value: tuple[Any, ...], expected_hex: str ) -> None: """Tests the serialization of lists with fixed-size elements.""" instance = list_type(data=value)
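On the test_peer_id.py hunks above: the general-purpose PeerId.derive(key_data, key_type) helper is removed, and the surviving path wraps the raw key in a protobuf-style container first. A hedged sketch of the new call shape; PeerId's module appears in the diffstat, but the import locations of PublicKeyProto and KeyType are assumptions and may differ:

    # Sketch under the assumptions stated above; not a verbatim excerpt.
    from lean_spec.subspecs.networking.transport.peer_id import (
        KeyType,
        PeerId,
        PublicKeyProto,
    )

    compressed = bytes([0x02] + [0] * 32)  # 33-byte compressed secp256k1 point

    # Before this patch: PeerId.derive(compressed, KeyType.SECP256K1)
    peer_id = PeerId.from_public_key(
        PublicKeyProto(key_type=KeyType.SECP256K1, key_data=compressed)
    )
    assert len(str(peer_id)) > 0  # base58-encoded multihash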
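The test_bitfields.py and test_collections.py hunks are a mechanical typing cleanup: PEP 585 built-in generics (tuple[...], type[...]) replace typing.Tuple, typing.Type, and the typing_extensions re-exports, which also drops an unused import from each file. A self-contained sketch of the before/after annotation style (illustrative names, requires Python >= 3.9):

    from typing import Any

    # Before: from typing import Tuple, Type
    #         def run(t: Type[object], v: Tuple[Any, ...]) -> None: ...
    # After: built-in generics need no extra imports.
    def run(t: type[object], v: tuple[Any, ...]) -> None:
        assert isinstance(t, type)
        assert isinstance(v, tuple)

    run(int, (1, 2, 3))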