diff --git a/.github/actions/build-evm-client/geth/action.yaml b/.github/actions/build-evm-client/geth/action.yaml index 2cdb333926..e912f36121 100644 --- a/.github/actions/build-evm-client/geth/action.yaml +++ b/.github/actions/build-evm-client/geth/action.yaml @@ -16,21 +16,51 @@ inputs: runs: using: "composite" steps: + - name: Get latest geth commit + id: geth-sha + shell: bash + run: | + SHA=$(git ls-remote https://github.com/${{ inputs.repo }}.git refs/heads/${{ inputs.ref }} | cut -f1) + echo "sha=$SHA" >> $GITHUB_OUTPUT + echo "Latest geth commit: $SHA" + - name: Prepare cache target dir + shell: bash + run: mkdir -p "$GITHUB_WORKSPACE/go-ethereum/cmd/evm" + - name: Restore build cache + id: cache-restore + uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 + with: + path: | + ${{ github.workspace }}/go-ethereum/cmd/evm/evm + key: geth-evm-build-sha=${{ steps.geth-sha.outputs.sha }} + restore-keys: | + geth-evm-build- - name: Checkout go-ethereum + if: steps.cache-restore.outputs.cache-hit != 'true' uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 with: repository: ${{ inputs.repo }} - ref: ${{ inputs.ref }} + ref: ${{ steps.geth-sha.outputs.sha }} path: go-ethereum - name: Setup golang + if: steps.cache-restore.outputs.cache-hit != 'true' uses: actions/setup-go@0aaccfd150d50ccaeb58ebd88d36e91967a5f35b with: go-version: ${{ inputs.golang }} cache-dependency-path: go-ethereum/go.sum - name: Build evm cmd + if: steps.cache-restore.outputs.cache-hit != 'true' shell: bash run: | - mkdir -p $GITHUB_WORKSPACE/bin cd $GITHUB_WORKSPACE/go-ethereum/cmd/evm go build . - echo $GITHUB_WORKSPACE/go-ethereum/cmd/evm >> $GITHUB_PATH \ No newline at end of file + - name: Add geth evm to PATH + shell: bash + run: echo $GITHUB_WORKSPACE/go-ethereum/cmd/evm >> $GITHUB_PATH + - name: Save build cache + if: steps.cache-restore.outputs.cache-hit != 'true' + uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 + with: + path: | + ${{ github.workspace }}/go-ethereum/cmd/evm/evm + key: geth-evm-build-sha=${{ steps.geth-sha.outputs.sha }} diff --git a/.github/configs/evm.yaml b/.github/configs/evm.yaml index 1018923af6..0912e751d6 100644 --- a/.github/configs/evm.yaml +++ b/.github/configs/evm.yaml @@ -12,7 +12,6 @@ static: ref: master targets: ["evmone-t8n"] benchmark: - impl: evmone - repo: ethereum/evmone - ref: master - targets: ["evmone-t8n"] \ No newline at end of file + impl: geth + repo: ethereum/go-ethereum + ref: master \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index bf183b8bce..472351d012 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -28,6 +28,7 @@ This specification aims to be: - Attempt to use descriptive English words (or _very common_ abbreviations) in documentation and identifiers. - Avoid using EIP numbers in identifiers, and prefer descriptive text instead (eg. `FeeMarketTransaction` instead of `Eip1559Transaction`). - If necessary, there is a custom dictionary `whitelist.txt`. +- Avoid uninformative prefixes in identifiers (like `get_` or `compute_`). They don't add useful meaning and take up valuable real estate. 
#### Comments diff --git a/packages/testing/pyproject.toml b/packages/testing/pyproject.toml index dcc4d4af68..dff6f183ef 100644 --- a/packages/testing/pyproject.toml +++ b/packages/testing/pyproject.toml @@ -39,13 +39,13 @@ dependencies = [ "pydantic>=2.12.3,<3", "rich>=13.7.0,<14", "filelock>=3.15.1,<4", - "ethereum-types>=0.2.1,<0.3", + "ethereum-types>=0.3.0,<0.4", "pyyaml>=6.0.2,<7", "types-pyyaml>=6.0.12.20240917,<7", "pytest-json-report>=1.5.0,<2", "typing-extensions>=4.12.2,<5", "questionary>=2.1.0,<3", - "ethereum-rlp>=0.1.3,<0.2", + "ethereum-rlp>=0.1.5,<0.2", "pytest-regex>=0.2.0,<0.3", "eth-abi>=5.2.0", "joblib>=1.4.2", diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/execute.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/execute.py index 73e93d0926..7c1ce4deea 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/execute.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/execute/execute.py @@ -649,7 +649,6 @@ def base_test_parametrizer_func( class BaseTestWrapper(cls): # type: ignore def __init__(self, *args: Any, **kwargs: Any) -> None: - kwargs["t8n_dump_dir"] = None if "pre" not in kwargs: kwargs["pre"] = pre elif kwargs["pre"] != pre: diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/filler.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/filler.py index 628d8df621..bb9dfea4f9 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/filler.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/filler.py @@ -10,6 +10,7 @@ import configparser import datetime import gc +import hashlib import json import logging import os @@ -53,6 +54,7 @@ StateFixture, TestInfo, merge_partial_fixture_files, + strip_fixture_format_from_node, ) from execution_testing.fixtures.pre_alloc_groups import ( _get_worker_id, @@ -423,6 +425,58 @@ def save_pre_alloc_groups(self) -> None: ) +@dataclass(kw_only=True) +class TransitionToolCacheStats: + """Stats for caching of the transition tool requests.""" + + key_test_hits: int = 0 + key_test_miss: int = 0 + subkey_test_hits: int = 0 + subkey_test_miss: int = 0 + unique_keys: int = 0 + _seen_keys: Set[str] = field(default_factory=set, repr=False) + + def record_key(self, key: str) -> None: + """Record a cache key and update unique_keys count.""" + self._seen_keys.add(key) + self.unique_keys = len(self._seen_keys) + + @property + def expected_hits(self) -> int: + """Number of tests expected to hit the cache.""" + total_cacheable = self.key_test_hits + self.key_test_miss + return total_cacheable - self.unique_keys + + def to_dict(self) -> Dict[str, int]: + """Convert stats to dict for xdist worker transfer.""" + return { + "key_test_hits": self.key_test_hits, + "key_test_miss": self.key_test_miss, + "subkey_test_hits": self.subkey_test_hits, + "subkey_test_miss": self.subkey_test_miss, + "unique_keys": self.unique_keys, + } + + def add(self, other: "TransitionToolCacheStats") -> None: + """Add another stats object to this one.""" + self.key_test_hits += other.key_test_hits + self.key_test_miss += other.key_test_miss + self.subkey_test_hits += other.subkey_test_hits + self.subkey_test_miss += other.subkey_test_miss + self.unique_keys += other.unique_keys + + @classmethod + def from_dict(cls, data: Dict[str, int]) -> "TransitionToolCacheStats": + """Create stats from dict (xdist worker transfer).""" + 
return cls( + key_test_hits=data.get("key_test_hits", 0), + key_test_miss=data.get("key_test_miss", 0), + subkey_test_hits=data.get("subkey_test_hits", 0), + subkey_test_miss=data.get("subkey_test_miss", 0), + unique_keys=data.get("unique_keys", 0), + ) + + def calculate_post_state_diff( post_state: BaseAlloc, genesis_state: BaseAlloc ) -> BaseAlloc: @@ -859,6 +913,16 @@ def pytest_configure(config: pytest.Config) -> None: f"{command_line_args}" ) + # Initialize aggregated cache stats on xdist controller. + # Controller = xdist active but not a worker. + numprocesses = config.getoption("numprocesses", None) + is_xdist_active = isinstance(numprocesses, int) and numprocesses > 0 + is_controller = is_xdist_active and not hasattr(config, "workerinput") + if is_controller: + config.t8n_cache_stats_aggregated = ( # type: ignore[attr-defined] + TransitionToolCacheStats() + ) + @pytest.hookimpl(trylast=True) def pytest_report_header(config: pytest.Config) -> List[str]: @@ -907,6 +971,36 @@ def pytest_terminal_summary( yield if config.fixture_output.is_stdout or hasattr(config, "workerinput"): # type: ignore[attr-defined] return + + # Get cache stats: try aggregated (xdist), else local (sequential) + t8n_cache_stats: TransitionToolCacheStats | None = getattr( + config, "t8n_cache_stats_aggregated", None + ) or getattr(config, "transition_tool_cache_stats", None) + + if t8n_cache_stats is not None: + expected = t8n_cache_stats.expected_hits + actual = t8n_cache_stats.key_test_hits + if expected > 0: + efficiency = actual / expected * 100 + terminalreporter.write_sep( + "=", + ( + f" T8n cache: {efficiency:.0f}% hit rate" + f" ({actual}/{expected} tests expected)," + f" {t8n_cache_stats.subkey_test_hits} t8n calls saved" + ), + bold=True, + green=efficiency == 100, + ) + elif t8n_cache_stats.unique_keys > 0: + terminalreporter.write_sep( + "=", + ( + f" T8n cache: {t8n_cache_stats.unique_keys} unique" + " test groups, no cache sharing possible" + ), + bold=True, + ) stats = terminalreporter.stats if "passed" in stats and stats["passed"]: # Custom message for Phase 1 (pre-allocation group generation) @@ -961,6 +1055,15 @@ def pytest_terminal_summary( ) +def _aggregate_cache_stats(node: Any) -> None: + """Aggregate t8n cache stats from an xdist worker.""" + worker_stats = getattr(node, "workeroutput", {}).get("t8n_cache_stats") + if worker_stats and hasattr(node.config, "t8n_cache_stats_aggregated"): + node.config.t8n_cache_stats_aggregated.add( + TransitionToolCacheStats.from_dict(worker_stats) + ) + + def pytest_metadata(metadata: Any) -> None: """Add or remove metadata to/from the pytest report.""" metadata.pop("JAVA_HOME", None) @@ -1077,7 +1180,7 @@ def verify_fixtures_bin(request: pytest.FixtureRequest) -> Path | None: @pytest.fixture(autouse=True, scope="session") -def t8n( +def session_t8n( request: pytest.FixtureRequest, ) -> Generator[TransitionTool, None, None]: """Return configured transition tool.""" @@ -1095,6 +1198,70 @@ def t8n( t8n.shutdown() +def get_t8n_cache_key(request: pytest.FixtureRequest) -> str | None: + """Get the cache key to be used for the current test, if any.""" + mark: pytest.Mark = request.node.get_closest_marker( + "transition_tool_cache_key" + ) + if mark is not None and len(mark.args) == 1: + return f"{strip_fixture_format_from_node(request.node)}-{mark.args[0]}" + return None + + +@pytest.fixture(autouse=True, scope="session") +def transition_tool_cache_stats( + request: pytest.FixtureRequest, +) -> Generator[TransitionToolCacheStats, None, None]: + """Get the 
transition tool cache stats.""" + stats = TransitionToolCacheStats() + yield stats + # Store stats for later access + request.config.transition_tool_cache_stats = stats # type: ignore[attr-defined] + # For xdist workers, send stats to controller via workeroutput + if hasattr(request.config, "workeroutput"): + request.config.workeroutput["t8n_cache_stats"] = stats.to_dict() + + +@pytest.fixture(autouse=True, scope="function") +def t8n( + request: pytest.FixtureRequest, + session_t8n: TransitionTool, + dump_dir_parameter_level: Path | None, + transition_tool_cache_stats: TransitionToolCacheStats, +) -> Generator[TransitionTool, None, None]: + """Set the transition tool up for the current test.""" + if transition_tool_cache_key := get_t8n_cache_key(request): + # This test is allowed to cache results + transition_tool_cache_stats.record_key(transition_tool_cache_key) + if session_t8n.set_cache(key=transition_tool_cache_key): + transition_tool_cache_stats.key_test_hits += 1 + else: + transition_tool_cache_stats.key_test_miss += 1 + else: + # Test cannot use output cache, remove it + session_t8n.remove_cache() + # Reset the traces + session_t8n.reset_traces() + session_t8n.call_counter = 0 + session_t8n.debug_dump_dir = dump_dir_parameter_level + # TODO: Configure the transition tool to count opcodes only when required. + session_t8n.reset_opcode_count() + yield session_t8n + # Only collect subkey stats for cacheable tests (non-cacheable tests + # still interact with the OutputCache after remove_cache, producing + # phantom misses that would skew the hit rate). + if transition_tool_cache_key and session_t8n.output_cache is not None: + transition_tool_cache_stats.subkey_test_hits += ( + session_t8n.output_cache.hits + ) + transition_tool_cache_stats.subkey_test_miss += ( + session_t8n.output_cache.misses + ) + # Reset counters to avoid double-counting (cache persists across tests) + session_t8n.output_cache.hits = 0 + session_t8n.output_cache.misses = 0 + + @pytest.fixture(scope="session") def do_fixture_verification( request: pytest.FixtureRequest, verify_fixtures_bin: Path | None @@ -1346,11 +1513,21 @@ def filler_path(request: pytest.FixtureRequest) -> Path: return request.config.getoption("filler_path") +def _strip_xdist_group_suffix(s: str) -> str: + """Strip @t8n-cache-* suffix, preserving other xdist_group markers.""" + if "@" in s: + base, suffix = s.rsplit("@", 1) + if suffix.startswith("t8n-cache-"): + return base + return s + + def node_to_test_info(node: pytest.Item) -> TestInfo: """Return test info of the current node item.""" + # Strip xdist group suffix (@groupname) that may be added during execution. 
return TestInfo( - name=node.name, - id=node.nodeid, + name=_strip_xdist_group_suffix(node.name), + id=_strip_xdist_group_suffix(node.nodeid), original_name=node.originalname, # type: ignore module_path=Path(node.path), ) @@ -1418,7 +1595,6 @@ def base_test_parametrizer_func( reference_spec: ReferenceSpec, pre: Alloc, output_dir: Path, - dump_dir_parameter_level: Path | None, fixture_collector: FixtureCollector, test_case_description: str, fixture_source_url: str, @@ -1448,7 +1624,6 @@ def base_test_parametrizer_func( class BaseTestWrapper(cls): # type: ignore def __init__(self, *args: Any, **kwargs: Any) -> None: - kwargs["t8n_dump_dir"] = dump_dir_parameter_level if "pre" not in kwargs: kwargs["pre"] = pre if "expected_benchmark_gas_used" not in kwargs: @@ -1489,7 +1664,9 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: group_salt = str(pre_alloc_group_marker.args[0]) else: # We got the marker but unspecified, pass test name - group_salt = request.node.nodeid + group_salt = _strip_xdist_group_suffix( + request.node.nodeid + ) pre_alloc_hash: str | None = None # Phase 1: Generate pre-allocation groups @@ -1497,7 +1674,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: # Use the original update_pre_alloc_groups method which # returns the groups assert session.pre_alloc_group_builders is not None - test_id = str(request.node.nodeid) + test_id = _strip_xdist_group_suffix(request.node.nodeid) genesis_environment = self.get_genesis_environment() pre_alloc_hash = pre.compute_pre_alloc_group_hash( fork=fork, @@ -1634,6 +1811,7 @@ def pytest_generate_tests(metafunc: pytest.Metafunc) -> None: ) +@pytest.hookimpl(tryfirst=True) def pytest_collection_modifyitems( config: pytest.Config, items: List[pytest.Item | pytest.Function] ) -> None: @@ -1647,8 +1825,6 @@ def pytest_collection_modifyitems( These can't be handled in this plugins pytest_generate_tests() as the fork parametrization occurs in the forks plugin. """ - del config - items_for_removal = [] for i, item in enumerate(items): item.name = item.name.strip().replace(" ", "-") @@ -1707,18 +1883,61 @@ def pytest_collection_modifyitems( for i in reversed(items_for_removal): items.pop(i) - # Schedule slow-marked tests first (Longest Processing Time First). - # Workers each grab the next test from the queue, so slow tests get - # distributed across workers and finish before the fast-test tail. - slow_items = [] - normal_items = [] + # Build base_nodeid cache and identify slow groups. + # If ANY fixture format variant is marked slow, treat ALL variants as slow + # to keep them grouped together for cache locality. + item_base_nodeids: Dict[int, str] = {} + slow_base_nodeids: set[str] = set() for item in items: + base_nodeid = strip_fixture_format_from_node(item) + item_base_nodeids[id(item)] = base_nodeid if item.get_closest_marker("slow") is not None: - slow_items.append(item) - else: - normal_items.append(item) - if slow_items: - items[:] = slow_items + normal_items + slow_base_nodeids.add(base_nodeid) + + # Sort items for optimal execution order: + # 1. Slow groups first (LPT scheduling for xdist load balance) + # 2. Related fixture formats together (cache locality) + # 3. Cacheable formats first within a group (so non-cacheable formats + # don't clear the cache between two cacheable ones; e.g., for + # StateTest the _from_state_test labels sort engine_x between the + # two cacheable formats alphabetically, breaking cache hits) + # 4. 
Deterministic order within groups (alphabetical by nodeid) + def sort_key(item: pytest.Item) -> tuple[bool, str, bool, str]: + base = item_base_nodeids[id(item)] + is_slow = base in slow_base_nodeids + has_cache_key = ( + item.get_closest_marker("transition_tool_cache_key") is not None + ) + return (not is_slow, base, not has_cache_key, item.nodeid) + + items.sort(key=sort_key) + + # Group related fixture formats for cache locality with xdist. + # Detect xdist: check for -n in original args (collection happens before + # xdist initializes, so config.option.numprocesses is None). + orig_args = ( + config.invocation_params.args + if hasattr(config, "invocation_params") + else [] + ) + is_xdist = any(arg == "-n" or arg.startswith("-n") for arg in orig_args) + + if is_xdist: + # Add xdist_group markers for --dist=loadgroup. + # Skip if test already has an xdist_group marker (e.g., bigmem). + # Tests with existing markers still benefit from the cache within their + # worker, just with potentially more interleaving. + # IMPORTANT: Use hash for group name because loadgroup's _split_scope + # uses rfind("]") to detect group suffix, and our base_nodeid contains + # "]" characters which would break the detection. + for item in items: + if not item.get_closest_marker("xdist_group"): + base_nodeid = item_base_nodeids[id(item)] + h = hashlib.md5( + base_nodeid.encode(), usedforsecurity=False + ).hexdigest()[:8] + group_name = f"t8n-cache-{h}" + item.add_marker(pytest.mark.xdist_group(name=group_name)) def _verify_fixtures_post_merge( @@ -1929,9 +2148,10 @@ def pytest_testnodedown(node: Any, error: Any) -> None: """ Called on master when a worker node finishes. - Prints any timing logs collected by the worker during sessionfinish. + Aggregate cache stats and print timing logs from the worker. """ del error + _aggregate_cache_stats(node) logger = logging.getLogger("fill.sessionfinish") worker_id = getattr(node, "workerinput", {}).get("workerid", "unknown") timing_logs = getattr(node, "workeroutput", {}).get("timing_logs", []) diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/pre_alloc.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/pre_alloc.py index 75f6ee6e5d..5044ba0109 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/pre_alloc.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/pre_alloc.py @@ -422,6 +422,10 @@ def node_id_for_entropy( across fixture types and forks for the same test. """ node_id: str = request.node.nodeid + # Strip xdist group suffix (e.g., @t8n-cache-abc12345) so entropy is + # deterministic regardless of whether xdist is active. + if "@" in node_id: + node_id = node_id.rsplit("@", 1)[0] if fork is None: # FIXME: Static tests don't have a fork, so we need to get it from the # node. 
@@ -429,7 +433,7 @@ def node_id_for_entropy( fork = request.node.fork for fixture_format_name in ALL_FIXTURE_FORMAT_NAMES: if fixture_format_name in node_id: - parts = request.node.nodeid.split("::") + parts = node_id.split("::") test_file_path = parts[0] test_name = "::".join(parts[1:]) stripped_test_name = test_name.replace( diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_benchmarking.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_benchmarking.py index 2dd85aa18f..293c5d74f7 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_benchmarking.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_benchmarking.py @@ -1,6 +1,7 @@ """Test the benchmarking pytest plugin for gas benchmark values.""" import json +import os import textwrap from pathlib import Path from typing import List @@ -12,8 +13,8 @@ OpcodeCountsConfig, ) -# EVM binary for tests that actually fill (not just collect) -BENCHMARK_EVM_T8N = "evmone-t8n" +# EVM binary for fill tests; defaults to geth evm +BENCHMARK_EVM_T8N = os.environ.get("EVM_BIN", "evm") test_module_dummy = textwrap.dedent( """\ @@ -764,9 +765,20 @@ def test_fixed_opcode_count_config_file_parametrized( ) ) + # Use subprocess mode to isolate each parametrized inner session. + # pytester defaults to in-process mode, which shares the Python + # interpreter across all inner sessions in the same test run. + # Pydantic's ModelMetaclass caches __init__ wrappers for dynamically + # created classes (like BaseTestWrapper); when a second in-process + # session creates a new BaseTestWrapper, the cached wrapper re-invokes + # __init__ re-entrantly, causing generate() to run twice per test and + # doubling the opcode count. This is strictly a pytester/in-process + # artifact — normal `fill` runs are unaffected because each fill + # invocation is a fresh Python process. + # # Place --fixed-opcode-count after test path to avoid argparse consuming # the path as the option value (nargs='?' behavior) - result = pytester.runpytest( + result = pytester.runpytest_subprocess( "-c", "pytest-fill.ini", "--fork", @@ -896,7 +908,10 @@ def test_fixed_opcode_count_per_parameter_patterns( config_file = pytester.path / ".fixed_opcode_counts.json" config_file.write_text(json.dumps({"scenario_configs": config})) - result = pytester.runpytest( + # Subprocess mode: avoids Pydantic metaclass cache pollution across + # in-process pytester sessions (see comment in + # test_fixed_opcode_count_config_file_parametrized). + result = pytester.runpytest_subprocess( "-c", "pytest-fill.ini", "--fork", @@ -932,7 +947,10 @@ def test_cli_mode_ignores_per_parameter_patterns( pytester, test_module_parametrized, "test_cli_mode.py" ) - result = pytester.runpytest( + # Subprocess mode: avoids Pydantic metaclass cache pollution across + # in-process pytester sessions (see comment in + # test_fixed_opcode_count_config_file_parametrized). 
+ result = pytester.runpytest_subprocess( "-c", "pytest-fill.ini", "--fork", diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_t8n_cache.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_t8n_cache.py new file mode 100644 index 0000000000..912c3a1d98 --- /dev/null +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/filler/tests/test_t8n_cache.py @@ -0,0 +1,610 @@ +"""Unit tests for the t8n output cache functionality.""" + +import hashlib +from typing import Any +from unittest.mock import sentinel + +import pytest + +from execution_testing.client_clis.transition_tool import OutputCache +from execution_testing.fixtures import ( + BlockchainEngineFixture, + BlockchainFixture, + FixtureFormat, + LabeledFixtureFormat, + StateFixture, + strip_fixture_format_from_node, +) + +from ...shared.helpers import labeled_format_parameter_set +from ..filler import _strip_xdist_group_suffix + + +class MockItem: + """Mock pytest.Item for testing collection sorting behavior.""" + + nodeid: str + name: str + _markers: list[pytest.Mark] + + def __init__( + self, + nodeid: str, + fixture_format: LabeledFixtureFormat | FixtureFormat | None, + name: str | None = None, + ) -> None: + """Initialize name from nodeid if not provided.""" + self.nodeid = nodeid + if not name: + parts = nodeid.split("::") + name = parts[-1] if "::" in nodeid else nodeid + self.name = name + self._markers = [] + if fixture_format is not None: + param = labeled_format_parameter_set(fixture_format) + for mark in param.marks: + self._markers.append(mark) # type: ignore[arg-type] + + def get_closest_marker(self, name: str) -> pytest.Mark | None: + """Return marker by name if present.""" + for marker in self._markers: + if marker.name == name: + return marker + return None + + def add_marker(self, marker: Any) -> None: + """Add a marker to the item.""" + self._markers.append(marker) + + +class TestStripFixtureFormatFromNodeid: + """Test cases for strip_fixture_format_from_node function.""" + + def test_strip_blockchain_test(self) -> None: + """Test stripping blockchain_test format.""" + item = MockItem( + "tests/test.py::test_foo[fork_Osaka-blockchain_test]", + BlockchainFixture, + ) + expected = "tests/test.py::test_foo[fork_Osaka-]" + assert strip_fixture_format_from_node(item) == expected + + def test_strip_blockchain_test_engine(self) -> None: + """Test stripping blockchain_test_engine format.""" + item = MockItem( + "tests/test.py::test_foo[fork_Osaka-blockchain_test_engine]", + BlockchainEngineFixture, + ) + expected = "tests/test.py::test_foo[fork_Osaka-]" + assert strip_fixture_format_from_node(item) == expected + + def test_strip_state_test(self) -> None: + """Test stripping state_test format.""" + item = MockItem( + "tests/test.py::test_foo[fork_Osaka-state_test]", + StateFixture, + ) + expected = "tests/test.py::test_foo[fork_Osaka-]" + assert strip_fixture_format_from_node(item) == expected + + def test_strip_format_in_middle(self) -> None: + """Test stripping format when it's in the middle of params.""" + item = MockItem( + "tests/test.py::test_foo[fork_Osaka-blockchain_test-param1]", + BlockchainFixture, + ) + expected = "tests/test.py::test_foo[fork_Osaka--param1]" + assert strip_fixture_format_from_node(item) == expected + + def test_no_format_unchanged(self) -> None: + """Test that nodeids without fixture format are unchanged.""" + item = MockItem( + "tests/test.py::test_foo[fork_Osaka-some_param]", + None, + ) + assert 
strip_fixture_format_from_node(item) == item.nodeid + + def test_no_params_unchanged(self) -> None: + """Test that nodeids without parameters are unchanged.""" + item = MockItem( + "tests/test.py::test_foo", + None, + ) + assert strip_fixture_format_from_node(item) == item.nodeid + + def test_empty_params_unchanged(self) -> None: + """Test that nodeids with empty params are unchanged.""" + item = MockItem( + "tests/test.py::test_foo[]", + None, + ) + assert strip_fixture_format_from_node(item) == item.nodeid + + def test_format_at_start(self) -> None: + """Test stripping format at start of params.""" + item = MockItem( + "tests/test.py::test_foo[blockchain_test-fork_Osaka]", + BlockchainFixture, + ) + expected = "tests/test.py::test_foo[-fork_Osaka]" + assert strip_fixture_format_from_node(item) == expected + + def test_only_format(self) -> None: + """Test stripping format at start of params.""" + item = MockItem( + "tests/test.py::test_foo[blockchain_test]", + BlockchainFixture, + ) + expected = "tests/test.py::test_foo[]" + assert strip_fixture_format_from_node(item) == expected + + def test_related_formats_same_base(self) -> None: + """Test that related formats produce the same base nodeid.""" + base_nodeid = "tests/test.py::test_foo[fork_Osaka--param1]" + + node_bt = MockItem( + "tests/test.py::test_foo[fork_Osaka-blockchain_test-param1]", + BlockchainFixture, + ) + node_bte = MockItem( + "tests/test.py::test_foo[fork_Osaka-blockchain_test_engine-param1]", + BlockchainEngineFixture, + ) + + # Both should strip to the same base. + assert strip_fixture_format_from_node(node_bt) == base_nodeid + assert strip_fixture_format_from_node(node_bte) == base_nodeid + + def test_longer_format_matched_first(self) -> None: + """Test that longer format names are matched before shorter ones.""" + # blockchain_test_engine should match before blockchain_test. + node = MockItem( + "tests/test.py::test[fork_Osaka-blockchain_test_engine]", + BlockchainEngineFixture, + ) + expected = "tests/test.py::test[fork_Osaka-]" + result = strip_fixture_format_from_node(node) + assert result == expected + # Verify it didn't partially match blockchain_test. 
+ assert "blockchain_test" not in result + + +class TestCacheKeyConsistency: + """Test that cache keys are consistent across fixture formats.""" + + @pytest.mark.parametrize( + "labeled_fixture_format,format_name", + [ + (BlockchainFixture, "blockchain_test"), + (BlockchainEngineFixture, "blockchain_test_engine"), + (StateFixture, "state_test"), + ( + LabeledFixtureFormat( + BlockchainFixture, "blockchain_test_from_state_test", "" + ), + "blockchain_test_from_state_test", + ), + ( + LabeledFixtureFormat( + BlockchainEngineFixture, + "blockchain_test_engine_from_state_test", + "", + ), + "blockchain_test_engine_from_state_test", + ), + ], + ) + def test_format_stripping_produces_consistent_key( + self, labeled_fixture_format: LabeledFixtureFormat, format_name: str + ) -> None: + """Test that all format variants produce the same base key.""" + base = "tests/test.py::test_case[fork_Osaka--param1]" + nodeid = f"tests/test.py::test_case[fork_Osaka-{format_name}-param1]" + node = MockItem(nodeid, labeled_fixture_format) + + result = strip_fixture_format_from_node(node) + assert result == base, f"Format {format_name} did not strip correctly" + + +class TestStripXdistGroupSuffix: + """Test cases for _strip_xdist_group_suffix function.""" + + def test_strips_t8n_cache_suffix(self) -> None: + """Test that t8n-cache-* suffixes are stripped.""" + nodeid = "test.py::test[params]@t8n-cache-12345678" + expected = "test.py::test[params]" + assert _strip_xdist_group_suffix(nodeid) == expected + + def test_preserves_other_group_suffixes(self) -> None: + """Test that non-cache group suffixes (e.g., bigmem) are preserved.""" + nodeid = "test.py::test[params]@bigmem" + assert _strip_xdist_group_suffix(nodeid) == nodeid + + def test_preserves_custom_group_suffixes(self) -> None: + """Test that custom xdist_group markers are preserved.""" + nodeid = "test.py::test[params]@custom_group" + assert _strip_xdist_group_suffix(nodeid) == nodeid + + def test_no_suffix_unchanged(self) -> None: + """Test that nodeids without @ are unchanged.""" + nodeid = "test.py::test[params]" + assert _strip_xdist_group_suffix(nodeid) == nodeid + + def test_at_in_params_preserved(self) -> None: + """Test that @ in params (not suffix) is preserved.""" + # This tests the rsplit behavior - only the last @ is considered. + nodeid = "test.py::test[email@example.com]@t8n-cache-abc" + expected = "test.py::test[email@example.com]" + assert _strip_xdist_group_suffix(nodeid) == expected + + +class TestCacheExecutionOrder: + """Test that execution order maximizes cache hits.""" + + def test_blockchain_test_sorts_before_blockchain_test_engine(self) -> None: + """Test blockchain_test < blockchain_test_engine alphabetically.""" + # Alphabetical order determines which format runs first. + assert "blockchain_test" < "blockchain_test_engine" + + def test_related_formats_group_together_when_sorted(self) -> None: + """Test that sorting by base nodeid groups related formats together.""" + nodes = [ + MockItem( + "tests/test.py::test_foo[fork_Osaka-blockchain_test]", + BlockchainFixture, + ), + MockItem( + "tests/test.py::test_bar[fork_Osaka-blockchain_test]", + BlockchainFixture, + ), + MockItem( + "tests/test.py::test_foo[fork_Osaka-blockchain_test_engine]", + BlockchainEngineFixture, + ), + MockItem( + "tests/test.py::test_bar[fork_Osaka-blockchain_test_engine]", + BlockchainEngineFixture, + ), + ] + + # Sort by base nodeid (as the collection hook does). 
+ sorted_nodes = sorted(nodes, key=strip_fixture_format_from_node) + + # Related formats should be adjacent after sorting. + test_bar_indices = [ + i for i, n in enumerate(sorted_nodes) if "test_bar" in n.nodeid + ] + test_foo_indices = [ + i for i, n in enumerate(sorted_nodes) if "test_foo" in n.nodeid + ] + + # Check adjacency: indices should be consecutive. + assert test_bar_indices == [0, 1] or test_bar_indices == [2, 3] + assert test_foo_indices == [0, 1] or test_foo_indices == [2, 3] + + def test_related_formats_grouped_when_sorted(self) -> None: + """Test sorting groups related formats together (same base nodeid).""" + nodes = [ + MockItem( + "tests/test.py::test_foo[fork_Osaka-blockchain_test]", + BlockchainFixture, + ), + MockItem( + "tests/test.py::test_bar[fork_Osaka-blockchain_test]", + BlockchainFixture, + ), + MockItem( + "tests/test.py::test_foo[fork_Osaka-blockchain_test_engine]", + BlockchainEngineFixture, + ), + ] + + # Sort by base nodeid. + sorted_nodes = sorted(nodes, key=strip_fixture_format_from_node) + + # test_bar items should be adjacent, test_foo items should be adjacent. + foo_indices = [ + i for i, n in enumerate(sorted_nodes) if "test_foo" in n.nodeid + ] + bar_indices = [ + i for i, n in enumerate(sorted_nodes) if "test_bar" in n.nodeid + ] + + # Check foo items are adjacent (difference is 1). + assert max(foo_indices) - min(foo_indices) == len(foo_indices) - 1 + # Check bar items are adjacent (just one item here). + assert len(bar_indices) == 1 + + def test_sorting_groups_multiple_tests_by_base_nodeid(self) -> None: + """Test sorting groups items by base nodeid.""" + nodes = [ + # Deliberately interleaved: test_a and test_b formats mixed. + MockItem( + "tests/test.py::test_b[fork_Osaka-blockchain_test_engine]", + BlockchainEngineFixture, + ), + MockItem( + "tests/test.py::test_a[fork_Osaka-blockchain_test]", + BlockchainFixture, + ), + MockItem( + "tests/test.py::test_b[fork_Osaka-blockchain_test]", + BlockchainFixture, + ), + MockItem( + "tests/test.py::test_a[fork_Osaka-blockchain_test_engine]", + BlockchainEngineFixture, + ), + ] + + # Sort by base nodeid. + sorted_nodes = sorted(nodes, key=strip_fixture_format_from_node) + + # After sorting, test_a formats should be adjacent, test_b adjacent. + test_a_indices = [ + i for i, n in enumerate(sorted_nodes) if "test_a" in n.nodeid + ] + test_b_indices = [ + i for i, n in enumerate(sorted_nodes) if "test_b" in n.nodeid + ] + + # Check test_a items are adjacent. + assert max(test_a_indices) - min(test_a_indices) == 1 + # Check test_b items are adjacent. 
+ assert max(test_b_indices) - min(test_b_indices) == 1 + + +class TestCollectionSortingBehavior: + """Test collection sorting behavior ensures cache hits.""" + + def _sort_items_by_base_nodeid(self, items: list[MockItem]) -> None: + """Sort items by base nodeid (cache-friendly order).""" + items.sort(key=lambda item: strip_fixture_format_from_node(item)) + + def _add_xdist_markers(self, items: list[MockItem]) -> None: + """Add xdist_group markers based on base nodeid hash.""" + for item in items: + base_nodeid = strip_fixture_format_from_node(item) + h = hashlib.md5( + base_nodeid.encode(), usedforsecurity=False + ).hexdigest()[:8] + item.add_marker(pytest.mark.xdist_group(name=f"t8n-cache-{h}")) + + def _simulate_collection_without_xdist( + self, items: list[MockItem] + ) -> None: + """Simulate collection behavior WITHOUT xdist (sorts items).""" + self._sort_items_by_base_nodeid(items) + + def _simulate_collection_with_xdist(self, items: list[MockItem]) -> None: + """Simulate collection behavior WITH xdist (adds markers AND sorts).""" + self._add_xdist_markers(items) + self._sort_items_by_base_nodeid(items) + + def test_items_sorted_without_xdist(self) -> None: + """Test that items are sorted when xdist is NOT enabled.""" + items = [ + MockItem( + "tests/test.py::test_b[fork_Osaka-blockchain_test]", + BlockchainFixture, + ), + MockItem( + "tests/test.py::test_a[fork_Osaka-blockchain_test]", + BlockchainFixture, + ), + ] + + self._simulate_collection_without_xdist(items) + + # After sorting by base nodeid, test_a should come before test_b. + assert "test_a" in items[0].nodeid + assert "test_b" in items[1].nodeid + + def test_items_sorted_with_xdist(self) -> None: + """Test items are sorted with xdist for cache locality.""" + items = [ + MockItem( + "tests/test.py::test_b[fork_Osaka-blockchain_test]", + BlockchainFixture, + ), + MockItem( + "tests/test.py::test_a[fork_Osaka-blockchain_test]", + BlockchainFixture, + ), + ] + + self._simulate_collection_with_xdist(items) + + # After sorting by base nodeid, test_a should come before test_b. + assert "test_a" in items[0].nodeid + assert "test_b" in items[1].nodeid + + def test_xdist_groups_have_consistent_hash(self) -> None: + """Test xdist_group markers use consistent hashes.""" + items = [ + MockItem( + "tests/test.py::test_foo[fork_Osaka-blockchain_test]", + BlockchainFixture, + ), + MockItem( + "tests/test.py::test_foo[fork_Osaka-blockchain_test_engine]", + BlockchainEngineFixture, + ), + ] + + self._simulate_collection_with_xdist(items) + + marker0 = items[0].get_closest_marker("xdist_group") + marker1 = items[1].get_closest_marker("xdist_group") + + assert marker0 is not None, "First item needs xdist_group marker." + assert marker1 is not None, "Second item needs xdist_group marker." + + group0 = marker0.kwargs.get("name", "") + group1 = marker1.kwargs.get("name", "") + + assert group0 == group1, ( + f"Related formats should have the same xdist_group. 
" + f"Got: {group0} vs {group1}" + ) + assert group0.startswith("t8n-cache-") + + def test_xdist_sorting_groups_related_formats(self) -> None: + """Test xdist collection groups related formats together.""" + items = [ + MockItem( + "tests/test.py::test_b[fork_Osaka-blockchain_test]", + BlockchainFixture, + ), + MockItem( + "tests/test.py::test_a[fork_Osaka-blockchain_test]", + BlockchainFixture, + ), + MockItem( + "tests/test.py::test_b[fork_Osaka-blockchain_test_engine]", + BlockchainEngineFixture, + ), + MockItem( + "tests/test.py::test_a[fork_Osaka-blockchain_test_engine]", + BlockchainEngineFixture, + ), + ] + + self._simulate_collection_with_xdist(items) + + # Items should be sorted so related formats are adjacent. + nodeids = [i.nodeid for i in items] + # test_a variants should be together, test_b variants together. + test_a_indices = [i for i, n in enumerate(nodeids) if "test_a" in n] + test_b_indices = [i for i, n in enumerate(nodeids) if "test_b" in n] + + # Check adjacency (difference between indices should be 1). + assert max(test_a_indices) - min(test_a_indices) == 1 + assert max(test_b_indices) - min(test_b_indices) == 1 + + +class TestOutputCache: + """Unit tests for the OutputCache single-key cache.""" + + def test_initial_state(self) -> None: + """Test cache starts empty with no key.""" + cache = OutputCache() + assert cache.key is None + assert cache.hits == 0 + assert cache.misses == 0 + + def test_set_key_returns_false_on_first_call(self) -> None: + """Test set_key returns False (miss) for a new key.""" + cache = OutputCache() + assert cache.set_key("test-key") is False + + def test_set_key_returns_true_on_repeat(self) -> None: + """Test set_key returns True (hit) when key is unchanged.""" + cache = OutputCache() + cache.set_key("test-key") + assert cache.set_key("test-key") is True + + def test_set_key_clears_on_change(self) -> None: + """Test set_key clears cached data when key changes.""" + cache = OutputCache() + cache.set_key("key-a") + cache.set(0, sentinel.output_a) + assert cache.get(0) is sentinel.output_a + + # Changing key should evict all cached data. 
+ assert cache.set_key("key-b") is False + assert cache.get(0) is None + + def test_get_set_round_trip(self) -> None: + """Test get returns what was stored by set.""" + cache = OutputCache() + cache.set_key("key") + cache.set(0, sentinel.output_0) + cache.set(1, sentinel.output_1) + assert cache.get(0) is sentinel.output_0 + assert cache.get(1) is sentinel.output_1 + + def test_get_missing_subkey_returns_none(self) -> None: + """Test get returns None for a subkey that was never set.""" + cache = OutputCache() + cache.set_key("key") + assert cache.get(42) is None + + def test_hit_counter(self) -> None: + """Test hits increment on cache hits.""" + cache = OutputCache() + cache.set_key("key") + cache.set(0, sentinel.output) + cache.get(0) + cache.get(0) + assert cache.hits == 2 + assert cache.misses == 0 + + def test_miss_counter(self) -> None: + """Test misses increment on cache misses.""" + cache = OutputCache() + cache.set_key("key") + + cache.get(0) + cache.get(1) + assert cache.misses == 2 + assert cache.hits == 0 + + def test_mixed_hit_miss_counters(self) -> None: + """Test hits and misses accumulate independently.""" + cache = OutputCache() + cache.set_key("key") + cache.set(0, sentinel.output) + cache.get(0) # hit + cache.get(1) # miss + cache.get(0) # hit + cache.get(2) # miss + assert cache.hits == 2 + assert cache.misses == 2 + + def test_clear_resets_key_and_data(self) -> None: + """Test clear removes cached data and resets the key.""" + cache = OutputCache() + cache.set_key("key") + cache.set(0, sentinel.output) + cache.clear() + assert cache.key is None + assert cache.get(0) is None + + def test_clear_preserves_counters(self) -> None: + """Test clear does not reset hit/miss counters.""" + cache = OutputCache() + cache.set_key("key") + cache.set(0, sentinel.output) + cache.get(0) # hit + cache.get(1) # miss + + cache.clear() + assert cache.hits == 1 + assert cache.misses == 1 + + def test_set_key_after_clear(self) -> None: + """Test cache is usable again after clear.""" + cache = OutputCache() + cache.set_key("key-a") + cache.set(0, sentinel.output_a) + cache.clear() + + cache.set_key("key-b") + cache.set(0, sentinel.output_b) + assert cache.get(0) is sentinel.output_b + + def test_counters_survive_key_change(self) -> None: + """Test hit/miss counters accumulate across key changes.""" + cache = OutputCache() + cache.set_key("key-a") + cache.set(0, sentinel.output) + cache.get(0) # hit + cache.get(1) # miss + + cache.set_key("key-b") + cache.set(0, sentinel.output) + cache.get(0) # hit + + assert cache.hits == 2 + assert cache.misses == 1 diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/shared/execute_fill.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/shared/execute_fill.py index bd572fae28..a0c877bdc3 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/shared/execute_fill.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/shared/execute_fill.py @@ -184,6 +184,15 @@ def pytest_configure(config: pytest.Config) -> None: "pre_alloc_mutable: Marks a test to allow impossible mutations in the " "pre-state.", ) + config.addinivalue_line( + "markers", + "fixture_format_id: ID used to describe the fixture format.", + ) + config.addinivalue_line( + "markers", + "transition_tool_cache_key: Key used to match the transition tool " + "cache for the test during fill.", + ) @pytest.fixture(scope="function") diff --git 
a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/shared/helpers.py b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/shared/helpers.py index 1da0a41fd9..8b37e01909 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/plugins/shared/helpers.py +++ b/packages/testing/src/execution_testing/cli/pytest_commands/plugins/shared/helpers.py @@ -46,12 +46,22 @@ def labeled_format_parameter_set( The label will be used in the test id and also will be added as a marker to the generated test case when filling/executing the test. """ + transition_tool_cache_key = getattr( + format_with_or_without_label, "transition_tool_cache_key", "" + ) + if transition_tool_cache_key: + marks = [ + pytest.mark.transition_tool_cache_key(transition_tool_cache_key), + ] + else: + marks = [] if isinstance( format_with_or_without_label, LabeledExecuteFormat ) or isinstance(format_with_or_without_label, LabeledFixtureFormat): + parameter_id = format_with_or_without_label.label return pytest.param( format_with_or_without_label.format, - id=format_with_or_without_label.label, + id=parameter_id, marks=[ getattr( pytest.mark, @@ -59,20 +69,25 @@ def labeled_format_parameter_set( ), getattr( pytest.mark, - format_with_or_without_label.label.lower(), + parameter_id.lower(), ), - ], + pytest.mark.fixture_format_id(parameter_id), + ] + + marks, ) else: + parameter_id = format_with_or_without_label.format_name.lower() return pytest.param( format_with_or_without_label, - id=format_with_or_without_label.format_name.lower(), + id=parameter_id, marks=[ getattr( pytest.mark, - format_with_or_without_label.format_name.lower(), - ) - ], + parameter_id, + ), + pytest.mark.fixture_format_id(parameter_id), + ] + + marks, ) diff --git a/packages/testing/src/execution_testing/cli/pytest_commands/pytest_ini_files/pytest-fill.ini b/packages/testing/src/execution_testing/cli/pytest_commands/pytest_ini_files/pytest-fill.ini index 74d65dd5e7..15d8680559 100644 --- a/packages/testing/src/execution_testing/cli/pytest_commands/pytest_ini_files/pytest-fill.ini +++ b/packages/testing/src/execution_testing/cli/pytest_commands/pytest_ini_files/pytest-fill.ini @@ -17,6 +17,7 @@ addopts = -p execution_testing.cli.pytest_commands.plugins.help.help -p execution_testing.cli.pytest_commands.plugins.custom_logging.plugin_logging --tb short + --dist loadgroup --ignore tests/cancun/eip4844_blobs/point_evaluation_vectors/ --ignore tests/json_infra --ignore tests/evm_tools diff --git a/packages/testing/src/execution_testing/client_clis/cli_types.py b/packages/testing/src/execution_testing/client_clis/cli_types.py index 333e110358..3a39479eca 100644 --- a/packages/testing/src/execution_testing/client_clis/cli_types.py +++ b/packages/testing/src/execution_testing/client_clis/cli_types.py @@ -216,6 +216,7 @@ def print(self) -> None: _opcode_synonyms = { "KECCAK256": "SHA3", + "DIFFICULTY": "PREVRANDAO", } diff --git a/packages/testing/src/execution_testing/client_clis/clis/besu.py b/packages/testing/src/execution_testing/client_clis/clis/besu.py index 07b5ca4990..cd97d4c698 100644 --- a/packages/testing/src/execution_testing/client_clis/clis/besu.py +++ b/packages/testing/src/execution_testing/client_clis/clis/besu.py @@ -31,6 +31,7 @@ from ..ethereum_cli import EthereumCLI from ..fixture_consumer_tool import FixtureConsumerTool from ..transition_tool import ( + Profiler, TransitionTool, dump_files_to_directory, model_dump_config, @@ -95,7 +96,7 @@ def _consume_debug_dump( """ ) dump_files_to_directory( - 
str(debug_output_path), + debug_output_path, { "consume_direct_args.py": command, "consume_direct_returncode.txt": result.returncode, @@ -193,15 +194,16 @@ def shutdown(self) -> None: if self.besu_trace_dir: self.besu_trace_dir.cleanup() - def evaluate( + def _evaluate( self, *, transition_tool_data: TransitionTool.TransitionToolData, - debug_output_path: str = "", - slow_request: bool = False, + debug_output_path: Path | None, + slow_request: bool, + profiler: Profiler, ) -> TransitionToolOutput: """Execute `evm t8n` with the specified arguments.""" - del slow_request + del slow_request, profiler if not self.process: self.start_server() diff --git a/packages/testing/src/execution_testing/client_clis/clis/evmone.py b/packages/testing/src/execution_testing/client_clis/clis/evmone.py index e474e752aa..8157d29f93 100644 --- a/packages/testing/src/execution_testing/client_clis/clis/evmone.py +++ b/packages/testing/src/execution_testing/client_clis/clis/evmone.py @@ -132,7 +132,7 @@ def _consume_debug_dump( """ ) dump_files_to_directory( - str(debug_output_path), + debug_output_path, { "consume_direct_args.py": command, "consume_direct_returncode.txt": result.returncode, diff --git a/packages/testing/src/execution_testing/client_clis/clis/execution_specs.py b/packages/testing/src/execution_testing/client_clis/clis/execution_specs.py index 4066124e4e..cd6100495b 100644 --- a/packages/testing/src/execution_testing/client_clis/clis/execution_specs.py +++ b/packages/testing/src/execution_testing/client_clis/clis/execution_specs.py @@ -19,6 +19,7 @@ dump_files_to_directory, ) from execution_testing.client_clis.transition_tool import ( + Profiler, TransitionTool, model_dump_config, ) @@ -62,17 +63,18 @@ def is_fork_supported(self, fork: Fork) -> bool: """Return True if the fork is supported by the tool.""" return fork.transition_tool_name() in get_supported_forks() - def evaluate( + def _evaluate( self, *, transition_tool_data: TransitionTool.TransitionToolData, - debug_output_path: str = "", - slow_request: bool = False, + debug_output_path: Path | None, + slow_request: bool, + profiler: Profiler, ) -> TransitionToolOutput: """ Evaluate using the EELS T8N entry point. 
""" - del slow_request + del slow_request, profiler request_data = transition_tool_data.get_request_data() request_data_json = request_data.model_dump( mode="json", **model_dump_config diff --git a/packages/testing/src/execution_testing/client_clis/clis/geth.py b/packages/testing/src/execution_testing/client_clis/clis/geth.py index 0ac1be64d4..80dd6ea77e 100644 --- a/packages/testing/src/execution_testing/client_clis/clis/geth.py +++ b/packages/testing/src/execution_testing/client_clis/clis/geth.py @@ -231,7 +231,7 @@ def _consume_debug_dump( """ ) dump_files_to_directory( - str(debug_output_path), + debug_output_path, { "consume_direct_args.py": command, "consume_direct_returncode.txt": result.returncode, diff --git a/packages/testing/src/execution_testing/client_clis/clis/nethermind.py b/packages/testing/src/execution_testing/client_clis/clis/nethermind.py index b98760040b..8ef9411886 100644 --- a/packages/testing/src/execution_testing/client_clis/clis/nethermind.py +++ b/packages/testing/src/execution_testing/client_clis/clis/nethermind.py @@ -83,7 +83,7 @@ def _consume_debug_dump( ) dump_files_to_directory( - str(debug_output_path), + debug_output_path, { "consume_direct_args.py": command, "consume_direct_returncode.txt": result.returncode, diff --git a/packages/testing/src/execution_testing/client_clis/file_utils.py b/packages/testing/src/execution_testing/client_clis/file_utils.py index 8e53f1729e..5b2047a6e7 100644 --- a/packages/testing/src/execution_testing/client_clis/file_utils.py +++ b/packages/testing/src/execution_testing/client_clis/file_utils.py @@ -3,6 +3,7 @@ import os import stat from json import dump +from pathlib import Path from typing import Any, Dict from pydantic import BaseModel, RootModel @@ -14,9 +15,9 @@ ) -def dump_files_to_directory(output_path: str, files: Dict[str, Any]) -> None: +def dump_files_to_directory(output_path: Path, files: Dict[str, Any]) -> None: """Dump the files to the given directory.""" - os.makedirs(output_path, exist_ok=True) + output_path.mkdir(parents=True, exist_ok=True) for file_rel_path_flags, file_contents in files.items(): file_rel_path, flags = ( file_rel_path_flags.split("+") @@ -25,8 +26,8 @@ def dump_files_to_directory(output_path: str, files: Dict[str, Any]) -> None: ) rel_path = os.path.dirname(file_rel_path) if rel_path: - os.makedirs(os.path.join(output_path, rel_path), exist_ok=True) - file_path = os.path.join(output_path, file_rel_path) + os.makedirs(output_path / rel_path, exist_ok=True) + file_path = output_path / file_rel_path with open(file_path, "w") as f: if isinstance(file_contents, (LazyAllocStr, LazyAllocJson)): if isinstance(file_contents, LazyAllocJson): diff --git a/packages/testing/src/execution_testing/client_clis/transition_tool.py b/packages/testing/src/execution_testing/client_clis/transition_tool.py index 074bbfd819..ffb1c2ae2a 100644 --- a/packages/testing/src/execution_testing/client_clis/transition_tool.py +++ b/packages/testing/src/execution_testing/client_clis/transition_tool.py @@ -122,6 +122,53 @@ def get_valid_transition_tool_names() -> set[str]: return {fork.transition_tool_name() for fork in all_available_forks} +class OutputCache: + """ + Single-key cache for t8n outputs. + + Stores results for one test at a time. When the key changes, the previous + cache is cleared. Works with xdist loadgroup which ensures related fixture + formats run on the same worker. 
+ """ + + def __init__(self) -> None: + """Initialize the cache.""" + self._cache: Dict[int, TransitionToolOutput] = {} + self.key: str | None = None + self.hits = 0 + self.misses = 0 + + def set_key(self, key: str) -> bool: + """ + Set the current key, returning True if it was already cached. + + Clears the cache when the key changes. + """ + if key == self.key: + return True + # New key - clear cache and start fresh + self._cache.clear() + self.key = key + return False + + def get(self, subkey: int) -> TransitionToolOutput | None: + """Get a value from the cache for the current key.""" + if subkey in self._cache: + self.hits += 1 + return self._cache[subkey] + self.misses += 1 + return None + + def set(self, subkey: int, value: TransitionToolOutput) -> None: + """Set a value in the cache for the current key.""" + self._cache[subkey] = value + + def clear(self) -> None: + """Clear the cache and reset the key.""" + self._cache.clear() + self.key = None + + class TransitionTool(EthereumCLI): """ Transition tool abstract base class which should be inherited by all @@ -141,8 +188,12 @@ class TransitionTool(EthereumCLI): t8n_use_server: bool = False server_url: str | None = None process: Optional[subprocess.Popen] = None - supports_opcode_count: ClassVar[bool] = False + output_cache: OutputCache | None = None + debug_dump_dir: Path | None = None + call_counter: int = 0 + opcode_count: OpcodeCount | None = None + supports_opcode_count: ClassVar[bool] = False supports_xdist: ClassVar[bool] = True supports_blob_params: ClassVar[bool] = False fork_name_map: ClassVar[Dict[str, str]] = {} @@ -186,15 +237,14 @@ def shutdown(self) -> None: def reset_traces(self) -> None: """Reset the internal trace storage for a new test to begin.""" - self.traces = None + self.traces = [] def append_traces(self, new_traces: Traces) -> None: """ Append a list of traces of a state transition to the current list. """ - if self.traces is None: - self.traces = [] + assert self.traces is not None self.traces.append(new_traces) def get_traces(self) -> List[Traces] | None: @@ -205,7 +255,7 @@ def collect_traces( self, receipts: List[TransactionReceipt], temp_dir: tempfile.TemporaryDirectory, - debug_output_path: str = "", + debug_output_path: Path | None, ) -> Traces: """ Collect the traces from the t8n tool output and store them in the @@ -225,6 +275,29 @@ def collect_traces( self.append_traces(traces) return traces + def set_cache(self, *, key: str) -> bool: + """ + Set the current cache key. + + Creates the cache on first call, then reuses it for single-key + eviction. + Returns True if the key was already in the cache (hit). + """ + if self.output_cache is None: + self.output_cache = OutputCache() + return self.output_cache.set_key(key) + + def remove_cache(self) -> None: + """Clear the cache (test doesn't use caching).""" + if self.output_cache is not None: + self.output_cache.clear() + + def reset_opcode_count(self) -> None: + """ + Reset the opcode count to zero. 
+ """ + self.opcode_count = OpcodeCount({}) + @dataclass class TransitionToolData: """Transition tool files and data to pass between methods.""" @@ -305,7 +378,7 @@ def _evaluate_filesystem( self, *, t8n_data: TransitionToolData, - debug_output_path: str = "", + debug_output_path: Path | None, profiler: Profiler, ) -> TransitionToolOutput: """ @@ -359,7 +432,7 @@ def _evaluate_filesystem( "--state.chainid", str(t8n_data.chain_id), ] - if self.supports_opcode_count: + if self.supports_opcode_count and self.opcode_count is not None: args.extend( [ "--opcode.count", @@ -429,7 +502,7 @@ def _evaluate_filesystem( temp_dir_path / "output", context={"exception_mapper": self.exception_mapper}, ) - if self.supports_opcode_count: + if self.supports_opcode_count and self.opcode_count is not None: opcode_count_file_path = Path(temp_dir.name) / "opcodes.json" if opcode_count_file_path.exists(): opcode_count = OpcodeCount.model_validate_json( @@ -507,7 +580,7 @@ def _evaluate_server( self, *, t8n_data: TransitionToolData, - debug_output_path: str = "", + debug_output_path: Path | None, timeout: int, profiler: Profiler, ) -> TransitionToolOutput: @@ -595,7 +668,7 @@ def _evaluate_stream( self, *, t8n_data: TransitionToolData, - debug_output_path: str = "", + debug_output_path: Path | None, profiler: Profiler, ) -> TransitionToolOutput: """ @@ -617,10 +690,11 @@ def _evaluate_stream( stderr=subprocess.PIPE, ) - with profiler.pause(): - self.dump_debug_stream( - debug_output_path, temp_dir, stdin, args, result - ) + if debug_output_path: + with profiler.pause(): + self.dump_debug_stream( + debug_output_path, temp_dir, stdin, args, result + ) if result.returncode != 0: raise Exception("failed to evaluate: " + result.stderr.decode()) @@ -737,7 +811,7 @@ def construct_args_stream( def dump_debug_stream( self, - debug_output_path: str, + debug_output_path: Path, temp_dir: tempfile.TemporaryDirectory, stdin: TransitionToolInput, args: List[str], @@ -746,9 +820,6 @@ def dump_debug_stream( """ Export debug files if requested when interacting with t8n via streams. """ - if not debug_output_path: - return - t8n_call = " ".join(args) t8n_output_base_dir = os.path.join(debug_output_path, "t8n.sh.out") if self.trace: @@ -782,11 +853,79 @@ def dump_debug_stream( }, ) + def _evaluate( + self, + *, + transition_tool_data: TransitionToolData, + debug_output_path: Path | None, + slow_request: bool, + profiler: Profiler, + ) -> TransitionToolOutput: + """ + Execute the relevant evaluate method as required by the `t8n` tool. + + If a client's `t8n` tool varies from the default behavior, this method + can be overridden. 
+ """ + if self.t8n_use_server: + if not self.server_url: + self.start_server() + return self._evaluate_server( + t8n_data=transition_tool_data, + debug_output_path=debug_output_path, + timeout=SLOW_REQUEST_TIMEOUT + if slow_request + else NORMAL_SERVER_TIMEOUT, + profiler=profiler, + ) + + elif self.t8n_use_stream: + return self._evaluate_stream( + t8n_data=transition_tool_data, + debug_output_path=debug_output_path, + profiler=profiler, + ) + else: + return self._evaluate_filesystem( + t8n_data=transition_tool_data, + debug_output_path=debug_output_path, + profiler=profiler, + ) + + def get_next_transition_tool_output_path( + self, call_id: int + ) -> Path | None: + """Return path to the next transition tool output file.""" + debug_dump_dir = self.debug_dump_dir + if debug_dump_dir is None: + return None + return debug_dump_dir / str(call_id) + + def increment_call_counter(self) -> int: + """Increment the call counter by one and return the previous value.""" + previous_value = self.call_counter + self.call_counter += 1 + return previous_value + + def process_result( + self, result: TransitionToolOutput + ) -> TransitionToolOutput: + """ + Process the result of the transition tool evaluation performing the + following operations: + - Add opcode count to the result if available. + """ + if ( + result.result.opcode_count is not None + and self.opcode_count is not None + ): + self.opcode_count += result.result.opcode_count + return result + def evaluate( self, *, transition_tool_data: TransitionToolData, - debug_output_path: str = "", slow_request: bool = False, ) -> TransitionToolOutput: """ @@ -795,33 +934,26 @@ def evaluate( If a client's `t8n` tool varies from the default behavior, this method can be overridden. """ + current_call_id = self.increment_call_counter() + if self.output_cache is not None: + cached_result = self.output_cache.get(current_call_id) + if cached_result is not None: + return self.process_result(cached_result) + debug_output_path = self.get_next_transition_tool_output_path( + current_call_id + ) with Profiler( - enabled=debug_output_path != "", - filename=Path(debug_output_path) / "profile.out" + enabled=debug_output_path is not None, + filename=debug_output_path / "profile.out" if debug_output_path else None, ) as profiler: - if self.t8n_use_server: - if not self.server_url: - self.start_server() - return self._evaluate_server( - t8n_data=transition_tool_data, - debug_output_path=debug_output_path, - timeout=SLOW_REQUEST_TIMEOUT - if slow_request - else NORMAL_SERVER_TIMEOUT, - profiler=profiler, - ) - - elif self.t8n_use_stream: - return self._evaluate_stream( - t8n_data=transition_tool_data, - debug_output_path=debug_output_path, - profiler=profiler, - ) - else: - return self._evaluate_filesystem( - t8n_data=transition_tool_data, - debug_output_path=debug_output_path, - profiler=profiler, - ) + result = self._evaluate( + transition_tool_data=transition_tool_data, + debug_output_path=debug_output_path, + slow_request=slow_request, + profiler=profiler, + ) + if self.output_cache is not None: + self.output_cache.set(current_call_id, result) + return self.process_result(result) diff --git a/packages/testing/src/execution_testing/fixtures/__init__.py b/packages/testing/src/execution_testing/fixtures/__init__.py index 25e0805b28..eb7ddf7755 100644 --- a/packages/testing/src/execution_testing/fixtures/__init__.py +++ b/packages/testing/src/execution_testing/fixtures/__init__.py @@ -5,6 +5,7 @@ FixtureFillingPhase, FixtureFormat, LabeledFixtureFormat, + 
strip_fixture_format_from_node, ) from .blockchain import ( BlockchainEngineFixture, @@ -49,6 +50,7 @@ "PreAllocGroupBuilders", "PreAllocGroups", "StateFixture", + "strip_fixture_format_from_node", "TestInfo", "TransactionFixture", "merge_partial_fixture_files", diff --git a/packages/testing/src/execution_testing/fixtures/base.py b/packages/testing/src/execution_testing/fixtures/base.py index 07416c7555..ce0395dc0a 100644 --- a/packages/testing/src/execution_testing/fixtures/base.py +++ b/packages/testing/src/execution_testing/fixtures/base.py @@ -4,7 +4,17 @@ import json from enum import Enum, auto from functools import cached_property -from typing import Annotated, Any, ClassVar, Dict, List, Set, Type, Union +from typing import ( + Annotated, + Any, + ClassVar, + Dict, + List, + Protocol, + Set, + Type, + Union, +) import pytest from pydantic import ( @@ -71,6 +81,7 @@ class BaseFixture(CamelModel): format_phases: ClassVar[Set[FixtureFillingPhase]] = { FixtureFillingPhase.FILL } + transition_tool_cache_key: ClassVar[str] = "" @classmethod def output_base_dir_name(cls) -> str: @@ -230,6 +241,11 @@ def format_phases(self) -> Set[FixtureFillingPhase]: """Get the filling format phases where it should be included.""" return self.format.format_phases + @property + def transition_tool_cache_key(self) -> str: + """Get the transition tool cache key.""" + return self.format.transition_tool_cache_key + def __eq__(self, other: Any) -> bool: """ Check if two labeled fixture formats are equal. @@ -252,3 +268,40 @@ def __eq__(self, other: Any) -> bool: lambda f: BaseFixture.formats[f] if f in BaseFixture.formats else f ), ] + + +class PytestItemProtocol(Protocol): + """Protocol that resembles pytest.Item.""" + + @property + def nodeid(self) -> str: + """Return the nodeid of the item.""" + ... + + def get_closest_marker(self, name: str) -> pytest.Mark | None: + """Return the closest marker with the given name.""" + ... + + +def strip_fixture_format_from_node( + item: PytestItemProtocol, +) -> str: + """ + Remove fixture format suffix from a test nodeid. + + Used for cache keys and xdist grouping to ensure related fixture formats + (e.g., blockchain_test and blockchain_test_engine) share the same key. 
+ + Example: + 'test.py::test[fork_Osaka-state_test]' -> 'test.py::test[fork_Osaka]' + + """ + fixture_format_id_marker = item.get_closest_marker("fixture_format_id") + nodeid = item.nodeid + if fixture_format_id_marker is None: + return nodeid + assert len(fixture_format_id_marker.args) == 1 + fixture_id = fixture_format_id_marker.args[0] + if fixture_id not in nodeid: + return nodeid + return nodeid.replace(fixture_id, "") diff --git a/packages/testing/src/execution_testing/fixtures/blockchain.py b/packages/testing/src/execution_testing/fixtures/blockchain.py index 0da7031b68..a653e4d97e 100644 --- a/packages/testing/src/execution_testing/fixtures/blockchain.py +++ b/packages/testing/src/execution_testing/fixtures/blockchain.py @@ -730,6 +730,7 @@ class BlockchainFixture(BlockchainFixtureCommon): genesis_rlp: Bytes = Field(..., alias="genesisRLP") blocks: List[FixtureBlock | InvalidFixtureBlock] seal_engine: Literal["NoProof"] = Field("NoProof") + transition_tool_cache_key: ClassVar[str] = "blockchain_test" @post_state_validator() @@ -774,6 +775,7 @@ class BlockchainEngineFixture(BlockchainEngineFixtureCommon): payloads: List[FixtureEngineNewPayload] = Field( ..., alias="engineNewPayloads" ) + transition_tool_cache_key: ClassVar[str] = "blockchain_test" @post_state_validator(alternate_field="post_state_diff") @@ -797,6 +799,7 @@ class BlockchainEngineXFixture(BlockchainEngineFixtureCommon): FixtureFillingPhase.FILL, FixtureFillingPhase.PRE_ALLOC_GENERATION, } + transition_tool_cache_key: ClassVar[str] = "" pre_hash: str """Hash of the pre-allocation group this test belongs to.""" @@ -861,6 +864,7 @@ class BlockchainEngineSyncFixture(BlockchainEngineFixture): "Tests that generate a blockchain test fixture for Engine API " "testing with client sync." 
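Reviewer note: a hedged sketch of strip_fixture_format_from_node above; FakeItem is an illustrative stand-in satisfying PytestItemProtocol, and the "-state_test" marker argument is chosen only so that the sketch reproduces the docstring's example, not taken from the real fixture formats.

import pytest

from execution_testing.fixtures import strip_fixture_format_from_node


class FakeItem:
    """Illustrative stand-in for a pytest item."""

    def __init__(self, nodeid: str, fixture_id: str | None) -> None:
        self.nodeid = nodeid
        self._fixture_id = fixture_id

    def get_closest_marker(self, name: str) -> pytest.Mark | None:
        if name == "fixture_format_id" and self._fixture_id is not None:
            return pytest.mark.fixture_format_id(self._fixture_id).mark
        return None


# With the marker, the fixture format suffix is stripped, so e.g. the
# state_test and blockchain_test variants of a test share one key.
item = FakeItem("test.py::test[fork_Osaka-state_test]", "-state_test")
assert strip_fixture_format_from_node(item) == "test.py::test[fork_Osaka]"

# Without the marker, the nodeid is returned unchanged.
plain = FakeItem("test.py::test[fork_Osaka]", None)
assert strip_fixture_format_from_node(plain) == "test.py::test[fork_Osaka]"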
) + transition_tool_cache_key: ClassVar[str] = "" sync_payload: FixtureEngineNewPayload | None = None @classmethod diff --git a/packages/testing/src/execution_testing/specs/base.py b/packages/testing/src/execution_testing/specs/base.py index b7157a2f8a..9eec347556 100644 --- a/packages/testing/src/execution_testing/specs/base.py +++ b/packages/testing/src/execution_testing/specs/base.py @@ -5,8 +5,6 @@ from abc import abstractmethod from enum import StrEnum, unique from functools import reduce -from os import path -from pathlib import Path from typing import ( Any, Callable, @@ -19,12 +17,11 @@ ) import pytest -from pydantic import BaseModel, ConfigDict, Field, PrivateAttr +from pydantic import BaseModel, ConfigDict, PrivateAttr from typing_extensions import Self from execution_testing.base_types import to_hex from execution_testing.client_clis import Result, TransitionTool -from execution_testing.client_clis.cli_types import OpcodeCount from execution_testing.execution import ( BaseExecute, ExecuteFormat, @@ -102,17 +99,12 @@ class BaseTest(BaseModel): _operation_mode: OpMode | None = PrivateAttr(None) _gas_optimization: int | None = PrivateAttr(None) _gas_optimization_max_gas_limit: int | None = PrivateAttr(None) - _opcode_count: OpcodeCount | None = PrivateAttr(None) expected_benchmark_gas_used: int | None = None skip_gas_used_validation: bool = False spec_types: ClassVar[Dict[str, Type["BaseTest"]]] = {} - # Transition tool specific fields - t8n_dump_dir: Path | None = Field(None, exclude=True) - t8n_call_counter: int = Field(0, exclude=True) - supported_fixture_formats: ClassVar[ Sequence[FixtureFormat | LabeledFixtureFormat] ] = [] @@ -165,14 +157,12 @@ def from_test( new_instance = cls( tag=base_test.tag, fork=base_test.fork, - t8n_dump_dir=base_test.t8n_dump_dir, expected_benchmark_gas_used=base_test.expected_benchmark_gas_used, skip_gas_used_validation=base_test.skip_gas_used_validation, **kwargs, ) new_instance._request = base_test._request new_instance._operation_mode = base_test._operation_mode - new_instance._opcode_count = base_test._opcode_count return new_instance @classmethod @@ -221,17 +211,6 @@ def pytest_parameter_name(cls) -> str: lambda x, y: x + ("_" if y.isupper() else "") + y, cls.__name__ ).lower() - def get_next_transition_tool_output_path(self) -> str: - """Return path to the next transition tool output file.""" - if not self.t8n_dump_dir: - return "" - current_value = self.t8n_call_counter - self.t8n_call_counter += 1 - return path.join( - self.t8n_dump_dir, - str(current_value), - ) - def is_tx_gas_heavy_test(self) -> bool: """Check if the test is gas-heavy for transaction execution.""" if self._request is not None and hasattr(self._request, "node"): @@ -258,11 +237,11 @@ def is_exception_test(self) -> bool | None: ) return None - def node_id(self) -> str: - """Return the node ID of the test.""" + def node(self) -> pytest.Item | pytest.Function | None: + """Return the pytest node of the test.""" if self._request is not None and hasattr(self._request, "node"): - return self._request.node.nodeid - return "" + return self._request.node + return None def check_exception_test( self, diff --git a/packages/testing/src/execution_testing/specs/benchmark.py b/packages/testing/src/execution_testing/specs/benchmark.py index abe0df496d..8f2fe7fc83 100644 --- a/packages/testing/src/execution_testing/specs/benchmark.py +++ b/packages/testing/src/execution_testing/specs/benchmark.py @@ -503,7 +503,6 @@ def _verify_target_opcode_count( ) -> None: """Verify target opcode was 
executed the expected number of times.""" # Skip validation if opcode count is not available - # (e.g. currently only supported for evmone filling) if opcode_count is None: return diff --git a/packages/testing/src/execution_testing/specs/blockchain.py b/packages/testing/src/execution_testing/specs/blockchain.py index 563bc1df35..7587e0d147 100644 --- a/packages/testing/src/execution_testing/specs/blockchain.py +++ b/packages/testing/src/execution_testing/specs/blockchain.py @@ -646,18 +646,9 @@ def generate_block_data( ), blob_schedule=self.fork.blob_schedule(), ), - debug_output_path=self.get_next_transition_tool_output_path(), slow_request=self.is_tx_gas_heavy_test(), ) - if transition_tool_output.result.opcode_count is not None: - if self._opcode_count is None: - self._opcode_count = transition_tool_output.result.opcode_count - else: - self._opcode_count += ( - transition_tool_output.result.opcode_count - ) - # One special case of the invalid transactions is the blob gas used, # since this value is not included in the transition tool result, but # it is included in the block header, and some clients check it before @@ -929,8 +920,8 @@ def make_fixture( alloc = alloc.get() if isinstance(alloc, LazyAlloc) else alloc self.verify_post_state(t8n, t8n_state=alloc) info = {} - if self._opcode_count is not None: - info["opcode_count"] = self._opcode_count.model_dump() + if t8n.opcode_count is not None: + info["opcode_count"] = t8n.opcode_count.model_dump() return BlockchainFixture( fork=self.fork, genesis=genesis.header, @@ -1017,8 +1008,8 @@ def make_hive_fixture( # Create base fixture data, common to all fixture formats info = {} - if self._opcode_count is not None: - info["opcode_count"] = self._opcode_count.model_dump() + if t8n.opcode_count is not None: + info["opcode_count"] = t8n.opcode_count.model_dump() fixture_data = { "fork": self.fork, "genesis": genesis.header, @@ -1096,7 +1087,6 @@ def generate( fixture_format: FixtureFormat, ) -> BaseFixture: """Generate the BlockchainTest fixture.""" - t8n.reset_traces() if fixture_format in [ BlockchainEngineFixture, BlockchainEngineXFixture, diff --git a/packages/testing/src/execution_testing/specs/state.py b/packages/testing/src/execution_testing/specs/state.py index 16af5fe4e1..6774d2b91a 100644 --- a/packages/testing/src/execution_testing/specs/state.py +++ b/packages/testing/src/execution_testing/specs/state.py @@ -153,7 +153,6 @@ def verify_modified_gas_limit( blob_schedule=fork.blob_schedule(), state_test=True, ), - debug_output_path=self.get_next_transition_tool_output_path(), slow_request=self.is_tx_gas_heavy_test(), ) modified_traces = modified_tool_output.result.traces @@ -367,7 +366,6 @@ def make_state_test_fixture( blob_schedule=fork.blob_schedule(), state_test=True, ), - debug_output_path=self.get_next_transition_tool_output_path(), slow_request=self.is_tx_gas_heavy_test(), ) output_alloc = transition_tool_output.alloc.get() diff --git a/pyproject.toml b/pyproject.toml index 37c871096f..7535c02993 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,8 +24,8 @@ dependencies = [ "coincurve>=20,<21", "typing_extensions>=4.4", "py-ecc>=8.0.0b2,<9", - "ethereum-types>=0.2.4,<0.3", - "ethereum-rlp>=0.1.4,<0.2", + "ethereum-types>=0.3.0,<0.4", + "ethereum-rlp>=0.1.5,<0.2", "cryptography>=45.0.1,<46", "platformdirs>=4.2,<5", "libcst>=1.8,<2", @@ -417,15 +417,6 @@ ignore = [ "src/ethereum_spec_tools/evm_tools/t8n/evm_trace.py" = [ "N815" # The traces must use camel case in JSON property names ] -"src/ethereum/forks/amsterdam/blocks.py" 
= [ - "E501" # Line too long - needed for long ref links -] - "src/ethereum/forks/amsterdam/block_access_lists/builder.py" = [ - "E501" # Line too long - needed for long ref links - ] -"src/ethereum/forks/amsterdam/block_access_lists/rlp_utils.py" = [ - "E501" # Line too long - needed for long ref links - ] "tests/*" = ["ARG001"] "vulture_whitelist.py" = [ "B018", # Useless expression (intentional for Vulture whitelisting) diff --git a/src/ethereum/forks/amsterdam/block_access_lists/__init__.py b/src/ethereum/forks/amsterdam/block_access_lists/__init__.py index 8c3fef14a0..33681d3145 100644 --- a/src/ethereum/forks/amsterdam/block_access_lists/__init__.py +++ b/src/ethereum/forks/amsterdam/block_access_lists/__init__.py @@ -12,10 +12,7 @@ add_touched_account, build_block_access_list, ) -from .rlp_utils import ( - compute_block_access_list_hash, - rlp_encode_block_access_list, -) +from .rlp_utils import compute_block_access_list_hash __all__ = [ "BlockAccessListBuilder", @@ -27,5 +24,4 @@ "add_touched_account", "build_block_access_list", "compute_block_access_list_hash", - "rlp_encode_block_access_list", ] diff --git a/src/ethereum/forks/amsterdam/block_access_lists/builder.py b/src/ethereum/forks/amsterdam/block_access_lists/builder.py index ff5426746a..3b5d446e6b 100644 --- a/src/ethereum/forks/amsterdam/block_access_lists/builder.py +++ b/src/ethereum/forks/amsterdam/block_access_lists/builder.py @@ -10,8 +10,8 @@ 2. **Build Phase**: After block execution, the accumulated data is sorted and encoded into the final deterministic format. -[`BlockAccessList`]: ref:ethereum.forks.amsterdam.block_access_lists.rlp_types.BlockAccessList # noqa: E501 -""" +[`BlockAccessList`]: ref:ethereum.forks.amsterdam.block_access_lists.rlp_types.BlockAccessList +""" # noqa: E501 from dataclasses import dataclass, field from typing import TYPE_CHECKING, Dict, List, Set @@ -86,8 +86,8 @@ class BlockAccessListBuilder: by address, field type, and transaction index to enable efficient reconstruction of state changes. - [`BlockAccessList`]: ref:ethereum.forks.amsterdam.block_access_lists.rlp_types.BlockAccessList # noqa: E501 - """ + [`BlockAccessList`]: ref:ethereum.forks.amsterdam.block_access_lists.rlp_types.BlockAccessList + """ # noqa: E501 accounts: Dict[Address, AccountData] = field(default_factory=dict) """ @@ -103,17 +103,9 @@ def ensure_account(builder: BlockAccessListBuilder, address: Address) -> None: doesn't already exist. This function is idempotent and safe to call multiple times for the same address. - Parameters - ---------- - builder : - The block access list builder instance. - address : - The account address to ensure exists. - - [`AccountData`] : - ref:ethereum.forks.amsterdam.block_access_lists.builder.AccountData + [`AccountData`]: ref:ethereum.forks.amsterdam.block_access_lists.builder.AccountData - """ + """ # noqa: E501 if address not in builder.accounts: builder.accounts[address] = AccountData() @@ -130,22 +122,7 @@ def add_storage_write( Records a storage slot modification for a given address at a specific transaction index. If multiple writes occur to the same slot within the - same transaction (same block_access_index), only the final value is kept. - - Parameters - ---------- - builder : - The block access list builder instance. - address : - The account address whose storage is being modified. - slot : - The storage slot being written to. - block_access_index : - The block access index for this change (0 for pre-execution, - 1..n for transactions, n+1 for post-execution). 
- new_value : - The new value being written to the storage slot. - + same transaction (same `block_access_index`), only the final value is kept. """ ensure_account(builder, address) @@ -180,17 +157,7 @@ def add_storage_read( that are both read and written will only appear in the storage changes list, not in the storage reads list, as per [EIP-7928]. - Parameters - ---------- - builder : - The block access list builder instance. - address : - The account address whose storage is being read. - slot : - The storage slot being read. - [EIP-7928]: https://eips.ethereum.org/EIPS/eip-7928 - """ ensure_account(builder, address) builder.accounts[address].storage_reads.add(slot) @@ -208,19 +175,6 @@ def add_balance_change( Records the post-transaction balance for an account after it has been modified. This includes changes from transfers, gas fees, block rewards, and any other balance-affecting operations. - - Parameters - ---------- - builder : - The block access list builder instance. - address : - The account address whose balance changed. - block_access_index : - The block access index for this change (0 for pre-execution, - 1..n for transactions, n+1 for post-execution). - post_balance : - The account balance after the change as U256. - """ ensure_account(builder, address) @@ -259,21 +213,8 @@ def add_nonce_change( a transaction or when a contract performs [`CREATE`] or [`CREATE2`] operations. - Parameters - ---------- - builder : - The block access list builder instance. - address : - The account address whose nonce changed. - block_access_index : - The block access index for this change (0 for pre-execution, - 1..n for transactions, n+1 for post-execution). - new_nonce : - The new nonce value after the change. - [`CREATE`]: ref:ethereum.forks.amsterdam.vm.instructions.system.create [`CREATE2`]: ref:ethereum.forks.amsterdam.vm.instructions.system.create2 - """ ensure_account(builder, address) @@ -306,24 +247,12 @@ def add_code_change( Add a code change to the block access list. Records contract code deployment or modification. This typically occurs - during contract creation via [`CREATE`], [`CREATE2`], or [`SETCODE`] - operations. - - Parameters - ---------- - builder : - The block access list builder instance. - address : - The account address receiving new code. - block_access_index : - The block access index for this change (0 for pre-execution, - 1..n for transactions, n+1 for post-execution). - new_code : - The deployed contract bytecode. + during contract creation via [`CREATE`], [`CREATE2`], or + [`SetCodeTransaction`][sct] operations. [`CREATE`]: ref:ethereum.forks.amsterdam.vm.instructions.system.create [`CREATE2`]: ref:ethereum.forks.amsterdam.vm.instructions.system.create2 - + [sct]: ref:ethereum.forks.amsterdam.transactions.SetCodeTransaction """ ensure_account(builder, address) @@ -334,7 +263,8 @@ def add_code_change( for i, existing in enumerate(existing_changes): if existing.block_access_index == block_access_index: # Replace the existing code change with the new one - # For selfdestructs, this ensures we only record the final state (empty code) + # For selfdestructs, this ensures we only record the final + # state (empty code) existing_changes[i] = CodeChange( block_access_index=block_access_index, new_code=new_code ) @@ -358,23 +288,11 @@ def add_touched_account( [`EXTCODESIZE`], and [`EXTCODECOPY`] that read account data without modifying it. - Parameters - ---------- - builder : - The block access list builder instance. - address : - The account address that was accessed. 
- - [`EXTCODEHASH`] : - ref:ethereum.forks.amsterdam.vm.instructions.environment.extcodehash - [`BALANCE`] : - ref:ethereum.forks.amsterdam.vm.instructions.environment.balance - [`EXTCODESIZE`] : - ref:ethereum.forks.amsterdam.vm.instructions.environment.extcodesize - [`EXTCODECOPY`] : - ref:ethereum.forks.amsterdam.vm.instructions.environment.extcodecopy - - """ + [`EXTCODEHASH`]: ref:ethereum.forks.amsterdam.vm.instructions.environment.extcodehash + [`BALANCE`]: ref:ethereum.forks.amsterdam.vm.instructions.environment.balance + [`EXTCODESIZE`]: ref:ethereum.forks.amsterdam.vm.instructions.environment.extcodesize + [`EXTCODECOPY`]: ref:ethereum.forks.amsterdam.vm.instructions.environment.extcodecopy + """ # noqa: E501 ensure_account(builder, address) @@ -392,19 +310,8 @@ def _build_from_builder( - Storage slots (lexicographically) - Transaction indices (numerically) for each change type - Parameters - ---------- - builder : - The block access list builder containing all tracked changes. - - Returns - ------- - block_access_list : - The final sorted and encoded block access list. - - [`BlockAccessList`]: ref:ethereum.forks.amsterdam.block_access_lists.rlp_types.BlockAccessList # noqa: E501 - - """ + [`BlockAccessList`]: ref:ethereum.forks.amsterdam.block_access_lists.rlp_types.BlockAccessList + """ # noqa: E501 block_access_list: BlockAccessList = [] for address, changes in builder.accounts.items(): @@ -460,20 +367,9 @@ def build_block_access_list( Converts the accumulated state changes from the frame-based architecture into the final deterministic BlockAccessList format. - Parameters - ---------- - state_changes : - The block-level StateChanges frame containing all changes from the block. - - Returns - ------- - block_access_list : - The final sorted and encoded block access list. 
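Reviewer note: a rough usage sketch of the accumulate-then-build flow in builder.py; the keyword names follow the (removed) numpydoc sections and the import of the private _build_from_builder is for illustration only, so treat these details as assumptions rather than part of this change.

from ethereum_types.bytes import Bytes20
from ethereum_types.numeric import U16, U64, U256

from ethereum.forks.amsterdam.block_access_lists.builder import (
    BlockAccessListBuilder,
    _build_from_builder,
    add_balance_change,
    add_nonce_change,
)

builder = BlockAccessListBuilder()
alice = Bytes20(b"\xaa" * 20)

# Accumulate phase: record changes made by the first transaction in the
# block (index 0 is pre-execution, 1..n are transactions).
add_balance_change(
    builder,
    address=alice,
    block_access_index=U16(1),
    post_balance=U256(10**18),
)
add_nonce_change(
    builder,
    address=alice,
    block_access_index=U16(1),
    new_nonce=U64(1),
)

# Build phase: sort and freeze the accumulated data into the final
# deterministic BlockAccessList.
block_access_list = _build_from_builder(builder)
assert [changes.address for changes in block_access_list] == [alice]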
- - [`BlockAccessList`]: ref:ethereum.forks.amsterdam.block_access_lists.rlp_types.BlockAccessList # noqa: E501 + [`BlockAccessList`]: ref:ethereum.forks.amsterdam.block_access_lists.rlp_types.BlockAccessList [`StateChanges`]: ref:ethereum.forks.amsterdam.state_tracker.StateChanges - - """ + """ # noqa: E501 builder = BlockAccessListBuilder() # Add all touched addresses diff --git a/src/ethereum/forks/amsterdam/block_access_lists/rlp_types.py b/src/ethereum/forks/amsterdam/block_access_lists/rlp_types.py index e604d43da1..d2518f1400 100644 --- a/src/ethereum/forks/amsterdam/block_access_lists/rlp_types.py +++ b/src/ethereum/forks/amsterdam/block_access_lists/rlp_types.py @@ -8,36 +8,33 @@ """ from dataclasses import dataclass -from typing import List, Tuple +from typing import List, Tuple, TypeAlias -from ethereum_types.bytes import Bytes, Bytes20 +from ethereum_types.bytes import Bytes from ethereum_types.frozen import slotted_freezable -from ethereum_types.numeric import U64, U256, Uint +from ethereum_types.numeric import U16, U64, U256 + +from ..fork_types import Address # Type aliases for clarity (matching EIP-7928 specification) -Address = Bytes20 -StorageKey = U256 -StorageValue = U256 -CodeData = Bytes -BlockAccessIndex = Uint # uint16 in the spec, but using Uint for compatibility -Balance = U256 # Post-transaction balance in wei -Nonce = U64 - -# Constants chosen to support a 630m block gas limit -MAX_TXS = 30_000 -# MAX_SLOTS = 300_000 -# MAX_ACCOUNTS = 300_000 -MAX_CODE_SIZE = 24_576 -MAX_CODE_CHANGES = 1 +StorageKey: TypeAlias = U256 +StorageValue: TypeAlias = U256 +CodeData: TypeAlias = Bytes +BlockAccessIndex: TypeAlias = U16 +Balance: TypeAlias = U256 # Post-transaction balance in wei +Nonce: TypeAlias = U64 @slotted_freezable @dataclass class StorageChange: """ - Storage change: [block_access_index, new_value]. - RLP encoded as a list. - """ + In a [`SlotChanges`][slot], represents a single change in an [`Account`]'s + storage slot. + + [slot]: ref:ethereum.forks.amsterdam.block_access_lists.rlp_types.SlotChanges + [`Account`]: ref:ethereum.forks.amsterdam.fork_types.Account + """ # noqa: E501 block_access_index: BlockAccessIndex new_value: StorageValue @@ -47,9 +44,12 @@ class StorageChange: @dataclass class BalanceChange: """ - Balance change: [block_access_index, post_balance]. - RLP encoded as a list. - """ + In a [`BlockAccessList`][bal], represents a change in an [`Account`]'s + balance. + + [bal]: ref:ethereum.forks.amsterdam.block_access_lists.rlp_types.BlockAccessList + [`Account`]: ref:ethereum.forks.amsterdam.fork_types.Account + """ # noqa: E501 block_access_index: BlockAccessIndex post_balance: Balance @@ -59,9 +59,12 @@ class BalanceChange: @dataclass class NonceChange: """ - Nonce change: [block_access_index, new_nonce]. - RLP encoded as a list. - """ + In a [`BlockAccessList`][bal], represents a change in an [`Account`]'s + nonce. + + [bal]: ref:ethereum.forks.amsterdam.block_access_lists.rlp_types.BlockAccessList + [`Account`]: ref:ethereum.forks.amsterdam.fork_types.Account + """ # noqa: E501 block_access_index: BlockAccessIndex new_nonce: Nonce @@ -71,9 +74,12 @@ class NonceChange: @dataclass class CodeChange: """ - Code change: [block_access_index, new_code]. - RLP encoded as a list. - """ + In a [`BlockAccessList`][bal], represents a change in an [`Account`]'s + code. 
+ + [bal]: ref:ethereum.forks.amsterdam.block_access_lists.rlp_types.BlockAccessList + [`Account`]: ref:ethereum.forks.amsterdam.fork_types.Account + """ # noqa: E501 block_access_index: BlockAccessIndex new_code: CodeData @@ -83,9 +89,12 @@ class CodeChange: @dataclass class SlotChanges: """ - All changes to a single storage slot: [slot, [changes]]. - RLP encoded as a list. - """ + In a [`BlockAccessList`][bal], represents a change in an [`Account`]'s + storage. + + [bal]: ref:ethereum.forks.amsterdam.block_access_lists.rlp_types.BlockAccessList + [`Account`]: ref:ethereum.forks.amsterdam.fork_types.Account + """ # noqa: E501 slot: StorageKey changes: Tuple[StorageChange, ...] @@ -95,9 +104,9 @@ class SlotChanges: @dataclass class AccountChanges: """ - All changes for a single account, grouped by field type. - RLP encoded as: [address, storage_changes, storage_reads, - balance_changes, nonce_changes, code_changes]. + All changes for a single [`Account`], grouped by field type. + + [`Account`]: ref:ethereum.forks.amsterdam.fork_types.Account """ address: Address @@ -118,4 +127,9 @@ class AccountChanges: code_changes: Tuple[CodeChange, ...] -BlockAccessList = List[AccountChanges] +BlockAccessList: TypeAlias = List[AccountChanges] +""" +List of state changes recorded across a [`Block`]. + +[`Block`]: ref:ethereum.forks.amsterdam.blocks.Block +""" diff --git a/src/ethereum/forks/amsterdam/block_access_lists/rlp_utils.py b/src/ethereum/forks/amsterdam/block_access_lists/rlp_utils.py index cdbd1f4626..668960a23d 100644 --- a/src/ethereum/forks/amsterdam/block_access_lists/rlp_utils.py +++ b/src/ethereum/forks/amsterdam/block_access_lists/rlp_utils.py @@ -13,11 +13,7 @@ [`keccak256`]: ref:ethereum.crypto.hash.keccak256 """ -from typing import cast - -from ethereum_rlp import Extended, rlp -from ethereum_types.bytes import Bytes -from ethereum_types.numeric import Uint +from ethereum_rlp import rlp from ethereum.crypto.hash import Hash32, keccak256 @@ -31,87 +27,5 @@ def compute_block_access_list_hash( Compute the hash of a Block Access List. The Block Access List is RLP-encoded and then hashed with keccak256. - - Parameters - ---------- - block_access_list : - The Block Access List to hash. - - Returns - ------- - hash : - The keccak256 hash of the RLP-encoded Block Access List. - - """ - block_access_list_bytes = rlp_encode_block_access_list(block_access_list) - return keccak256(block_access_list_bytes) - - -def rlp_encode_block_access_list(block_access_list: BlockAccessList) -> Bytes: """ - Encode a [`BlockAccessList`] to RLP bytes. - - This is the top-level encoding function that produces the final RLP - representation of a block's access list, following the updated EIP-7928 - specification. - - Parameters - ---------- - block_access_list : - The block access list to encode. - - Returns - ------- - encoded : - The complete RLP-encoded block access list. 
- - [`BlockAccessList`]: ref:ethereum.forks.amsterdam.block_access_lists.rlp_types.BlockAccessList # noqa: E501 - - """ - # Encode as a list of AccountChanges directly (not wrapped) - account_changes_list = [] - for account in block_access_list: - # Each account is encoded as: - # [address, storage_changes, storage_reads, - # balance_changes, nonce_changes, code_changes] - storage_changes_list = [ - [ - slot_changes.slot, - [ - [Uint(c.block_access_index), c.new_value] - for c in slot_changes.changes - ], - ] - for slot_changes in account.storage_changes - ] - - storage_reads_list = list(account.storage_reads) - - balance_changes_list = [ - [Uint(bc.block_access_index), Uint(bc.post_balance)] - for bc in account.balance_changes - ] - - nonce_changes_list = [ - [Uint(nc.block_access_index), Uint(nc.new_nonce)] - for nc in account.nonce_changes - ] - - code_changes_list = [ - [Uint(cc.block_access_index), cc.new_code] - for cc in account.code_changes - ] - - account_changes_list.append( - [ - account.address, - storage_changes_list, - storage_reads_list, - balance_changes_list, - nonce_changes_list, - code_changes_list, - ] - ) - - encoded = rlp.encode(cast(Extended, account_changes_list)) - return Bytes(encoded) + return keccak256(rlp.encode(block_access_list)) diff --git a/src/ethereum/forks/amsterdam/state.py b/src/ethereum/forks/amsterdam/state.py index b4f17e953f..2d04c39834 100644 --- a/src/ethereum/forks/amsterdam/state.py +++ b/src/ethereum/forks/amsterdam/state.py @@ -604,32 +604,6 @@ def write_code(sender: Account) -> None: modify_state(state, address, write_code) -def set_authority_code(state: State, address: Address, code: Bytes) -> None: - """ - Sets authority account code for EIP-7702 delegation. - - This function is used specifically for setting authority code within - EIP-7702 Set Code Transactions. - - Parameters - ---------- - state: - The current state. - - address: - Address of the authority account whose code needs to be set. - - code: - The delegation designation bytecode to set. - - """ - - def write_code(sender: Account) -> None: - sender.code = code - - modify_state(state, address, write_code) - - def get_storage_original(state: State, address: Address, key: Bytes32) -> U256: """ Get the original value in a storage slot i.e. the value before the current diff --git a/src/ethereum/forks/amsterdam/state_tracker.py b/src/ethereum/forks/amsterdam/state_tracker.py index 189e088895..d0996a7b93 100644 --- a/src/ethereum/forks/amsterdam/state_tracker.py +++ b/src/ethereum/forks/amsterdam/state_tracker.py @@ -15,7 +15,7 @@ from typing import Dict, Optional, Set, Tuple from ethereum_types.bytes import Bytes, Bytes32 -from ethereum_types.numeric import U64, U256, Uint +from ethereum_types.numeric import U16, U64, U256 from .block_access_lists.rlp_types import BlockAccessIndex from .fork_types import Address @@ -89,9 +89,7 @@ def increment_block_access_index(root_frame: StateChanges) -> None: The root block-level frame. 
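Reviewer note on the rlp_utils.py change above: rlp.encode already handles the @slotted_freezable dataclasses from rlp_types.py field by field, so the hand-rolled rlp_encode_block_access_list can be dropped and the hash reduces to a one-liner. A minimal sketch of the trivial empty-list case, using only functions present in this diff:

from ethereum_rlp import rlp

from ethereum.crypto.hash import keccak256
from ethereum.forks.amsterdam.block_access_lists import (
    compute_block_access_list_hash,
)

# An empty block access list RLP-encodes to the empty list marker 0xc0,
# and its hash is simply keccak256 of that encoding.
assert rlp.encode([]) == b"\xc0"
assert compute_block_access_list_hash([]) == keccak256(rlp.encode([]))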
""" - root_frame.block_access_index = BlockAccessIndex( - root_frame.block_access_index + Uint(1) - ) + root_frame.block_access_index = root_frame.block_access_index + U16(1) def get_transaction_frame(state_changes: StateChanges) -> StateChanges: diff --git a/src/ethereum/forks/amsterdam/vm/eoa_delegation.py b/src/ethereum/forks/amsterdam/vm/eoa_delegation.py index fe21a1c9f7..ba645ae7c4 100644 --- a/src/ethereum/forks/amsterdam/vm/eoa_delegation.py +++ b/src/ethereum/forks/amsterdam/vm/eoa_delegation.py @@ -16,7 +16,7 @@ account_exists, get_account, increment_nonce, - set_authority_code, + set_code, ) from ..state_tracker import ( capture_pre_code, @@ -215,7 +215,7 @@ def set_delegation(message: Message) -> U256: # EIP-7928: Capture pre-code before any changes capture_pre_code(tx_frame, authority, authority_code) - set_authority_code(state, authority, code_to_set) + set_code(state, authority, code_to_set) if authority_code != code_to_set: # Track code change if different from current diff --git a/tests/constantinople/eip1052_extcodehash/test_extcodehash.py b/tests/constantinople/eip1052_extcodehash/test_extcodehash.py index 47046b32d5..54c731cb9f 100644 --- a/tests/constantinople/eip1052_extcodehash/test_extcodehash.py +++ b/tests/constantinople/eip1052_extcodehash/test_extcodehash.py @@ -24,6 +24,45 @@ ] +@pytest.mark.ported_from( + [ + "https://github.com/ethereum/tests/blob/v13.3/src/GeneralStateTestsFiller/stExtCodeHash/extCodeHashSelfFiller.json", # noqa: E501 + ], + pr=["https://github.com/ethereum/execution-specs/pull/2249"], +) +def test_extcodehash_self( + state_test: StateTestFiller, + pre: Alloc, +) -> None: + """ + Test EXTCODEHASH/EXTCODESIZE of the currently executing account. + """ + storage = Storage() + slot_hash = storage.store_next(0) + slot_size = storage.store_next(0) + + code = Op.SSTORE(slot_hash, Op.EXTCODEHASH(Op.ADDRESS)) + Op.SSTORE( + slot_size, Op.EXTCODESIZE(Op.ADDRESS) + ) + + storage[slot_hash] = code.keccak256() + storage[slot_size] = len(code) + + code_address = pre.deploy_contract(code) + + tx = Transaction( + sender=pre.fund_eoa(), + to=code_address, + gas_limit=400_000, + ) + + state_test( + pre=pre, + post={code_address: Account(storage=storage)}, + tx=tx, + ) + + @pytest.mark.ported_from( [ "https://github.com/ethereum/tests/blob/v13.3/src/GeneralStateTestsFiller/stExtCodeHash/extCodeHashNonExistingAccountFiller.yml", # noqa: E501 diff --git a/tests/static/state_tests/stExtCodeHash/extCodeHashSelfFiller.json b/tests/static/state_tests/stExtCodeHash/extCodeHashSelfFiller.json deleted file mode 100644 index 1a2cd7380e..0000000000 --- a/tests/static/state_tests/stExtCodeHash/extCodeHashSelfFiller.json +++ /dev/null @@ -1,67 +0,0 @@ -{ - "extCodeHashSelf": { - "_info": { - "comment": "EXTCODEHASH/EXTCODESIZE of the currently executing account" - }, - "env": { - "currentCoinbase": "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba", - "currentDifficulty": "0x20000", - "currentGasLimit": "1000000", - "currentNumber": "1", - "currentTimestamp": "1000" - }, - "expect": [ - { - "indexes": { - "data": -1, - "gas": -1, - "value": -1 - }, - "network": [ - ">=Cancun" - ], - "result": { - "deadbeef00000000000000000000000000000000": { - "balance": "1000000000000000001", - "storage": { - "0x00": "0x8807d680db87f5e22d9914d421554ef0f43ec8f589fff85a094c52a35525d2bf", - "0x01": "0x33" - } - } - } - } - ], - "pre": { - "deadbeef00000000000000000000000000000000": { - "balance": "1000000000000000000", - "code": "{ [[0]] (EXTCODEHASH 0xdeadbeef00000000000000000000000000000000) [[1]] 
(EXTCODESIZE 0xdeadbeef00000000000000000000000000000000) }", - "nonce": "0", - "storage": { - "0x00": "0xdeadbeef" - } - }, - "a94f5374fce5edbc8e2a8697c15331677e6ebf0b": { - "balance": "1000000000000000000", - "code": "", - "nonce": "0", - "storage": { - } - } - }, - "transaction": { - "data": [ - "" - ], - "gasLimit": [ - "400000" - ], - "gasPrice": "10", - "nonce": "0", - "secretKey": "45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8", - "to": "deadbeef00000000000000000000000000000000", - "value": [ - "1" - ] - } - } -} diff --git a/tox.ini b/tox.ini index 8a4e6d3fbb..deedbbc515 100644 --- a/tox.ini +++ b/tox.ini @@ -156,7 +156,7 @@ passenv = EVM_BIN commands = fill \ - --evm-bin={env:EVM_BIN:evmone-t8n} \ + --evm-bin={env:EVM_BIN:evm} \ --gas-benchmark-values 1 \ --generate-pre-alloc-groups \ --fork Osaka \ @@ -174,9 +174,9 @@ passenv = EVM_BIN commands = fill \ - --evm-bin={env:EVM_BIN:evmone-t8n} \ + --evm-bin={env:EVM_BIN:evm} \ --fixed-opcode-count 1 \ - --fork Prague \ + --fork Osaka \ -m repricing \ -n auto --maxprocesses 10 --dist=loadgroup \ # TODO: remove skip once working for all precompiles PR#1955 @@ -194,9 +194,9 @@ passenv = commands = benchmark_parser fill \ - --evm-bin={env:EVM_BIN:evmone-t8n} \ + --evm-bin={env:EVM_BIN:evm} \ --fixed-opcode-count \ - --fork Prague \ + --fork Osaka \ -m repricing \ -n auto --maxprocesses 10 --dist=loadgroup \ # TODO: remove skip once working for all precompiles PR#1955 diff --git a/uv.lock b/uv.lock index 97fab664fb..837e911f8d 100644 --- a/uv.lock +++ b/uv.lock @@ -931,8 +931,8 @@ requires-dist = [ { name = "coincurve", specifier = ">=20,<21" }, { name = "cryptography", specifier = ">=45.0.1,<46" }, { name = "ethash", marker = "extra == 'optimized'", specifier = ">=1.1.0,<2" }, - { name = "ethereum-rlp", specifier = ">=0.1.4,<0.2" }, - { name = "ethereum-types", specifier = ">=0.2.4,<0.3" }, + { name = "ethereum-rlp", specifier = ">=0.1.5,<0.2" }, + { name = "ethereum-types", specifier = ">=0.3.0,<0.4" }, { name = "libcst", specifier = ">=1.8,<2" }, { name = "platformdirs", specifier = ">=4.2,<5" }, { name = "py-ecc", specifier = ">=8.0.0b2,<9" }, @@ -1099,8 +1099,8 @@ requires-dist = [ { name = "eth-abi", specifier = ">=5.2.0" }, { name = "ethereum-execution", editable = "." 
}, { name = "ethereum-hive", specifier = ">=0.1.0a1,<1.0.0" }, - { name = "ethereum-rlp", specifier = ">=0.1.3,<0.2" }, - { name = "ethereum-types", specifier = ">=0.2.1,<0.3" }, + { name = "ethereum-rlp", specifier = ">=0.1.5,<0.2" }, + { name = "ethereum-types", specifier = ">=0.3.0,<0.4" }, { name = "filelock", specifier = ">=3.15.1,<4" }, { name = "gitpython", specifier = ">=3.1.31,<4" }, { name = "jinja2", specifier = ">=3,<4" }, @@ -1155,27 +1155,27 @@ wheels = [ [[package]] name = "ethereum-rlp" -version = "0.1.4" +version = "0.1.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "ethereum-types" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fc/af/91de6fc397f4b4e201c64b5bf0a7838e2d990bc2aa1451a118b01ab99c8e/ethereum_rlp-0.1.4.tar.gz", hash = "sha256:979f2161cfde39ecf6aecca765735124ca4cf57d2a43a74f353fa08f0d52557e", size = 13618, upload-time = "2025-06-24T17:16:29.576Z" } +sdist = { url = "https://files.pythonhosted.org/packages/27/2b/22ec601ed0924f8a54f9e91381e20bb8e98ad1afc8f23799826bb2c313e6/ethereum_rlp-0.1.5.tar.gz", hash = "sha256:679d4fa1163e32bc8c288680f29077980aa2f6b5321d71e8e2bff55c5233b7bd", size = 13724, upload-time = "2026-02-20T04:25:43.158Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3c/67/cef084060dfb2ec912a245fca5c7e282a282bd0d7cb741eaeabc4764500d/ethereum_rlp-0.1.4-py3-none-any.whl", hash = "sha256:a1612e382081bf08d7b0a6b08a89f8718dba3ca19536eec893795afa214f4f41", size = 9868, upload-time = "2025-06-24T17:16:28.18Z" }, + { url = "https://files.pythonhosted.org/packages/ce/bc/c6006debf372615af7c911e815c79f63395099defed3c8308db37d31511a/ethereum_rlp-0.1.5-py3-none-any.whl", hash = "sha256:4cbb84ec4d4e65e4af3b49757ecd623e04520cbae521ac5386151149ad0a1b7a", size = 9898, upload-time = "2026-02-20T04:25:42.074Z" }, ] [[package]] name = "ethereum-types" -version = "0.2.4" +version = "0.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e6/10/82c0b2e4e153d937c28300394671b69bd6c5e669422ea669514e4c3b76b3/ethereum_types-0.2.4.tar.gz", hash = "sha256:9012fd9c5f81795302ac1510631b47e79420ba6154c0e16d0417588a18fd0c90", size = 15402, upload-time = "2025-07-23T21:16:33.042Z" } +sdist = { url = "https://files.pythonhosted.org/packages/90/90/32d9440ae5b2ac97d873862c9cbbacd28c82cf6d471efb54ef3051700739/ethereum_types-0.3.0.tar.gz", hash = "sha256:e5324efd269a0f66993163366543e39aae474a53f48031c31acec956867d8995", size = 15697, upload-time = "2026-02-20T03:47:40.169Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9b/93/b11cf0f238f1cb24bee873650b883839fc8bd7e36b7c66e9bc4779469528/ethereum_types-0.2.4-py3-none-any.whl", hash = "sha256:38496286d55ed6010abef30b6807b00be7ba8fa82165ed4f987c2c29ad13b2ed", size = 10562, upload-time = "2025-07-23T21:16:32.191Z" }, + { url = "https://files.pythonhosted.org/packages/73/3f/e9c35c2d879cd0084e9eb59e0e650e1688cc75ec3776291d92d4b7989fff/ethereum_types-0.3.0-py3-none-any.whl", hash = "sha256:ade1aae9df702067387b1e100f7b65fe96c87b1d7731fb7838cca79e84e51cca", size = 10685, upload-time = "2026-02-20T03:47:39.065Z" }, ] [[package]]