From 1bd56cbf4a63348a1f18ea906148ab7cb687c615 Mon Sep 17 00:00:00 2001 From: Mario Vega Date: Thu, 23 Oct 2025 01:46:10 +0000 Subject: [PATCH 01/18] refactor(tests): Refactor json_infra using `pytest_collect_file` --- .../evm_tools/statetest/__init__.py | 65 +++-- tests/json_infra/conftest.py | 245 +++++++++++++++++- .../helpers/load_blockchain_tests.py | 10 +- tests/json_infra/helpers/load_state_tests.py | 22 +- tests/json_infra/test_blockchain_tests.py | 40 --- tests/json_infra/test_state_tests.py | 32 --- 6 files changed, 297 insertions(+), 117 deletions(-) delete mode 100644 tests/json_infra/test_blockchain_tests.py delete mode 100644 tests/json_infra/test_state_tests.py diff --git a/src/ethereum_spec_tools/evm_tools/statetest/__init__.py b/src/ethereum_spec_tools/evm_tools/statetest/__init__.py index cd58cb3028..8015d43672 100644 --- a/src/ethereum_spec_tools/evm_tools/statetest/__init__.py +++ b/src/ethereum_spec_tools/evm_tools/statetest/__init__.py @@ -9,7 +9,7 @@ from copy import deepcopy from dataclasses import dataclass from io import StringIO -from typing import Any, Dict, Iterable, List, Optional, TextIO +from typing import Any, Dict, Generator, Iterable, List, Optional, TextIO from ethereum.utils.hexadecimal import hex_to_bytes @@ -35,6 +35,41 @@ class TestCase: transaction: Dict +def read_test_case( + test_file_path: str, key: str, test: Dict[str, Any] +) -> Generator[TestCase, None, None]: + """ + Given a key and a value, return a `TestCase` object. + """ + env = test["env"] + if not isinstance(env, dict): + raise TypeError("env not dict") + + pre = test["pre"] + if not isinstance(pre, dict): + raise TypeError("pre not dict") + + transaction = test["transaction"] + if not isinstance(transaction, dict): + raise TypeError("transaction not dict") + + for fork_name, content in test["post"].items(): + for idx, post in enumerate(content): + if not isinstance(post, dict): + raise TypeError(f'post["{fork_name}"] not dict') + + yield TestCase( + path=test_file_path, + key=key, + index=idx, + fork_name=fork_name, + post=post, + env=env, + pre=pre, + transaction=transaction, + ) + + def read_test_cases(test_file_path: str) -> Iterable[TestCase]: """ Given a path to a filled state test in JSON format, return all the @@ -44,33 +79,7 @@ def read_test_cases(test_file_path: str) -> Iterable[TestCase]: tests = json.load(test_file) for key, test in tests.items(): - env = test["env"] - if not isinstance(env, dict): - raise TypeError("env not dict") - - pre = test["pre"] - if not isinstance(pre, dict): - raise TypeError("pre not dict") - - transaction = test["transaction"] - if not isinstance(transaction, dict): - raise TypeError("transaction not dict") - - for fork_name, content in test["post"].items(): - for idx, post in enumerate(content): - if not isinstance(post, dict): - raise TypeError(f'post["{fork_name}"] not dict') - - yield TestCase( - path=test_file_path, - key=key, - index=idx, - fork_name=fork_name, - post=post, - env=env, - pre=pre, - transaction=transaction, - ) + yield from read_test_case(test_file_path, key, test) def run_test_case( diff --git a/tests/json_infra/conftest.py b/tests/json_infra/conftest.py index ee19715578..13f2e4f2a6 100644 --- a/tests/json_infra/conftest.py +++ b/tests/json_infra/conftest.py @@ -1,24 +1,44 @@ """Pytest configuration for the json infra tests.""" +import json import os import shutil import tarfile from pathlib import Path -from typing import Callable, Final, Optional, Set +from typing import ( + Any, + Callable, + Dict, + Final, + Generator, + 
List, + Optional, + Self, + Set, + Type, +) import git +import pytest import requests_cache from _pytest.config import Config from _pytest.config.argparsing import Parser from _pytest.nodes import Item from filelock import FileLock from git.exc import GitCommandError, InvalidGitRepositoryError -from pytest import Session, StashKey, fixture +from pytest import Collector, File, Session, StashKey, fixture from requests_cache import CachedSession from requests_cache.backends.sqlite import SQLiteCache -from typing_extensions import Self -from . import TEST_FIXTURES +from ethereum_spec_tools.evm_tools.statetest import TestCase as StateTestCase +from ethereum_spec_tools.evm_tools.statetest import ( + read_test_case as read_state_test_case, +) + +from . import FORKS, TEST_FIXTURES +from .helpers.exceptional_test_patterns import exceptional_state_test_patterns +from .helpers.load_blockchain_tests import run_blockchain_st_test +from .helpers.load_state_tests import run_state_test try: from xdist import get_xdist_worker_id @@ -272,3 +292,220 @@ def pytest_sessionfinish(session: Session, exitstatus: int) -> None: assert lock_file is not None lock_file.release() + + +def pytest_collect_file( + file_path: Path, parent: Collector +) -> Collector | None: + """ + Pytest hook that collects test cases from fixture JSON files. + """ + if file_path.suffix == ".json": + return FixturesFile.from_parent(parent, path=file_path) + return None + + +class Fixture: + """Single fixture from a JSON file.""" + + @classmethod + def is_format(cls, obj: object) -> bool: + """Return true if the object can be parsed as the fixture type.""" + raise NotImplementedError("Not implemented.") + + @classmethod + def collect( + cls, file_path: str, key: str, obj: Dict[str, Any] + ) -> Generator[Item, None, None]: + """Collect tests from a single fixture dictionary.""" + pass + + +class StateTest(Item): + """Single state test case item.""" + + test_case: StateTestCase + test_dict: Dict[str, Any] + + def __init__( + self, + *args: Any, + test_case: StateTestCase, + test_dict: Dict[str, Any], + **kwargs: Any, + ) -> None: + """Initialize a single test case item.""" + super().__init__(*args, **kwargs) + self.test_case = test_case + self.test_dict = test_dict + self.own_markers.append(pytest.mark.fork(self.test_case.fork_name)) + self.own_markers.append(pytest.mark.evm_tools) + self.own_markers.append(pytest.mark.json_state_tests) + eels_fork = FORKS[test_case.fork_name]["eels_fork"] + test_patterns = exceptional_state_test_patterns( + test_case.fork_name, eels_fork + ) + if any(x.search(test_case.key) for x in test_patterns.slow): + self.own_markers.append(pytest.mark.slow) + + def runtest(self) -> None: + """Execute the test logic for this specific static test.""" + test_case_dict = { + "test_file": self.test_case.path, + "test_key": self.test_case.key, + "index": self.test_case.index, + "json_fork": self.test_case.fork_name, + "test_dict": self.test_dict, + } + run_state_test(test_case_dict) + + +class StateTestFixture(Fixture): + """Single state test fixture from a JSON file.""" + + @classmethod + def is_format(cls, obj: object) -> bool: + """Return true if the object can be parsed as the fixture type.""" + if "env" not in obj: + return False + if "pre" not in obj: + return False + if "transaction" not in obj: + return False + if "post" not in obj: + return False + return True + + @classmethod + def collect( + cls, parent: Collector, file_path: str, key: str, obj: Dict[str, Any] + ) -> Generator[Item, None, None]: + """Collect state 
tests from a single fixture dictionary.""" + for test_case in read_state_test_case( + test_file_path=file_path, key=key, test=obj + ): + name = f"{key} - {test_case.index}" + new_item = StateTest.from_parent( + parent, + name=name, + test_case=test_case, + test_dict=obj, + ) + yield new_item + + +class BlockchainTest(Item): + """Single state test case item.""" + + test_file: str + test_key: str + fork_name: str + test_dict: Dict[str, Any] + + def __init__( + self, + *args: Any, + test_file: str, + test_key: str, + fork_name: str, + test_dict: Dict[str, Any], + **kwargs: Any, + ) -> None: + """Initialize a single test case item.""" + super().__init__(*args, **kwargs) + self.test_file = test_file + self.test_key = test_key + self.test_dict = test_dict + self.own_markers.append(pytest.mark.fork(fork_name)) + self.own_markers.append(pytest.mark.evm_tools) + self.own_markers.append(pytest.mark.json_state_tests) + eels_fork = FORKS[fork_name]["eels_fork"] + test_patterns = exceptional_state_test_patterns(fork_name, eels_fork) + _identifier = "(" + test_file + "|" + test_key + ")" + if any( + x.search(test_file) for x in test_patterns.expected_fail + ) or any(x.search(_identifier) for x in test_patterns.expected_fail): + self.own_markers.append(pytest.mark.skip("Expected to fail")) + if any(x.search(_identifier) for x in test_patterns.slow): + self.own_markers.append(pytest.mark.slow) + if any(x.search(_identifier) for x in test_patterns.big_memory): + self.own_markers.append(pytest.mark.bigmem) + + def runtest(self) -> None: + """Execute the test logic for this specific static test.""" + test_case_dict = { + "test_file": self.test_file, + "test_key": self.test_key, + "test_dict": self.test_dict, + } + run_blockchain_st_test(test_case_dict) + + +class BlockchainTestFixture(Fixture): + """Single blockchain test fixture from a JSON file.""" + + @classmethod + def is_format(cls, obj: Dict) -> bool: + """Return true if the object can be parsed as the fixture type.""" + if "genesisBlockHeader" not in obj: + return False + if "blocks" not in obj: + return False + if "engineNewPayloads" in obj: + return False + if "preHash" in obj: + return False + if "network" not in obj: + return False + return True + + @classmethod + def collect( + cls, parent: Collector, file_path: str, key: str, obj: Dict[str, Any] + ) -> Generator[Item, None, None]: + """Collect blockchain tests from a single fixture dictionary.""" + name = f"{key}" + assert "network" in obj + new_item = BlockchainTest.from_parent( + parent, + name=name, + test_file=file_path, + test_key=key, + fork_name=obj["network"], + test_dict=obj, + ) + yield new_item + + +FixtureTypes: List[Type[Fixture]] = [ + StateTestFixture, + BlockchainTestFixture, +] + + +class FixturesFile(File): + """Single JSON file containing fixtures.""" + + def collect( + self: Self, + ) -> Generator[StateTestFixture | BlockchainTestFixture, None, None]: + """Collect test cases from a single JSON fixtures file.""" + with open(self.path, "r") as file: + try: + loaded_file = json.load(file) + if not isinstance(loaded_file, dict): + return + for key, fixture_dict in loaded_file.items(): + if not isinstance(fixture_dict, dict): + continue + for fixture_type in FixtureTypes: + if not fixture_type.is_format(fixture_dict): + continue + yield from fixture_type.collect( + parent=self, + file_path=self.path, + key=key, + obj=fixture_dict, + ) + except Exception: + return diff --git a/tests/json_infra/helpers/load_blockchain_tests.py b/tests/json_infra/helpers/load_blockchain_tests.py 
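For readers less familiar with pytest's collection hooks, the conftest.py changes above follow the standard non-Python-test pattern: `pytest_collect_file` hands matching files to a `pytest.File` subclass, whose `collect()` yields `pytest.Item` objects that each implement `runtest()`. A minimal, self-contained sketch of that shape — the `DemoJsonFile`/`DemoCase` names are illustrative, not part of this diff:

```python
import json
from pathlib import Path

import pytest


def pytest_collect_file(file_path: Path, parent):
    # Hand every *.json file to the custom collector below.
    if file_path.suffix == ".json":
        return DemoJsonFile.from_parent(parent, path=file_path)
    return None


class DemoJsonFile(pytest.File):
    def collect(self):
        # One item per top-level key in the JSON document.
        data = json.loads(self.path.read_text())
        for key, spec in data.items():
            yield DemoCase.from_parent(self, name=key, spec=spec)


class DemoCase(pytest.Item):
    def __init__(self, *, spec, **kwargs):
        super().__init__(**kwargs)
        self.spec = spec

    def runtest(self):
        # The real items delegate to run_state_test / run_blockchain_st_test;
        # this placeholder only checks the fixture's shape.
        assert isinstance(self.spec, dict)
```

The patch specializes this shape into `FixturesFile`, `StateTest`, and `BlockchainTest`, attaching the `fork`, `slow`, and `bigmem` markers when each item is constructed, and replaces the deleted per-fork `parametrize` modules shown further down.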
index 990e941a35..d4a62ec878 100644 --- a/tests/json_infra/helpers/load_blockchain_tests.py +++ b/tests/json_infra/helpers/load_blockchain_tests.py @@ -34,10 +34,12 @@ def run_blockchain_st_test(test_case: Dict, load: Load) -> None: test_file = test_case["test_file"] test_key = test_case["test_key"] - with open(test_file, "r") as fp: - data = json.load(fp) - - json_data = data[test_key] + if "test_dict" in test_case: + json_data = test_case["test_dict"] + else: + with open(test_file, "r") as fp: + data = json.load(fp) + json_data = data[test_key] if "postState" not in json_data: pytest.xfail(f"{test_case} doesn't have post state") diff --git a/tests/json_infra/helpers/load_state_tests.py b/tests/json_infra/helpers/load_state_tests.py index 37e6813402..880510bed3 100644 --- a/tests/json_infra/helpers/load_state_tests.py +++ b/tests/json_infra/helpers/load_state_tests.py @@ -5,7 +5,7 @@ import sys from glob import glob from io import StringIO -from typing import Dict, Generator +from typing import Any, Dict, Generator import pytest @@ -71,7 +71,7 @@ def idfn(test_case: Dict) -> str: return f"{folder_name} - {test_key} - {index}" -def run_state_test(test_case: Dict[str, str]) -> None: +def run_state_test(test_case: Dict[str, str | Dict[str, Any]]) -> None: """ Runs a single general state test. """ @@ -79,26 +79,30 @@ def run_state_test(test_case: Dict[str, str]) -> None: test_key = test_case["test_key"] index = test_case["index"] json_fork = test_case["json_fork"] - with open(test_file) as f: - tests = json.load(f) - - env = tests[test_key]["env"] + if "test_dict" in test_case: + test_dict = test_case["test_dict"] + else: + with open(test_file) as f: + tests = json.load(f) + test_dict = tests[test_key] + + env = test_dict["env"] try: env["blockHashes"] = {"0": env["previousHash"]} except KeyError: env["blockHashes"] = {} env["withdrawals"] = [] - alloc = tests[test_key]["pre"] + alloc = test_dict["pre"] - post = tests[test_key]["post"][json_fork][index] + post = test_dict["post"][json_fork][index] post_hash = post["hash"] d = post["indexes"]["data"] g = post["indexes"]["gas"] v = post["indexes"]["value"] tx = {} - for k, value in tests[test_key]["transaction"].items(): + for k, value in test_dict["transaction"].items(): if k == "data": tx["input"] = value[d] elif k == "gasLimit": diff --git a/tests/json_infra/test_blockchain_tests.py b/tests/json_infra/test_blockchain_tests.py deleted file mode 100644 index 9e19a361cf..0000000000 --- a/tests/json_infra/test_blockchain_tests.py +++ /dev/null @@ -1,40 +0,0 @@ -"""Run the blockchain tests from json fixtures.""" - -from typing import Callable, Dict - -import pytest - -from . 
import FORKS -from .helpers.load_blockchain_tests import ( - Load, - fetch_blockchain_tests, - idfn, - run_blockchain_st_test, -) - - -def _generate_test_function(fork_name: str) -> Callable: - """Generates a test function for blockchain tests for a specific fork.""" - - @pytest.mark.fork(fork_name) - @pytest.mark.json_blockchain_tests - @pytest.mark.parametrize( - "blockchain_test_case", - fetch_blockchain_tests(fork_name), - ids=idfn, - ) - def test_func(blockchain_test_case: Dict) -> None: - load = Load( - blockchain_test_case["json_fork"], - blockchain_test_case["eels_fork"], - ) - run_blockchain_st_test(blockchain_test_case, load=load) - - test_func.__name__ = f"test_blockchain_tests_{fork_name.lower()}" - return test_func - - -for fork_name in FORKS.keys(): - locals()[f"test_blockchain_tests_{fork_name.lower()}"] = ( - _generate_test_function(fork_name) - ) diff --git a/tests/json_infra/test_state_tests.py b/tests/json_infra/test_state_tests.py deleted file mode 100644 index 20bb578654..0000000000 --- a/tests/json_infra/test_state_tests.py +++ /dev/null @@ -1,32 +0,0 @@ -"""Run the state tests from json fixtures.""" - -from typing import Callable, Dict - -import pytest - -from . import FORKS -from .helpers.load_state_tests import fetch_state_tests, idfn, run_state_test - - -def _generate_test_function(fork_name: str) -> Callable: - """Generates a test function for state tests for a specific fork.""" - - @pytest.mark.fork(fork_name) - @pytest.mark.evm_tools - @pytest.mark.json_state_tests - @pytest.mark.parametrize( - "state_test_case", - fetch_state_tests(fork_name), - ids=idfn, - ) - def test_func(state_test_case: Dict) -> None: - run_state_test(state_test_case) - - test_func.__name__ = f"test_state_tests_{fork_name.lower()}" - return test_func - - -for fork_name in FORKS.keys(): - locals()[f"test_state_tests_{fork_name.lower()}"] = ( - _generate_test_function(fork_name) - ) From d11c62a09d5e30d5c926c0b069b0c04588c8a4be Mon Sep 17 00:00:00 2001 From: Mario Vega Date: Thu, 23 Oct 2025 04:24:31 +0000 Subject: [PATCH 02/18] fix(tests): json collecting --- tests/json_infra/conftest.py | 52 +++++++++++++++++++++++------------- 1 file changed, 34 insertions(+), 18 deletions(-) diff --git a/tests/json_infra/conftest.py b/tests/json_infra/conftest.py index 13f2e4f2a6..5f7551720d 100644 --- a/tests/json_infra/conftest.py +++ b/tests/json_infra/conftest.py @@ -4,6 +4,7 @@ import os import shutil import tarfile +from glob import glob from pathlib import Path from typing import ( Any, @@ -36,7 +37,10 @@ ) from . import FORKS, TEST_FIXTURES -from .helpers.exceptional_test_patterns import exceptional_state_test_patterns +from .helpers.exceptional_test_patterns import ( + exceptional_blockchain_test_patterns, + exceptional_state_test_patterns, +) from .helpers.load_blockchain_tests import run_blockchain_st_test from .helpers.load_state_tests import run_state_test @@ -280,6 +284,13 @@ def pytest_sessionstart(session: Session) -> None: fixture_path, ) + # Remove any python files in the downloaded files to avoid + # importing them. 
+ for python_file in glob( + os.path.join(fixture_path, "**/*.py"), recursive=True + ): + os.unlink(python_file) + def pytest_sessionfinish(session: Session, exitstatus: int) -> None: """Clean up file locks at session finish.""" @@ -384,6 +395,8 @@ def collect( for test_case in read_state_test_case( test_file_path=file_path, key=key, test=obj ): + if test_case.fork_name not in FORKS: + continue name = f"{key} - {test_case.index}" new_item = StateTest.from_parent( parent, @@ -418,9 +431,11 @@ def __init__( self.test_dict = test_dict self.own_markers.append(pytest.mark.fork(fork_name)) self.own_markers.append(pytest.mark.evm_tools) - self.own_markers.append(pytest.mark.json_state_tests) + self.own_markers.append(pytest.mark.json_blockchain_tests) eels_fork = FORKS[fork_name]["eels_fork"] - test_patterns = exceptional_state_test_patterns(fork_name, eels_fork) + test_patterns = exceptional_blockchain_test_patterns( + fork_name, eels_fork + ) _identifier = "(" + test_file + "|" + test_key + ")" if any( x.search(test_file) for x in test_patterns.expected_fail @@ -465,7 +480,8 @@ def collect( ) -> Generator[Item, None, None]: """Collect blockchain tests from a single fixture dictionary.""" name = f"{key}" - assert "network" in obj + if "network" not in obj or obj["network"] not in FORKS: + return new_item = BlockchainTest.from_parent( parent, name=name, @@ -493,19 +509,19 @@ def collect( with open(self.path, "r") as file: try: loaded_file = json.load(file) - if not isinstance(loaded_file, dict): - return - for key, fixture_dict in loaded_file.items(): - if not isinstance(fixture_dict, dict): - continue - for fixture_type in FixtureTypes: - if not fixture_type.is_format(fixture_dict): - continue - yield from fixture_type.collect( - parent=self, - file_path=self.path, - key=key, - obj=fixture_dict, - ) except Exception: + return # Skip *.json files that are unreadable. 
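The reordering of `FixturesFile.collect` just above (the hunk continues below) narrows the exception guard so that only JSON parsing is protected: unreadable files are skipped, while real bugs in item construction surface as collection errors instead of being silently swallowed. A standalone sketch of that idiom with a hypothetical helper name — it catches only read/parse errors, whereas the diff itself uses a broader `except Exception`:

```python
import json
from pathlib import Path
from typing import Any, Dict, Optional


def load_fixture_dict(path: Path) -> Optional[Dict[str, Any]]:
    """Return the parsed fixture mapping, or None if the file is unusable."""
    try:
        loaded = json.loads(path.read_text())
    except (OSError, ValueError):
        # json.JSONDecodeError is a ValueError subclass; any other
        # exception is a genuine bug and is left to propagate to pytest.
        return None
    return loaded if isinstance(loaded, dict) else None
```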
+ if not isinstance(loaded_file, dict): return + for key, fixture_dict in loaded_file.items(): + if not isinstance(fixture_dict, dict): + continue + for fixture_type in FixtureTypes: + if not fixture_type.is_format(fixture_dict): + continue + yield from fixture_type.collect( + parent=self, + file_path=str(self.path), + key=key, + obj=fixture_dict, + ) From 0b6d57c07c70bd1ec98a32345c1921ef471ce5bb Mon Sep 17 00:00:00 2001 From: Mario Vega Date: Thu, 23 Oct 2025 04:50:46 +0000 Subject: [PATCH 03/18] fix(tests): blockchain test execution --- tests/json_infra/conftest.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/tests/json_infra/conftest.py b/tests/json_infra/conftest.py index 5f7551720d..7264850e3b 100644 --- a/tests/json_infra/conftest.py +++ b/tests/json_infra/conftest.py @@ -41,7 +41,7 @@ exceptional_blockchain_test_patterns, exceptional_state_test_patterns, ) -from .helpers.load_blockchain_tests import run_blockchain_st_test +from .helpers.load_blockchain_tests import Load, run_blockchain_st_test from .helpers.load_state_tests import run_state_test try: @@ -453,7 +453,12 @@ def runtest(self) -> None: "test_key": self.test_key, "test_dict": self.test_dict, } - run_blockchain_st_test(test_case_dict) + eels_fork = FORKS[self.fork_name]["eels_fork"] + load = Load( + self.fork_name, + eels_fork, + ) + run_blockchain_st_test(test_case_dict, load=load) class BlockchainTestFixture(Fixture): From 594193848a963123bbef623004d5e0961ad5c0c9 Mon Sep 17 00:00:00 2001 From: Mario Vega Date: Thu, 23 Oct 2025 04:53:17 +0000 Subject: [PATCH 04/18] fix(tests): blockchain test execution --- tests/json_infra/conftest.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/json_infra/conftest.py b/tests/json_infra/conftest.py index 7264850e3b..3d6bbe353a 100644 --- a/tests/json_infra/conftest.py +++ b/tests/json_infra/conftest.py @@ -429,6 +429,7 @@ def __init__( self.test_file = test_file self.test_key = test_key self.test_dict = test_dict + self.fork_name = fork_name self.own_markers.append(pytest.mark.fork(fork_name)) self.own_markers.append(pytest.mark.evm_tools) self.own_markers.append(pytest.mark.json_blockchain_tests) From bb9c70c156c6db03a92647cc5bc2903314d34582 Mon Sep 17 00:00:00 2001 From: Mario Vega Date: Thu, 23 Oct 2025 17:55:25 +0000 Subject: [PATCH 05/18] refactor(tests): Refactor types in json_infra --- tests/json_infra/conftest.py | 230 +------------- tests/json_infra/helpers/__init__.py | 13 + .../helpers/exceptional_test_patterns.py | 3 + tests/json_infra/helpers/fixtures.py | 49 +++ .../helpers/load_blockchain_tests.py | 299 ++++++++---------- tests/json_infra/helpers/load_state_tests.py | 261 +++++++-------- 6 files changed, 347 insertions(+), 508 deletions(-) create mode 100644 tests/json_infra/helpers/fixtures.py diff --git a/tests/json_infra/conftest.py b/tests/json_infra/conftest.py index 3d6bbe353a..0cc080b4c8 100644 --- a/tests/json_infra/conftest.py +++ b/tests/json_infra/conftest.py @@ -7,20 +7,15 @@ from glob import glob from pathlib import Path from typing import ( - Any, Callable, - Dict, Final, Generator, - List, Optional, Self, Set, - Type, ) import git -import pytest import requests_cache from _pytest.config import Config from _pytest.config.argparsing import Parser @@ -31,18 +26,8 @@ from requests_cache import CachedSession from requests_cache.backends.sqlite import SQLiteCache -from ethereum_spec_tools.evm_tools.statetest import TestCase as StateTestCase -from ethereum_spec_tools.evm_tools.statetest import ( - read_test_case as 
read_state_test_case, -) - -from . import FORKS, TEST_FIXTURES -from .helpers.exceptional_test_patterns import ( - exceptional_blockchain_test_patterns, - exceptional_state_test_patterns, -) -from .helpers.load_blockchain_tests import Load, run_blockchain_st_test -from .helpers.load_state_tests import run_state_test +from . import TEST_FIXTURES +from .helpers import ALL_FIXTURE_TYPES try: from xdist import get_xdist_worker_id @@ -316,201 +301,12 @@ def pytest_collect_file( return None -class Fixture: - """Single fixture from a JSON file.""" - - @classmethod - def is_format(cls, obj: object) -> bool: - """Return true if the object can be parsed as the fixture type.""" - raise NotImplementedError("Not implemented.") - - @classmethod - def collect( - cls, file_path: str, key: str, obj: Dict[str, Any] - ) -> Generator[Item, None, None]: - """Collect tests from a single fixture dictionary.""" - pass - - -class StateTest(Item): - """Single state test case item.""" - - test_case: StateTestCase - test_dict: Dict[str, Any] - - def __init__( - self, - *args: Any, - test_case: StateTestCase, - test_dict: Dict[str, Any], - **kwargs: Any, - ) -> None: - """Initialize a single test case item.""" - super().__init__(*args, **kwargs) - self.test_case = test_case - self.test_dict = test_dict - self.own_markers.append(pytest.mark.fork(self.test_case.fork_name)) - self.own_markers.append(pytest.mark.evm_tools) - self.own_markers.append(pytest.mark.json_state_tests) - eels_fork = FORKS[test_case.fork_name]["eels_fork"] - test_patterns = exceptional_state_test_patterns( - test_case.fork_name, eels_fork - ) - if any(x.search(test_case.key) for x in test_patterns.slow): - self.own_markers.append(pytest.mark.slow) - - def runtest(self) -> None: - """Execute the test logic for this specific static test.""" - test_case_dict = { - "test_file": self.test_case.path, - "test_key": self.test_case.key, - "index": self.test_case.index, - "json_fork": self.test_case.fork_name, - "test_dict": self.test_dict, - } - run_state_test(test_case_dict) - - -class StateTestFixture(Fixture): - """Single state test fixture from a JSON file.""" - - @classmethod - def is_format(cls, obj: object) -> bool: - """Return true if the object can be parsed as the fixture type.""" - if "env" not in obj: - return False - if "pre" not in obj: - return False - if "transaction" not in obj: - return False - if "post" not in obj: - return False - return True - - @classmethod - def collect( - cls, parent: Collector, file_path: str, key: str, obj: Dict[str, Any] - ) -> Generator[Item, None, None]: - """Collect state tests from a single fixture dictionary.""" - for test_case in read_state_test_case( - test_file_path=file_path, key=key, test=obj - ): - if test_case.fork_name not in FORKS: - continue - name = f"{key} - {test_case.index}" - new_item = StateTest.from_parent( - parent, - name=name, - test_case=test_case, - test_dict=obj, - ) - yield new_item - - -class BlockchainTest(Item): - """Single state test case item.""" - - test_file: str - test_key: str - fork_name: str - test_dict: Dict[str, Any] - - def __init__( - self, - *args: Any, - test_file: str, - test_key: str, - fork_name: str, - test_dict: Dict[str, Any], - **kwargs: Any, - ) -> None: - """Initialize a single test case item.""" - super().__init__(*args, **kwargs) - self.test_file = test_file - self.test_key = test_key - self.test_dict = test_dict - self.fork_name = fork_name - self.own_markers.append(pytest.mark.fork(fork_name)) - self.own_markers.append(pytest.mark.evm_tools) - 
self.own_markers.append(pytest.mark.json_blockchain_tests) - eels_fork = FORKS[fork_name]["eels_fork"] - test_patterns = exceptional_blockchain_test_patterns( - fork_name, eels_fork - ) - _identifier = "(" + test_file + "|" + test_key + ")" - if any( - x.search(test_file) for x in test_patterns.expected_fail - ) or any(x.search(_identifier) for x in test_patterns.expected_fail): - self.own_markers.append(pytest.mark.skip("Expected to fail")) - if any(x.search(_identifier) for x in test_patterns.slow): - self.own_markers.append(pytest.mark.slow) - if any(x.search(_identifier) for x in test_patterns.big_memory): - self.own_markers.append(pytest.mark.bigmem) - - def runtest(self) -> None: - """Execute the test logic for this specific static test.""" - test_case_dict = { - "test_file": self.test_file, - "test_key": self.test_key, - "test_dict": self.test_dict, - } - eels_fork = FORKS[self.fork_name]["eels_fork"] - load = Load( - self.fork_name, - eels_fork, - ) - run_blockchain_st_test(test_case_dict, load=load) - - -class BlockchainTestFixture(Fixture): - """Single blockchain test fixture from a JSON file.""" - - @classmethod - def is_format(cls, obj: Dict) -> bool: - """Return true if the object can be parsed as the fixture type.""" - if "genesisBlockHeader" not in obj: - return False - if "blocks" not in obj: - return False - if "engineNewPayloads" in obj: - return False - if "preHash" in obj: - return False - if "network" not in obj: - return False - return True - - @classmethod - def collect( - cls, parent: Collector, file_path: str, key: str, obj: Dict[str, Any] - ) -> Generator[Item, None, None]: - """Collect blockchain tests from a single fixture dictionary.""" - name = f"{key}" - if "network" not in obj or obj["network"] not in FORKS: - return - new_item = BlockchainTest.from_parent( - parent, - name=name, - test_file=file_path, - test_key=key, - fork_name=obj["network"], - test_dict=obj, - ) - yield new_item - - -FixtureTypes: List[Type[Fixture]] = [ - StateTestFixture, - BlockchainTestFixture, -] - - class FixturesFile(File): """Single JSON file containing fixtures.""" def collect( self: Self, - ) -> Generator[StateTestFixture | BlockchainTestFixture, None, None]: + ) -> Generator[Item | Collector, None, None]: """Collect test cases from a single JSON fixtures file.""" with open(self.path, "r") as file: try: @@ -519,15 +315,19 @@ def collect( return # Skip *.json files that are unreadable. 
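The conftest.py slimming in this patch (the hunk continues below) turns fixture detection into a small plug-in registry: each fixture format implements a cheap structural `is_format` check and registers itself in `ALL_FIXTURE_TYPES`, and the file collector simply asks every registered type in turn. A self-contained sketch of that dispatch shape, using simplified stand-in names rather than the classes from the diff:

```python
from abc import ABC, abstractmethod
from typing import Any, Dict, List, Optional, Type


class FixtureFormat(ABC):
    @classmethod
    @abstractmethod
    def is_format(cls, test_dict: Dict[str, Any]) -> bool:
        """Cheap structural check: does this JSON object look like us?"""


class StateLike(FixtureFormat):
    @classmethod
    def is_format(cls, test_dict: Dict[str, Any]) -> bool:
        return {"env", "pre", "transaction", "post"} <= test_dict.keys()


class BlockchainLike(FixtureFormat):
    @classmethod
    def is_format(cls, test_dict: Dict[str, Any]) -> bool:
        return "genesisBlockHeader" in test_dict and "blocks" in test_dict


REGISTRY: List[Type[FixtureFormat]] = [StateLike, BlockchainLike]


def detect_format(test_dict: Dict[str, Any]) -> Optional[Type[FixtureFormat]]:
    # First matching format wins, mirroring the loop in FixturesFile.collect.
    for fixture_type in REGISTRY:
        if fixture_type.is_format(test_dict):
            return fixture_type
    return None
```

Registering the concrete types from `helpers/__init__.py` keeps conftest.py free of per-format knowledge, so new fixture formats can be added without touching the collector.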
if not isinstance(loaded_file, dict): return - for key, fixture_dict in loaded_file.items(): - if not isinstance(fixture_dict, dict): + for key, test_dict in loaded_file.items(): + if not isinstance(test_dict, dict): continue - for fixture_type in FixtureTypes: - if not fixture_type.is_format(fixture_dict): + for fixture_type in ALL_FIXTURE_TYPES: + if not fixture_type.is_format(test_dict): continue - yield from fixture_type.collect( + name = key + if "::" in name: + name = name.split("::")[1] + yield fixture_type.from_parent( # type: ignore parent=self, - file_path=str(self.path), - key=key, - obj=fixture_dict, + name=name, + test_file=str(self.path), + test_key=key, + test_dict=test_dict, ) diff --git a/tests/json_infra/helpers/__init__.py b/tests/json_infra/helpers/__init__.py index 3214c2cc14..7791d9803c 100644 --- a/tests/json_infra/helpers/__init__.py +++ b/tests/json_infra/helpers/__init__.py @@ -1 +1,14 @@ """Helpers to load tests from JSON files.""" + +from typing import List, Type + +from .fixtures import Fixture +from .load_blockchain_tests import BlockchainTestFixture +from .load_state_tests import StateTestFixture + +ALL_FIXTURE_TYPES: List[Type[Fixture]] = [ + BlockchainTestFixture, + StateTestFixture, +] + +__all__ = ["ALL_FIXTURE_TYPES", "Fixture"] diff --git a/tests/json_infra/helpers/exceptional_test_patterns.py b/tests/json_infra/helpers/exceptional_test_patterns.py index cebb1a9ef5..6355cfbf81 100644 --- a/tests/json_infra/helpers/exceptional_test_patterns.py +++ b/tests/json_infra/helpers/exceptional_test_patterns.py @@ -5,6 +5,7 @@ import re from dataclasses import dataclass +from functools import lru_cache from typing import Pattern, Tuple @@ -20,6 +21,7 @@ class TestPatterns: big_memory: Tuple[Pattern[str], ...] +@lru_cache def exceptional_blockchain_test_patterns( json_fork: str, eels_fork: str ) -> TestPatterns: @@ -104,6 +106,7 @@ def exceptional_blockchain_test_patterns( ) +@lru_cache def exceptional_state_test_patterns( json_fork: str, eels_fork: str ) -> TestPatterns: diff --git a/tests/json_infra/helpers/fixtures.py b/tests/json_infra/helpers/fixtures.py new file mode 100644 index 0000000000..f150d33540 --- /dev/null +++ b/tests/json_infra/helpers/fixtures.py @@ -0,0 +1,49 @@ +"""Base class for all fixture loaders.""" + +from abc import ABC, abstractmethod +from typing import Any, Dict, Self + +from _pytest.nodes import Node + + +class Fixture(ABC): + """ + Single fixture from a JSON file. + + It can be subclassed in combination with Item or Collector to create a + fixture that can be collected by pytest. 
+ """ + + test_file: str + test_key: str + test_dict: Dict[str, Any] + + def __init__( + self, + *args: Any, + test_file: str, + test_key: str, + test_dict: Dict[str, Any], + **kwargs: Any, + ): + super().__init__(*args, **kwargs) + self.test_file = test_file + self.test_key = test_key + self.test_dict = test_dict + + @classmethod + def from_parent( + cls, + parent: Node, + **kwargs: Any, + ) -> Self: + """Pytest hook that returns a fixture from a JSON file.""" + return super().from_parent( # type: ignore[misc] + parent=parent, **kwargs + ) + + @classmethod + @abstractmethod + def is_format(cls, test_dict: Dict[str, Any]) -> bool: + """Return true if the object can be parsed as the fixture type.""" + pass diff --git a/tests/json_infra/helpers/load_blockchain_tests.py b/tests/json_infra/helpers/load_blockchain_tests.py index d4a62ec878..94b54bcab1 100644 --- a/tests/json_infra/helpers/load_blockchain_tests.py +++ b/tests/json_infra/helpers/load_blockchain_tests.py @@ -1,17 +1,15 @@ """Helpers to load and run blockchain tests from JSON files.""" import importlib -import json -import os.path -from glob import glob -from typing import Any, Dict, Generator +from pathlib import Path +from typing import Any, Dict, Tuple from unittest.mock import call, patch import pytest -from _pytest.mark.structures import ParameterSet from ethereum_rlp import rlp from ethereum_rlp.exceptions import RLPException from ethereum_types.numeric import U64 +from pytest import Item from ethereum.crypto.hash import keccak256 from ethereum.exceptions import EthereumException, StateWithEmptyAccount @@ -20,6 +18,7 @@ from .. import FORKS from .exceptional_test_patterns import exceptional_blockchain_test_patterns +from .fixtures import Fixture class NoTestsFoundError(Exception): @@ -29,81 +28,6 @@ class NoTestsFoundError(Exception): """ -def run_blockchain_st_test(test_case: Dict, load: Load) -> None: - """Run a blockchain state test from JSON test case data.""" - test_file = test_case["test_file"] - test_key = test_case["test_key"] - - if "test_dict" in test_case: - json_data = test_case["test_dict"] - else: - with open(test_file, "r") as fp: - data = json.load(fp) - json_data = data[test_key] - - if "postState" not in json_data: - pytest.xfail(f"{test_case} doesn't have post state") - - genesis_header = load.json_to_header(json_data["genesisBlockHeader"]) - parameters = [ - genesis_header, - (), - (), - ] - if hasattr(genesis_header, "withdrawals_root"): - parameters.append(()) - - if hasattr(genesis_header, "requests_root"): - parameters.append(()) - - genesis_block = load.fork.Block(*parameters) - - genesis_header_hash = hex_to_bytes(json_data["genesisBlockHeader"]["hash"]) - assert keccak256(rlp.encode(genesis_header)) == genesis_header_hash - genesis_rlp = hex_to_bytes(json_data["genesisRLP"]) - assert rlp.encode(genesis_block) == genesis_rlp - - try: - state = load.json_to_state(json_data["pre"]) - except StateWithEmptyAccount as e: - pytest.xfail(str(e)) - - chain = load.fork.BlockChain( - blocks=[genesis_block], - state=state, - chain_id=U64(json_data["genesisBlockHeader"].get("chainId", 1)), - ) - - mock_pow = ( - json_data["sealEngine"] == "NoProof" and not load.fork.proof_of_stake - ) - - for json_block in json_data["blocks"]: - block_exception = None - for key, value in json_block.items(): - if key.startswith("expectException"): - block_exception = value - break - - if block_exception: - # TODO: Once all the specific exception types are thrown, - # only `pytest.raises` the correct exception type instead of - # 
all of them. - with pytest.raises((EthereumException, RLPException)): - add_block_to_chain(chain, json_block, load, mock_pow) - return - else: - add_block_to_chain(chain, json_block, load, mock_pow) - - last_block_hash = hex_to_bytes(json_data["lastblockhash"]) - assert keccak256(rlp.encode(chain.blocks[-1].header)) == last_block_hash - - expected_post_state = load.json_to_state(json_data["postState"]) - assert chain.state == expected_post_state - load.fork.close_state(chain.state) - load.fork.close_state(expected_post_state) - - def add_block_to_chain( chain: Any, json_block: Any, load: Load, mock_pow: bool ) -> None: @@ -135,96 +59,133 @@ def add_block_to_chain( ) -# Functions that fetch individual test cases -def load_json_fixture(test_file: str, json_fork: str) -> Generator: - """Load test cases from a JSON fixture file for the specified fork.""" - # Extract the pure basename of the file without the path to the file. - # Ex: Extract "world.json" from "path/to/file/world.json" - # Extract the filename without the extension. Ex: Extract "world" from - # "world.json" - with open(test_file, "r") as fp: - data = json.load(fp) - - # Search tests by looking at the `network` attribute - found_keys = [] - for key, test in data.items(): - if "network" not in test: - continue - - if test["network"] == json_fork: - found_keys.append(key) - - if not any(found_keys): - raise NoTestsFoundError - - for _key in found_keys: - yield { - "test_file": test_file, - "test_key": _key, - "json_fork": json_fork, - } - - -def fetch_blockchain_tests( - json_fork: str, -) -> Generator[Dict | ParameterSet, None, None]: - """Fetch all blockchain test cases for the specified JSON fork.""" - # Filter FORKS based on fork_option parameter - eels_fork = FORKS[json_fork]["eels_fork"] - test_dirs = FORKS[json_fork]["blockchain_test_dirs"] - - test_patterns = exceptional_blockchain_test_patterns(json_fork, eels_fork) - - # Get all the files to iterate over from both eest_tests_path - # and ethereum_tests_path - all_jsons = [] - for test_dir in test_dirs: - all_jsons.extend( - glob(os.path.join(test_dir, "**/*.json"), recursive=True) +class BlockchainTestFixture(Fixture, Item): + """Single blockchain test fixture from a JSON file.""" + + fork_name: str + + def __init__( + self, + *args: Any, + **kwargs: Any, + ) -> None: + """Initialize a single blockchain test fixture from a JSON file.""" + super().__init__(*args, **kwargs) + self.fork_name = self.test_dict["network"] + self.add_marker(pytest.mark.fork(self.fork_name)) + self.add_marker("evm_tools") + self.add_marker("json_blockchain_tests") + eels_fork = FORKS[self.fork_name]["eels_fork"] + test_patterns = exceptional_blockchain_test_patterns( + self.fork_name, eels_fork ) + assert self.test_file is not None + assert self.test_key is not None + _identifier = "(" + self.test_file + "|" + self.test_key + ")" + if any( + x.search(self.test_file) for x in test_patterns.expected_fail + ) or any(x.search(_identifier) for x in test_patterns.expected_fail): + self.add_marker(pytest.mark.skip("Expected to fail")) + if any(x.search(_identifier) for x in test_patterns.slow): + self.add_marker("slow") + if any(x.search(_identifier) for x in test_patterns.big_memory): + self.add_marker("bigmem") + + def runtest(self) -> None: + """Run a blockchain state test from JSON test case data.""" + json_data = self.test_dict + if "postState" not in json_data: + pytest.xfail( + f"{self.test_file}[{self.test_key}] doesn't have post state" + ) + + eels_fork = FORKS[self.fork_name]["eels_fork"] + 
load = Load( + self.fork_name, + eels_fork, + ) + + genesis_header = load.json_to_header(json_data["genesisBlockHeader"]) + parameters = [ + genesis_header, + (), + (), + ] + if hasattr(genesis_header, "withdrawals_root"): + parameters.append(()) - files_to_iterate = [] - for full_path in all_jsons: - if not any(x.search(full_path) for x in test_patterns.expected_fail): - # If a file or folder is marked for ignore, - # it can already be dropped at this stage - files_to_iterate.append(full_path) + if hasattr(genesis_header, "requests_root"): + parameters.append(()) + + genesis_block = load.fork.Block(*parameters) + + genesis_header_hash = hex_to_bytes( + json_data["genesisBlockHeader"]["hash"] + ) + assert keccak256(rlp.encode(genesis_header)) == genesis_header_hash + genesis_rlp = hex_to_bytes(json_data["genesisRLP"]) + assert rlp.encode(genesis_block) == genesis_rlp - # Start yielding individual test cases from the file list - for _test_file in files_to_iterate: try: - for _test_case in load_json_fixture(_test_file, json_fork): - # _identifier could identify files, folders through test_file - # individual cases through test_key - _identifier = ( - "(" - + _test_case["test_file"] - + "|" - + _test_case["test_key"] - + ")" - ) - _test_case["eels_fork"] = eels_fork - if any( - x.search(_identifier) for x in test_patterns.expected_fail - ): - continue - elif any(x.search(_identifier) for x in test_patterns.slow): - yield pytest.param(_test_case, marks=pytest.mark.slow) - elif any( - x.search(_identifier) for x in test_patterns.big_memory - ): - yield pytest.param(_test_case, marks=pytest.mark.bigmem) - else: - yield _test_case - except NoTestsFoundError: - # file doesn't contain tests for the given fork - continue - - -# Test case Identifier -def idfn(test_case: Dict) -> str: - """Generate test case identifier from test case dictionary.""" - if isinstance(test_case, dict): - folder_name = test_case["test_file"].split("/")[-2] - # Assign Folder name and test_key to identify tests in output - return folder_name + " - " + test_case["test_key"] + state = load.json_to_state(json_data["pre"]) + except StateWithEmptyAccount as e: + pytest.xfail(str(e)) + + chain = load.fork.BlockChain( + blocks=[genesis_block], + state=state, + chain_id=U64(json_data["genesisBlockHeader"].get("chainId", 1)), + ) + + mock_pow = ( + json_data["sealEngine"] == "NoProof" + and not load.fork.proof_of_stake + ) + + for json_block in json_data["blocks"]: + block_exception = None + for key, value in json_block.items(): + if key.startswith("expectException"): + block_exception = value + break + + if block_exception: + # TODO: Once all the specific exception types are thrown, + # only `pytest.raises` the correct exception type instead + # of all of them. 
+ with pytest.raises((EthereumException, RLPException)): + add_block_to_chain(chain, json_block, load, mock_pow) + return + else: + add_block_to_chain(chain, json_block, load, mock_pow) + + last_block_hash = hex_to_bytes(json_data["lastblockhash"]) + assert ( + keccak256(rlp.encode(chain.blocks[-1].header)) == last_block_hash + ) + + expected_post_state = load.json_to_state(json_data["postState"]) + assert chain.state == expected_post_state + load.fork.close_state(chain.state) + load.fork.close_state(expected_post_state) + + def reportinfo(self) -> Tuple[Path, int, str]: + """Return information for test reporting.""" + return self.path, 1, self.name + + @classmethod + def is_format(cls, test_dict: Dict[str, Any]) -> bool: + """Return true if the object can be parsed as the fixture type.""" + if "genesisBlockHeader" not in test_dict: + return False + if "blocks" not in test_dict: + return False + if "engineNewPayloads" in test_dict: + return False + if "preHash" in test_dict: + return False + if "network" not in test_dict: + return False + if test_dict["network"] not in FORKS: + return False + return True diff --git a/tests/json_infra/helpers/load_state_tests.py b/tests/json_infra/helpers/load_state_tests.py index 880510bed3..21eea12df7 100644 --- a/tests/json_infra/helpers/load_state_tests.py +++ b/tests/json_infra/helpers/load_state_tests.py @@ -1,152 +1,165 @@ """Helper functions to load and run general state tests for Ethereum forks.""" import json -import os import sys -from glob import glob from io import StringIO -from typing import Any, Dict, Generator +from typing import Any, Dict, Iterable import pytest +from _pytest.nodes import Item +from pytest import Collector from ethereum.exceptions import StateWithEmptyAccount from ethereum.utils.hexadecimal import hex_to_bytes from ethereum_spec_tools.evm_tools import create_parser -from ethereum_spec_tools.evm_tools.statetest import read_test_cases +from ethereum_spec_tools.evm_tools.statetest import TestCase as StateTestCase +from ethereum_spec_tools.evm_tools.statetest import ( + read_test_case as read_state_test_case, +) from ethereum_spec_tools.evm_tools.t8n import T8N from .. import FORKS -from .exceptional_test_patterns import exceptional_state_test_patterns +from .exceptional_test_patterns import ( + exceptional_state_test_patterns, +) +from .fixtures import Fixture parser = create_parser() -def fetch_state_tests(json_fork: str) -> Generator: - """ - Fetches all the general state tests from the given directory. 
- """ - # Filter FORKS based on fork_option parameter - eels_fork = FORKS[json_fork]["eels_fork"] - test_dirs = FORKS[json_fork]["state_test_dirs"] - - test_patterns = exceptional_state_test_patterns(json_fork, eels_fork) - - # Get all the files to iterate over from both eest_tests_path - # and ethereum_tests_path - all_jsons = [] - for test_dir in test_dirs: - all_jsons.extend( - glob(os.path.join(test_dir, "**/*.json"), recursive=True) +class StateTest(Item): + """Single state test case item.""" + + test_case: StateTestCase + test_dict: Dict[str, Any] + + def __init__( + self, + *args: Any, + test_case: StateTestCase, + test_dict: Dict[str, Any], + **kwargs: Any, + ) -> None: + """Initialize a single test case item.""" + super().__init__(*args, **kwargs) + self.test_case = test_case + self.test_dict = test_dict + self.add_marker(pytest.mark.fork(self.test_case.fork_name)) + self.add_marker("evm_tools") + self.add_marker("json_state_tests") + eels_fork = FORKS[test_case.fork_name]["eels_fork"] + test_patterns = exceptional_state_test_patterns( + test_case.fork_name, eels_fork + ) + if any(x.search(test_case.key) for x in test_patterns.slow): + self.add_marker("slow") + + def runtest(self) -> None: + """ + Runs a single general state test. + """ + index = self.test_case.index + json_fork = self.test_case.fork_name + test_dict = self.test_dict + + env = test_dict["env"] + try: + env["blockHashes"] = {"0": env["previousHash"]} + except KeyError: + env["blockHashes"] = {} + env["withdrawals"] = [] + + alloc = test_dict["pre"] + + post = test_dict["post"][json_fork][index] + post_hash = post["hash"] + d = post["indexes"]["data"] + g = post["indexes"]["gas"] + v = post["indexes"]["value"] + + tx = {} + for k, value in test_dict["transaction"].items(): + if k == "data": + tx["input"] = value[d] + elif k == "gasLimit": + tx["gas"] = value[g] + elif k == "value": + tx[k] = value[v] + elif k == "accessLists": + if value[d] is not None: + tx["accessList"] = value[d] + else: + tx[k] = value + + txs = [tx] + + in_stream = StringIO( + json.dumps( + { + "env": env, + "alloc": alloc, + "txs": txs, + } + ) ) - for test_file_path in all_jsons: - test_cases = read_test_cases(test_file_path) + # Run the t8n tool + t8n_args = [ + "t8n", + "--input.alloc", + "stdin", + "--input.env", + "stdin", + "--input.txs", + "stdin", + "--state.fork", + f"{json_fork}", + "--state-test", + ] + t8n_options = parser.parse_args(t8n_args) - for test_case in test_cases: - if test_case.fork_name != json_fork: - continue + try: + t8n = T8N(t8n_options, sys.stdout, in_stream) + except StateWithEmptyAccount as e: + pytest.xfail(str(e)) - test_case_dict = { - "test_file": test_case.path, - "test_key": test_case.key, - "index": test_case.index, - "json_fork": json_fork, - } + t8n.run_state_test() - if any(x.search(test_case.key) for x in test_patterns.slow): - yield pytest.param(test_case_dict, marks=pytest.mark.slow) - else: - yield test_case_dict + assert hex_to_bytes(post_hash) == t8n.result.state_root -def idfn(test_case: Dict) -> str: +class StateTestFixture(Fixture, Collector): """ - Identify the test case. + State test fixture from a JSON file that can contain multiple test + cases. """ - if isinstance(test_case, dict): - folder_name = test_case["test_file"].split("/")[-2] - test_key = test_case["test_key"] - index = test_case["index"] - return f"{folder_name} - {test_key} - {index}" - - -def run_state_test(test_case: Dict[str, str | Dict[str, Any]]) -> None: - """ - Runs a single general state test. 
- """ - test_file = test_case["test_file"] - test_key = test_case["test_key"] - index = test_case["index"] - json_fork = test_case["json_fork"] - if "test_dict" in test_case: - test_dict = test_case["test_dict"] - else: - with open(test_file) as f: - tests = json.load(f) - test_dict = tests[test_key] - - env = test_dict["env"] - try: - env["blockHashes"] = {"0": env["previousHash"]} - except KeyError: - env["blockHashes"] = {} - env["withdrawals"] = [] - - alloc = test_dict["pre"] - - post = test_dict["post"][json_fork][index] - post_hash = post["hash"] - d = post["indexes"]["data"] - g = post["indexes"]["gas"] - v = post["indexes"]["value"] - - tx = {} - for k, value in test_dict["transaction"].items(): - if k == "data": - tx["input"] = value[d] - elif k == "gasLimit": - tx["gas"] = value[g] - elif k == "value": - tx[k] = value[v] - elif k == "accessLists": - if value[d] is not None: - tx["accessList"] = value[d] - else: - tx[k] = value - - txs = [tx] - - in_stream = StringIO( - json.dumps( - { - "env": env, - "alloc": alloc, - "txs": txs, - } - ) - ) - - # Run the t8n tool - t8n_args = [ - "t8n", - "--input.alloc", - "stdin", - "--input.env", - "stdin", - "--input.txs", - "stdin", - "--state.fork", - f"{json_fork}", - "--state-test", - ] - t8n_options = parser.parse_args(t8n_args) - - try: - t8n = T8N(t8n_options, sys.stdout, in_stream) - except StateWithEmptyAccount as e: - pytest.xfail(str(e)) - - t8n.run_state_test() - - assert hex_to_bytes(post_hash) == t8n.result.state_root + @classmethod + def is_format(cls, test_dict: Dict[str, Any]) -> bool: + """Return true if the object can be parsed as the fixture type.""" + if "env" not in test_dict: + return False + if "pre" not in test_dict: + return False + if "transaction" not in test_dict: + return False + if "post" not in test_dict: + return False + return True + + def collect(self) -> Iterable[Item | Collector]: + """Collect state test cases inside of this fixture.""" + for test_case in read_state_test_case( + test_file_path=self.test_file, + key=self.test_key, + test=self.test_dict, + ): + if test_case.fork_name not in FORKS: + continue + name = f"{test_case.index}" + yield StateTest.from_parent( + parent=self, + name=name, + test_case=test_case, + test_dict=self.test_dict, + ) From 5e9663e937d66e5f6dccc434c69e216765628d96 Mon Sep 17 00:00:00 2001 From: Mario Vega Date: Thu, 23 Oct 2025 18:09:45 +0000 Subject: [PATCH 06/18] fix(tests): json_infra, imports, parse `exceptions` in some tests --- tests/json_infra/helpers/load_blockchain_tests.py | 3 +++ tests/json_infra/helpers/load_state_tests.py | 11 ++++------- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/tests/json_infra/helpers/load_blockchain_tests.py b/tests/json_infra/helpers/load_blockchain_tests.py index 94b54bcab1..d312eae2e6 100644 --- a/tests/json_infra/helpers/load_blockchain_tests.py +++ b/tests/json_infra/helpers/load_blockchain_tests.py @@ -148,6 +148,9 @@ def runtest(self) -> None: if key.startswith("expectException"): block_exception = value break + if key == "exceptions": + block_exception = value + break if block_exception: # TODO: Once all the specific exception types are thrown, diff --git a/tests/json_infra/helpers/load_state_tests.py b/tests/json_infra/helpers/load_state_tests.py index 21eea12df7..ac9532b1e6 100644 --- a/tests/json_infra/helpers/load_state_tests.py +++ b/tests/json_infra/helpers/load_state_tests.py @@ -12,10 +12,7 @@ from ethereum.exceptions import StateWithEmptyAccount from ethereum.utils.hexadecimal import hex_to_bytes from 
ethereum_spec_tools.evm_tools import create_parser -from ethereum_spec_tools.evm_tools.statetest import TestCase as StateTestCase -from ethereum_spec_tools.evm_tools.statetest import ( - read_test_case as read_state_test_case, -) +from ethereum_spec_tools.evm_tools.statetest import TestCase, read_test_case from ethereum_spec_tools.evm_tools.t8n import T8N from .. import FORKS @@ -30,13 +27,13 @@ class StateTest(Item): """Single state test case item.""" - test_case: StateTestCase + test_case: TestCase test_dict: Dict[str, Any] def __init__( self, *args: Any, - test_case: StateTestCase, + test_case: TestCase, test_dict: Dict[str, Any], **kwargs: Any, ) -> None: @@ -149,7 +146,7 @@ def is_format(cls, test_dict: Dict[str, Any]) -> bool: def collect(self) -> Iterable[Item | Collector]: """Collect state test cases inside of this fixture.""" - for test_case in read_state_test_case( + for test_case in read_test_case( test_file_path=self.test_file, key=self.test_key, test=self.test_dict, From 226f22cc40bbfaff04b5341c5c3bedc36499f947 Mon Sep 17 00:00:00 2001 From: Mario Vega Date: Thu, 23 Oct 2025 18:18:36 +0000 Subject: [PATCH 07/18] refactor(tests): move some definitions --- tests/json_infra/conftest.py | 38 ++------------------------- tests/json_infra/helpers/__init__.py | 12 +++------ tests/json_infra/helpers/fixtures.py | 39 +++++++++++++++++++++++++++- 3 files changed, 44 insertions(+), 45 deletions(-) diff --git a/tests/json_infra/conftest.py b/tests/json_infra/conftest.py index 0cc080b4c8..6c64f33d26 100644 --- a/tests/json_infra/conftest.py +++ b/tests/json_infra/conftest.py @@ -1,6 +1,5 @@ """Pytest configuration for the json infra tests.""" -import json import os import shutil import tarfile @@ -9,7 +8,6 @@ from typing import ( Callable, Final, - Generator, Optional, Self, Set, @@ -22,12 +20,12 @@ from _pytest.nodes import Item from filelock import FileLock from git.exc import GitCommandError, InvalidGitRepositoryError -from pytest import Collector, File, Session, StashKey, fixture +from pytest import Collector, Session, StashKey, fixture from requests_cache import CachedSession from requests_cache.backends.sqlite import SQLiteCache from . import TEST_FIXTURES -from .helpers import ALL_FIXTURE_TYPES +from .helpers import FixturesFile try: from xdist import get_xdist_worker_id @@ -299,35 +297,3 @@ def pytest_collect_file( if file_path.suffix == ".json": return FixturesFile.from_parent(parent, path=file_path) return None - - -class FixturesFile(File): - """Single JSON file containing fixtures.""" - - def collect( - self: Self, - ) -> Generator[Item | Collector, None, None]: - """Collect test cases from a single JSON fixtures file.""" - with open(self.path, "r") as file: - try: - loaded_file = json.load(file) - except Exception: - return # Skip *.json files that are unreadable. 
- if not isinstance(loaded_file, dict): - return - for key, test_dict in loaded_file.items(): - if not isinstance(test_dict, dict): - continue - for fixture_type in ALL_FIXTURE_TYPES: - if not fixture_type.is_format(test_dict): - continue - name = key - if "::" in name: - name = name.split("::")[1] - yield fixture_type.from_parent( # type: ignore - parent=self, - name=name, - test_file=str(self.path), - test_key=key, - test_dict=test_dict, - ) diff --git a/tests/json_infra/helpers/__init__.py b/tests/json_infra/helpers/__init__.py index 7791d9803c..a7f548ace4 100644 --- a/tests/json_infra/helpers/__init__.py +++ b/tests/json_infra/helpers/__init__.py @@ -1,14 +1,10 @@ """Helpers to load tests from JSON files.""" -from typing import List, Type - -from .fixtures import Fixture +from .fixtures import ALL_FIXTURE_TYPES, Fixture, FixturesFile from .load_blockchain_tests import BlockchainTestFixture from .load_state_tests import StateTestFixture -ALL_FIXTURE_TYPES: List[Type[Fixture]] = [ - BlockchainTestFixture, - StateTestFixture, -] +ALL_FIXTURE_TYPES.append(BlockchainTestFixture) +ALL_FIXTURE_TYPES.append(StateTestFixture) -__all__ = ["ALL_FIXTURE_TYPES", "Fixture"] +__all__ = ["ALL_FIXTURE_TYPES", "Fixture", "FixturesFile"] diff --git a/tests/json_infra/helpers/fixtures.py b/tests/json_infra/helpers/fixtures.py index f150d33540..2f2587c76d 100644 --- a/tests/json_infra/helpers/fixtures.py +++ b/tests/json_infra/helpers/fixtures.py @@ -1,9 +1,11 @@ """Base class for all fixture loaders.""" +import json from abc import ABC, abstractmethod -from typing import Any, Dict, Self +from typing import Any, Dict, Generator, List, Self, Type from _pytest.nodes import Node +from pytest import Collector, File, Item class Fixture(ABC): @@ -47,3 +49,38 @@ def from_parent( def is_format(cls, test_dict: Dict[str, Any]) -> bool: """Return true if the object can be parsed as the fixture type.""" pass + + +ALL_FIXTURE_TYPES: List[Type[Fixture]] = [] + + +class FixturesFile(File): + """Single JSON file containing fixtures.""" + + def collect( + self: Self, + ) -> Generator[Item | Collector, None, None]: + """Collect test cases from a single JSON fixtures file.""" + with open(self.path, "r") as file: + try: + loaded_file = json.load(file) + except Exception: + return # Skip *.json files that are unreadable. 
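A note on the pytest node API these helper classes build on (the fixtures.py hunk continues below): custom collectors and items must be created through `from_parent()` rather than by calling the class directly, and per-item data or markers are attached at construction time — which is why `Fixture` wraps `from_parent` and the item classes call `add_marker` in `__init__`. A minimal sketch, with an illustrative `ExampleItem` that is not part of the diff:

```python
from typing import Any, Dict

import pytest


class ExampleItem(pytest.Item):
    def __init__(self, *, payload: Dict[str, Any], **kwargs: Any) -> None:
        super().__init__(**kwargs)
        self.payload = payload
        # Markers can be attached per item; strings and mark objects both work.
        self.add_marker("slow")
        self.add_marker(pytest.mark.fork(payload.get("network", "Unknown")))

    def runtest(self) -> None:
        assert self.payload  # placeholder for the real fixture logic


# Inside some Collector.collect() implementation:
#     yield ExampleItem.from_parent(self, name="case-0", payload=test_dict)
```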
+ if not isinstance(loaded_file, dict): + return + for key, test_dict in loaded_file.items(): + if not isinstance(test_dict, dict): + continue + for fixture_type in ALL_FIXTURE_TYPES: + if not fixture_type.is_format(test_dict): + continue + name = key + if "::" in name: + name = name.split("::")[1] + yield fixture_type.from_parent( # type: ignore + parent=self, + name=name, + test_file=str(self.path), + test_key=key, + test_dict=test_dict, + ) From fac94338e16712990aba3e1802f72c305b8df090 Mon Sep 17 00:00:00 2001 From: Mario Vega Date: Thu, 23 Oct 2025 18:26:16 +0000 Subject: [PATCH 08/18] fix(tox.ini): Remove `--ignore-glob` --- tox.ini | 2 -- 1 file changed, 2 deletions(-) diff --git a/tox.ini b/tox.ini index e52433e286..3358c439e9 100644 --- a/tox.ini +++ b/tox.ini @@ -55,7 +55,6 @@ commands = --cov-report "xml:{toxworkdir}/coverage.xml" \ --no-cov-on-fail \ --cov-branch \ - --ignore-glob='tests/json_infra/fixtures/*' \ --basetemp="{temp_dir}/pytest" \ tests/json_infra @@ -100,7 +99,6 @@ commands = pytest \ -m "not slow and not evm_tools" \ -n auto --maxprocesses 5 \ - --ignore-glob='tests/json_infra/fixtures/*' \ --ignore-glob='tests/test_t8n.py' \ --ignore-glob='eest_tests/*' \ --basetemp="{temp_dir}/pytest" \ From b7f14c1e927d12178713a4156f7463ba7627c44e Mon Sep 17 00:00:00 2001 From: Mario Vega Date: Thu, 23 Oct 2025 18:40:29 +0000 Subject: [PATCH 09/18] fix(tests): workaround for FileNotFoundError --- tests/json_infra/conftest.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/json_infra/conftest.py b/tests/json_infra/conftest.py index 6c64f33d26..2ac187718d 100644 --- a/tests/json_infra/conftest.py +++ b/tests/json_infra/conftest.py @@ -272,7 +272,11 @@ def pytest_sessionstart(session: Session) -> None: for python_file in glob( os.path.join(fixture_path, "**/*.py"), recursive=True ): - os.unlink(python_file) + try: + os.unlink(python_file) + except FileNotFoundError: + # Not breaking error, another process deleted it first + pass def pytest_sessionfinish(session: Session, exitstatus: int) -> None: From f955121b0328c42c5e19d3e4eecb8c1a9df3f125 Mon Sep 17 00:00:00 2001 From: Mario Vega Date: Thu, 23 Oct 2025 22:54:13 +0000 Subject: [PATCH 10/18] fix(tests): revamp cache fix(tests): Don't cache fixtures Try to implement cache Fix caching feat(tests): Manage cache during execution --- tests/json_infra/conftest.py | 15 +++- tests/json_infra/helpers/__init__.py | 4 +- .../helpers/exceptional_test_patterns.py | 3 - tests/json_infra/helpers/fixtures.py | 70 +++++++++++------ .../helpers/load_blockchain_tests.py | 19 ++++- tests/json_infra/helpers/load_state_tests.py | 77 ++++++++++++++----- 6 files changed, 134 insertions(+), 54 deletions(-) diff --git a/tests/json_infra/conftest.py b/tests/json_infra/conftest.py index 2ac187718d..c89f4711d9 100644 --- a/tests/json_infra/conftest.py +++ b/tests/json_infra/conftest.py @@ -25,7 +25,7 @@ from requests_cache.backends.sqlite import SQLiteCache from . import TEST_FIXTURES -from .helpers import FixturesFile +from .helpers import FixturesFile, FixtureTestItem try: from xdist import get_xdist_worker_id @@ -301,3 +301,16 @@ def pytest_collect_file( if file_path.suffix == ".json": return FixturesFile.from_parent(parent, path=file_path) return None + + +def pytest_runtest_teardown(item: Item, nextitem: Item) -> None: + """ + Drop cache from a `FixtureTestItem` if the next one is not of the + same type or does not belong to the same fixtures file. 
+ """ + if isinstance(item, FixtureTestItem): + if not isinstance(nextitem, FixtureTestItem): + item.fixtures_file.clear_data_cache() + else: + if item.fixtures_file != nextitem.fixtures_file: + item.fixtures_file.clear_data_cache() diff --git a/tests/json_infra/helpers/__init__.py b/tests/json_infra/helpers/__init__.py index a7f548ace4..2980c854e2 100644 --- a/tests/json_infra/helpers/__init__.py +++ b/tests/json_infra/helpers/__init__.py @@ -1,10 +1,10 @@ """Helpers to load tests from JSON files.""" -from .fixtures import ALL_FIXTURE_TYPES, Fixture, FixturesFile +from .fixtures import ALL_FIXTURE_TYPES, Fixture, FixturesFile, FixtureTestItem from .load_blockchain_tests import BlockchainTestFixture from .load_state_tests import StateTestFixture ALL_FIXTURE_TYPES.append(BlockchainTestFixture) ALL_FIXTURE_TYPES.append(StateTestFixture) -__all__ = ["ALL_FIXTURE_TYPES", "Fixture", "FixturesFile"] +__all__ = ["ALL_FIXTURE_TYPES", "Fixture", "FixturesFile", "FixtureTestItem"] diff --git a/tests/json_infra/helpers/exceptional_test_patterns.py b/tests/json_infra/helpers/exceptional_test_patterns.py index 6355cfbf81..cebb1a9ef5 100644 --- a/tests/json_infra/helpers/exceptional_test_patterns.py +++ b/tests/json_infra/helpers/exceptional_test_patterns.py @@ -5,7 +5,6 @@ import re from dataclasses import dataclass -from functools import lru_cache from typing import Pattern, Tuple @@ -21,7 +20,6 @@ class TestPatterns: big_memory: Tuple[Pattern[str], ...] -@lru_cache def exceptional_blockchain_test_patterns( json_fork: str, eels_fork: str ) -> TestPatterns: @@ -106,7 +104,6 @@ def exceptional_blockchain_test_patterns( ) -@lru_cache def exceptional_state_test_patterns( json_fork: str, eels_fork: str ) -> TestPatterns: diff --git a/tests/json_infra/helpers/fixtures.py b/tests/json_infra/helpers/fixtures.py index 2f2587c76d..701dbee2e8 100644 --- a/tests/json_infra/helpers/fixtures.py +++ b/tests/json_infra/helpers/fixtures.py @@ -2,12 +2,24 @@ import json from abc import ABC, abstractmethod +from functools import cached_property from typing import Any, Dict, Generator, List, Self, Type from _pytest.nodes import Node from pytest import Collector, File, Item +class FixtureTestItem(Item): + """ + Test item that comes from a fixture file. + """ + + @property + def fixtures_file(self) -> "FixturesFile": + """Return the fixtures file from which the test was extracted.""" + raise NotImplementedError() + + class Fixture(ABC): """ Single fixture from a JSON file. 
@@ -18,20 +30,17 @@ class Fixture(ABC): test_file: str test_key: str - test_dict: Dict[str, Any] def __init__( self, *args: Any, test_file: str, test_key: str, - test_dict: Dict[str, Any], **kwargs: Any, ): super().__init__(*args, **kwargs) self.test_file = test_file self.test_key = test_key - self.test_dict = test_dict @classmethod def from_parent( @@ -57,30 +66,41 @@ def is_format(cls, test_dict: Dict[str, Any]) -> bool: class FixturesFile(File): """Single JSON file containing fixtures.""" + @cached_property + def data(self) -> Dict[str, Any]: + """Return the JSON data of the full file.""" + # loaded once per worker per file (thanks to cached_property) + with self.fspath.open("r", encoding="utf-8") as f: + return json.load(f) + + def clear_data_cache(self) -> None: + """Drop the data cache.""" + del self.data + def collect( self: Self, ) -> Generator[Item | Collector, None, None]: """Collect test cases from a single JSON fixtures file.""" - with open(self.path, "r") as file: - try: - loaded_file = json.load(file) - except Exception: - return # Skip *.json files that are unreadable. - if not isinstance(loaded_file, dict): - return - for key, test_dict in loaded_file.items(): - if not isinstance(test_dict, dict): + try: + loaded_file = self.data + except Exception: + return # Skip *.json files that are unreadable. + if not isinstance(loaded_file, dict): + return + for key, test_dict in loaded_file.items(): + if not isinstance(test_dict, dict): + continue + for fixture_type in ALL_FIXTURE_TYPES: + if not fixture_type.is_format(test_dict): continue - for fixture_type in ALL_FIXTURE_TYPES: - if not fixture_type.is_format(test_dict): - continue - name = key - if "::" in name: - name = name.split("::")[1] - yield fixture_type.from_parent( # type: ignore - parent=self, - name=name, - test_file=str(self.path), - test_key=key, - test_dict=test_dict, - ) + name = key + if "::" in name: + name = name.split("::")[1] + yield fixture_type.from_parent( # type: ignore + parent=self, + name=name, + test_file=str(self.path), + test_key=key, + ) + # Make sure we don't keep anything from collection in memory. + self.clear_data_cache() diff --git a/tests/json_infra/helpers/load_blockchain_tests.py b/tests/json_infra/helpers/load_blockchain_tests.py index d312eae2e6..5ee4695d1e 100644 --- a/tests/json_infra/helpers/load_blockchain_tests.py +++ b/tests/json_infra/helpers/load_blockchain_tests.py @@ -9,7 +9,6 @@ from ethereum_rlp import rlp from ethereum_rlp.exceptions import RLPException from ethereum_types.numeric import U64 -from pytest import Item from ethereum.crypto.hash import keccak256 from ethereum.exceptions import EthereumException, StateWithEmptyAccount @@ -18,7 +17,7 @@ from .. 
import FORKS from .exceptional_test_patterns import exceptional_blockchain_test_patterns -from .fixtures import Fixture +from .fixtures import Fixture, FixturesFile, FixtureTestItem class NoTestsFoundError(Exception): @@ -59,7 +58,7 @@ def add_block_to_chain( ) -class BlockchainTestFixture(Fixture, Item): +class BlockchainTestFixture(Fixture, FixtureTestItem): """Single blockchain test fixture from a JSON file.""" fork_name: str @@ -91,6 +90,20 @@ def __init__( if any(x.search(_identifier) for x in test_patterns.big_memory): self.add_marker("bigmem") + @property + def fixtures_file(self) -> FixturesFile: + """Fixtures file from which the test fixture was collected.""" + parent = self.parent + assert parent is not None + assert isinstance(parent, FixturesFile) + return parent + + @property + def test_dict(self) -> Dict[str, Any]: + """Load test from disk.""" + loaded_file = self.fixtures_file.data + return loaded_file[self.test_key] + def runtest(self) -> None: """Run a blockchain state test from JSON test case data.""" json_data = self.test_dict diff --git a/tests/json_infra/helpers/load_state_tests.py b/tests/json_infra/helpers/load_state_tests.py index ac9532b1e6..dece8307eb 100644 --- a/tests/json_infra/helpers/load_state_tests.py +++ b/tests/json_infra/helpers/load_state_tests.py @@ -12,51 +12,73 @@ from ethereum.exceptions import StateWithEmptyAccount from ethereum.utils.hexadecimal import hex_to_bytes from ethereum_spec_tools.evm_tools import create_parser -from ethereum_spec_tools.evm_tools.statetest import TestCase, read_test_case +from ethereum_spec_tools.evm_tools.statetest import read_test_case from ethereum_spec_tools.evm_tools.t8n import T8N from .. import FORKS from .exceptional_test_patterns import ( exceptional_state_test_patterns, ) -from .fixtures import Fixture +from .fixtures import Fixture, FixturesFile, FixtureTestItem parser = create_parser() -class StateTest(Item): +class StateTest(FixtureTestItem): """Single state test case item.""" - test_case: TestCase - test_dict: Dict[str, Any] + index: int + fork_name: str def __init__( self, *args: Any, - test_case: TestCase, - test_dict: Dict[str, Any], + index: int, + fork_name: str, + key: str, **kwargs: Any, ) -> None: """Initialize a single test case item.""" super().__init__(*args, **kwargs) - self.test_case = test_case - self.test_dict = test_dict - self.add_marker(pytest.mark.fork(self.test_case.fork_name)) + self.index = index + self.fork_name = fork_name + self.add_marker(pytest.mark.fork(self.fork_name)) self.add_marker("evm_tools") self.add_marker("json_state_tests") - eels_fork = FORKS[test_case.fork_name]["eels_fork"] - test_patterns = exceptional_state_test_patterns( - test_case.fork_name, eels_fork - ) - if any(x.search(test_case.key) for x in test_patterns.slow): + eels_fork = FORKS[fork_name]["eels_fork"] + test_patterns = exceptional_state_test_patterns(fork_name, eels_fork) + if any(x.search(key) for x in test_patterns.slow): self.add_marker("slow") + @property + def state_test_fixture(self) -> "StateTestFixture": + """Return the state test fixture this test belongs to.""" + parent = self.parent + assert parent is not None + assert isinstance(parent, StateTestFixture) + return parent + + @property + def test_key(self) -> str: + """Return the key of the state test fixture in the fixture file.""" + return self.state_test_fixture.test_key + + @property + def fixtures_file(self) -> FixturesFile: + """Fixtures file from which the test fixture was collected.""" + return self.state_test_fixture.fixtures_file + + 
@property + def test_dict(self) -> Dict[str, Any]: + """Load test from disk.""" + loaded_file = self.fixtures_file.data + return loaded_file[self.test_key] + def runtest(self) -> None: """ Runs a single general state test. """ - index = self.test_case.index - json_fork = self.test_case.fork_name + json_fork = self.fork_name test_dict = self.test_dict env = test_dict["env"] @@ -68,7 +90,7 @@ def runtest(self) -> None: alloc = test_dict["pre"] - post = test_dict["post"][json_fork][index] + post = test_dict["post"][self.fork_name][self.index] post_hash = post["hash"] d = post["indexes"]["data"] g = post["indexes"]["gas"] @@ -144,6 +166,20 @@ def is_format(cls, test_dict: Dict[str, Any]) -> bool: return False return True + @property + def fixtures_file(self) -> FixturesFile: + """Fixtures file from which the test fixture was collected.""" + parent = self.parent + assert parent is not None + assert isinstance(parent, FixturesFile) + return parent + + @property + def test_dict(self) -> Dict[str, Any]: + """Load test from disk.""" + loaded_file = self.fixtures_file.data + return loaded_file[self.test_key] + def collect(self) -> Iterable[Item | Collector]: """Collect state test cases inside of this fixture.""" for test_case in read_test_case( @@ -157,6 +193,7 @@ def collect(self) -> Iterable[Item | Collector]: yield StateTest.from_parent( parent=self, name=name, - test_case=test_case, - test_dict=self.test_dict, + index=test_case.index, + fork_name=test_case.fork_name, + key=self.test_key, ) From dde753239e907e8a4d95bd6af0a7933f6d263ca8 Mon Sep 17 00:00:00 2001 From: Mario Vega Date: Sat, 1 Nov 2025 00:06:00 +0000 Subject: [PATCH 11/18] fix(tox): Use `--dist=loadfile` --- tox.ini | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tox.ini b/tox.ini index 3358c439e9..14e3019322 100644 --- a/tox.ini +++ b/tox.ini @@ -48,7 +48,7 @@ commands = commands = pytest \ -m "not slow" \ - -n auto --maxprocesses 6 \ + -n auto --maxprocesses 6 --dist=loadfile \ --cov-config=pyproject.toml \ --cov=ethereum \ --cov-report=term \ @@ -98,7 +98,7 @@ passenv = commands = pytest \ -m "not slow and not evm_tools" \ - -n auto --maxprocesses 5 \ + -n auto --maxprocesses 5 --dist=loadfile \ --ignore-glob='tests/test_t8n.py' \ --ignore-glob='eest_tests/*' \ --basetemp="{temp_dir}/pytest" \ From 011051133789d4e4057503ce983e9cbbf11b1035 Mon Sep 17 00:00:00 2001 From: Mario Vega Date: Mon, 3 Nov 2025 22:59:08 +0000 Subject: [PATCH 12/18] fix(tests): json files cache --- tests/json_infra/conftest.py | 9 ++++---- tests/json_infra/helpers/fixtures.py | 33 ++++++++++++++-------------- 2 files changed, 21 insertions(+), 21 deletions(-) diff --git a/tests/json_infra/conftest.py b/tests/json_infra/conftest.py index c89f4711d9..7545579fa5 100644 --- a/tests/json_infra/conftest.py +++ b/tests/json_infra/conftest.py @@ -309,8 +309,9 @@ def pytest_runtest_teardown(item: Item, nextitem: Item) -> None: same type or does not belong to the same fixtures file. 
""" if isinstance(item, FixtureTestItem): - if not isinstance(nextitem, FixtureTestItem): + if ( + nextitem is None + or not isinstance(nextitem, FixtureTestItem) + or item.fixtures_file != nextitem.fixtures_file + ): item.fixtures_file.clear_data_cache() - else: - if item.fixtures_file != nextitem.fixtures_file: - item.fixtures_file.clear_data_cache() diff --git a/tests/json_infra/helpers/fixtures.py b/tests/json_infra/helpers/fixtures.py index 701dbee2e8..0189b98c39 100644 --- a/tests/json_infra/helpers/fixtures.py +++ b/tests/json_infra/helpers/fixtures.py @@ -75,7 +75,7 @@ def data(self) -> Dict[str, Any]: def clear_data_cache(self) -> None: """Drop the data cache.""" - del self.data + self.__dict__.pop("data", None) def collect( self: Self, @@ -85,22 +85,21 @@ def collect( loaded_file = self.data except Exception: return # Skip *.json files that are unreadable. - if not isinstance(loaded_file, dict): - return - for key, test_dict in loaded_file.items(): - if not isinstance(test_dict, dict): - continue - for fixture_type in ALL_FIXTURE_TYPES: - if not fixture_type.is_format(test_dict): + if isinstance(loaded_file, dict): + for key, test_dict in loaded_file.items(): + if not isinstance(test_dict, dict): continue - name = key - if "::" in name: - name = name.split("::")[1] - yield fixture_type.from_parent( # type: ignore - parent=self, - name=name, - test_file=str(self.path), - test_key=key, - ) + for fixture_type in ALL_FIXTURE_TYPES: + if not fixture_type.is_format(test_dict): + continue + name = key + if "::" in name: + name = name.split("::")[1] + yield fixture_type.from_parent( # type: ignore + parent=self, + name=name, + test_file=str(self.path), + test_key=key, + ) # Make sure we don't keep anything from collection in memory. self.clear_data_cache() From 9c028ec84659ef8dead21168f695e93b2a436f87 Mon Sep 17 00:00:00 2001 From: Guruprasad Kamath <48196632+gurukamath@users.noreply.github.com> Date: Thu, 20 Nov 2025 11:06:59 -0300 Subject: [PATCH 13/18] Run selective tests based on changed files (#1) * fix(tests): remove evm_tools marker from blockchain tests * remove coverage from json_infra * enhance(tools): add json_test_name to Hardfork * fix(tests): handle failing transactions in state tests * enhance(tests): add from and until fork option to json_infra * enhance(tests): run json_infra selectively * enhance(tests): subclass Hardfork * bug(tests): run all tests for t8n changes * enhance(tests): minor fix --- .github/workflows/test.yaml | 24 +++- tests/json_infra/__init__.py | 115 +----------------- tests/json_infra/conftest.py | 115 ++++++++++++------ tests/json_infra/hardfork.py | 31 +++++ tests/json_infra/helpers/fixtures.py | 18 ++- .../helpers/load_blockchain_tests.py | 24 ++-- tests/json_infra/helpers/load_state_tests.py | 35 +++++- tests/json_infra/helpers/select_tests.py | 88 ++++++++++++++ tests/json_infra/stash_keys.py | 9 ++ tests/json_infra/test_ethash.py | 6 +- tests/json_infra/test_trie.py | 10 +- tox.ini | 7 +- 12 files changed, 307 insertions(+), 175 deletions(-) create mode 100644 tests/json_infra/hardfork.py create mode 100644 tests/json_infra/helpers/select_tests.py create mode 100644 tests/json_infra/stash_keys.py diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index ff52b55ae4..f421be78f0 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -105,13 +105,35 @@ jobs: - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 with: submodules: recursive + fetch-depth: 0 # Fetch full history for commit 
comparison - name: Setup Python uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 with: python-version: "3.11" - uses: ./.github/actions/setup-env + - name: Get changed files and save to disk + id: get-changed-files + run: | + if [ "${{ github.event_name }}" = "pull_request" ]; then + BASE_SHA="${{ github.event.pull_request.base.sha }}" + HEAD_SHA="${{ github.event.pull_request.head.sha }}" + else + # On push or force push to the feature branch + BASE_SHA="${{ github.event.before }}" + HEAD_SHA="${{ github.sha }}" + fi + + echo "Diffing commits: $BASE_SHA..$HEAD_SHA" + + # Get changed files and save to disk + FILE_LIST="changed_files.txt" + git diff --name-only "$BASE_SHA" "$HEAD_SHA" > "$FILE_LIST" + echo "Changed files saved to $FILE_LIST" + echo "file_list=$FILE_LIST" >> $GITHUB_OUTPUT + echo "List of files changed in the PR" + cat $FILE_LIST - name: Run json infra tests - run: tox -e json_infra + run: tox -e json_infra -- --file-list=${{ steps.get-changed-files.outputs.file_list }} - name: Upload coverage reports to Codecov uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 with: diff --git a/tests/json_infra/__init__.py b/tests/json_infra/__init__.py index 0b43585434..fd2db5553f 100644 --- a/tests/json_infra/__init__.py +++ b/tests/json_infra/__init__.py @@ -1,9 +1,11 @@ """Tests related to json infrastructure.""" -from typing import Dict, Optional, TypedDict +from typing import Dict, TypedDict from typing_extensions import NotRequired +from .hardfork import TestHardfork + class _FixtureSource(TypedDict): url: str @@ -31,113 +33,6 @@ class _FixtureSource(TypedDict): } -def _get_fixture_path(key: str) -> str: - return TEST_FIXTURES[key]["fixture_path"] - - -def _build_ethereum_test_paths( - base_path: str, legacy_fork: Optional[str] = None -) -> tuple: - if legacy_fork: - bc_path = f"{base_path}/LegacyTests/{legacy_fork}/BlockchainTests/" - state_path = ( - f"{base_path}/LegacyTests/{legacy_fork}/GeneralStateTests/" - ) - else: - bc_path = f"{base_path}/BlockchainTests/" - state_path = f"{base_path}/GeneralStateTests/" - return bc_path, state_path - - -def _build_eest_test_paths(base_path: str) -> tuple: - bc_path = f"{base_path}/fixtures/blockchain_tests/" - state_path = f"{base_path}/fixtures/state_tests/" - return bc_path, state_path - - -# Base paths -ETHEREUM_TESTS_BASE = _get_fixture_path("ethereum_tests") -EEST_TESTS_BASE = _get_fixture_path("latest_fork_tests") - -# Ethereum test paths -( - PRE_CONSTANTINOPLE_BC_ETHEREUM_TESTS, - PRE_CONSTANTINOPLE_STATE_ETHEREUM_TESTS, -) = _build_ethereum_test_paths(ETHEREUM_TESTS_BASE, "Constantinople") -( - PRE_CANCUN_BC_ETHEREUM_TESTS, - PRE_CANCUN_STATE_ETHEREUM_TESTS, -) = _build_ethereum_test_paths(ETHEREUM_TESTS_BASE, "Cancun") -BC_ETHEREUM_TESTS, STATE_ETHEREUM_TESTS = _build_ethereum_test_paths( - ETHEREUM_TESTS_BASE -) - -# EEST test paths -EEST_BC_TESTS, EEST_STATE_TESTS = _build_eest_test_paths(EEST_TESTS_BASE) - -ForkConfig = TypedDict( - "ForkConfig", - { - "eels_fork": str, - "blockchain_test_dirs": list[str], - "state_test_dirs": list[str], - }, -) - - -def _create_fork_config( - eels_fork: str, bc_dirs: list, state_dirs: list -) -> ForkConfig: - return { - "eels_fork": eels_fork, - "blockchain_test_dirs": bc_dirs, - "state_test_dirs": state_dirs, - } - - -PRE_CONSTANTINOPLE_DIRS = ( - [PRE_CONSTANTINOPLE_BC_ETHEREUM_TESTS, EEST_BC_TESTS], - [PRE_CONSTANTINOPLE_STATE_ETHEREUM_TESTS, EEST_STATE_TESTS], -) - -PRE_CANCUN_DIRS = ( - [PRE_CANCUN_BC_ETHEREUM_TESTS, EEST_BC_TESTS], - 
[PRE_CANCUN_STATE_ETHEREUM_TESTS, EEST_STATE_TESTS], -) - -CURRENT_DIRS = ( - [BC_ETHEREUM_TESTS, EEST_BC_TESTS], - [STATE_ETHEREUM_TESTS, EEST_STATE_TESTS], -) - -FORKS: Dict[str, ForkConfig] = { - **{ - json_fork: _create_fork_config(eels_fork, *PRE_CONSTANTINOPLE_DIRS) - for json_fork, eels_fork in [ - ("Frontier", "frontier"), - ("Homestead", "homestead"), - ("EIP150", "tangerine_whistle"), - ("EIP158", "spurious_dragon"), - ("Byzantium", "byzantium"), - ("ConstantinopleFix", "constantinople"), - ] - }, - **{ - json_fork: _create_fork_config(eels_fork, *PRE_CANCUN_DIRS) - for json_fork, eels_fork in [ - ("Istanbul", "istanbul"), - ("Berlin", "berlin"), - ("London", "london"), - ("Paris", "paris"), - ("Shanghai", "shanghai"), - ] - }, - **{ - json_fork: _create_fork_config(eels_fork, *CURRENT_DIRS) - for json_fork, eels_fork in [ - ("Cancun", "cancun"), - ("Prague", "prague"), - ("Osaka", "osaka"), - ] - }, +FORKS: Dict[str, TestHardfork] = { + fork.json_test_name: fork for fork in TestHardfork.discover() } diff --git a/tests/json_infra/conftest.py b/tests/json_infra/conftest.py index 7545579fa5..135a0f8dd4 100644 --- a/tests/json_infra/conftest.py +++ b/tests/json_infra/conftest.py @@ -8,7 +8,6 @@ from typing import ( Callable, Final, - Optional, Self, Set, ) @@ -20,12 +19,14 @@ from _pytest.nodes import Item from filelock import FileLock from git.exc import GitCommandError, InvalidGitRepositoryError -from pytest import Collector, Session, StashKey, fixture +from pytest import Collector, Session, fixture from requests_cache import CachedSession from requests_cache.backends.sqlite import SQLiteCache -from . import TEST_FIXTURES +from . import FORKS, TEST_FIXTURES from .helpers import FixturesFile, FixtureTestItem +from .helpers.select_tests import extract_affected_forks +from .stash_keys import desired_forks_key, fixture_lock try: from xdist import get_xdist_worker_id @@ -52,7 +53,7 @@ def _(path: str | Path) -> Path: def pytest_addoption(parser: Parser) -> None: """ - Accept --evm-trace option in pytest. + Accept custom options in pytest. 
""" parser.addoption( "--optimized", @@ -73,10 +74,33 @@ def pytest_addoption(parser: Parser) -> None: ) parser.addoption( - "--fork", - dest="fork", + "--from", + action="store", + dest="forks_from", + default="", type=str, - help="Run tests for this fork only (e.g., --fork Osaka)", + help="Fill tests from and including the specified fork.", + ) + parser.addoption( + "--until", + action="store", + dest="forks_until", + default="", + type=str, + help="Fill tests until and including the specified fork.", + ) + parser.addoption( + "--fork", + action="store", + dest="single_fork", + default="", + help="Only fill tests for the specified fork.", + ) + parser.addoption( + "--file-list", + action="store", + dest="file_list", + help="Only fill tests for the specified fork.", ) @@ -98,38 +122,54 @@ def pytest_configure(config: Config) -> None: # Replace the function in the module ethereum.trace.set_evm_trace(Eip3155Tracer()) + # Process fork range options + desired_fork = config.getoption("single_fork", "") + forks_from = config.getoption("forks_from", "") + forks_until = config.getoption("forks_until", "") + file_list = config.getoption("file_list", None) + + desired_forks = [] + all_forks = list(FORKS.keys()) + if desired_fork: + if desired_fork not in all_forks: + raise ValueError(f"Unknown fork: {desired_fork}") + desired_forks.append(desired_fork) + elif forks_from or forks_until: + # Determine start and end indices + start_idx = 0 + end_idx = len(all_forks) + + if forks_from: + try: + start_idx = all_forks.index(forks_from) + except ValueError as e: + raise ValueError(f"Unknown fork: {forks_from}") from e -def pytest_collection_modifyitems(config: Config, items: list[Item]) -> None: - """Filter test items based on the specified fork option.""" - desired_fork = config.getoption("fork", None) - if not desired_fork: - return + if forks_until: + try: + # +1 to include the until fork + end_idx = all_forks.index(forks_until) + 1 + except ValueError as e: + raise ValueError(f"Unknown fork: {forks_until}") from e - selected = [] - deselected = [] - - for item in items: - forks_of_test = [m.args[0] for m in item.iter_markers(name="fork")] - if forks_of_test and desired_fork not in forks_of_test: - deselected.append(item) - # Check if the test has a vm test marker - elif any(item.iter_markers(name="vm_test")): - callspec = getattr(item, "callspec", None) - if not callspec or "fork" not in getattr(callspec, "params", {}): - # no fork param on this test. 
We keep the test - selected.append(item) - continue - fork_param = callspec.params["fork"] - if fork_param[0] == desired_fork: - selected.append(item) - else: - deselected.append(item) - else: - selected.append(item) + # Validate the fork range + if start_idx >= end_idx: + raise ValueError(f"{forks_until} is before {forks_from}") - if deselected: - config.hook.pytest_deselected(items=deselected) - items[:] = selected # keep only what matches + # Extract the fork range + desired_forks = all_forks[start_idx:end_idx] + elif file_list: + desired_forks = extract_affected_forks(file_list) + else: + desired_forks = all_forks + + if not any(desired_forks): + print("No fork specific tests will be run!!!") + else: + fork_list_str = ", ".join(desired_forks) + print(f"Running tests for the following forks: {fork_list_str}") + + config.stash[desired_forks_key] = desired_forks class _FixturesDownloader: @@ -235,9 +275,6 @@ def __exit__( self.keep_cache_keys.clear() -fixture_lock = StashKey[Optional[FileLock]]() - - def pytest_sessionstart(session: Session) -> None: """Initialize test fixtures and file locking at session start.""" if get_xdist_worker_id(session) != "master": diff --git a/tests/json_infra/hardfork.py b/tests/json_infra/hardfork.py new file mode 100644 index 0000000000..4be5d67d1b --- /dev/null +++ b/tests/json_infra/hardfork.py @@ -0,0 +1,31 @@ +""" +Test-specific Hardfork subclass. + +Extends the base Hardfork class with test infrastructure properties. +""" + +from ethereum_spec_tools.forks import Hardfork + + +class TestHardfork(Hardfork): + """ + Hardfork subclass with test-specific properties. + + This class extends the base Hardfork class with properties needed + for test infrastructure, keeping test-specific concerns separated + from the core fork metadata. + """ + + @property + def json_test_name(self) -> str: + """ + Name of the hard fork in the test json fixtures. + """ + if self.title_case_name == "Tangerine Whistle": + return "EIP150" + elif self.title_case_name == "Spurious Dragon": + return "EIP158" + elif self.title_case_name == "Constantinople": + return "ConstantinopleFix" + else: + return self.title_case_name diff --git a/tests/json_infra/helpers/fixtures.py b/tests/json_infra/helpers/fixtures.py index 0189b98c39..f9b257f31d 100644 --- a/tests/json_infra/helpers/fixtures.py +++ b/tests/json_infra/helpers/fixtures.py @@ -6,7 +6,7 @@ from typing import Any, Dict, Generator, List, Self, Type from _pytest.nodes import Node -from pytest import Collector, File, Item +from pytest import Collector, Config, File, Item class FixtureTestItem(Item): @@ -59,6 +59,17 @@ def is_format(cls, test_dict: Dict[str, Any]) -> bool: """Return true if the object can be parsed as the fixture type.""" pass + @classmethod + @abstractmethod + def has_desired_fork( + cls, test_dict: Dict[str, Any], config: Config + ) -> bool: + """ + Check if the fork(s) relevant to this item/ + collector are in the desired forks list. 
+ """ + pass + ALL_FIXTURE_TYPES: List[Type[Fixture]] = [] @@ -92,6 +103,11 @@ def collect( for fixture_type in ALL_FIXTURE_TYPES: if not fixture_type.is_format(test_dict): continue + # Check if we should collect this test + if not fixture_type.has_desired_fork( + test_dict, self.config + ): + continue name = key if "::" in name: name = name.split("::")[1] diff --git a/tests/json_infra/helpers/load_blockchain_tests.py b/tests/json_infra/helpers/load_blockchain_tests.py index 5ee4695d1e..ec0e9929e0 100644 --- a/tests/json_infra/helpers/load_blockchain_tests.py +++ b/tests/json_infra/helpers/load_blockchain_tests.py @@ -6,6 +6,7 @@ from unittest.mock import call, patch import pytest +from _pytest.config import Config from ethereum_rlp import rlp from ethereum_rlp.exceptions import RLPException from ethereum_types.numeric import U64 @@ -16,6 +17,7 @@ from ethereum_spec_tools.evm_tools.loaders.fixture_loader import Load from .. import FORKS +from ..stash_keys import desired_forks_key from .exceptional_test_patterns import exceptional_blockchain_test_patterns from .fixtures import Fixture, FixturesFile, FixtureTestItem @@ -72,11 +74,10 @@ def __init__( super().__init__(*args, **kwargs) self.fork_name = self.test_dict["network"] self.add_marker(pytest.mark.fork(self.fork_name)) - self.add_marker("evm_tools") self.add_marker("json_blockchain_tests") - eels_fork = FORKS[self.fork_name]["eels_fork"] + self.eels_fork = FORKS[self.fork_name].short_name test_patterns = exceptional_blockchain_test_patterns( - self.fork_name, eels_fork + self.fork_name, self.eels_fork ) assert self.test_file is not None assert self.test_key is not None @@ -112,10 +113,9 @@ def runtest(self) -> None: f"{self.test_file}[{self.test_key}] doesn't have post state" ) - eels_fork = FORKS[self.fork_name]["eels_fork"] load = Load( self.fork_name, - eels_fork, + self.eels_fork, ) genesis_header = load.json_to_header(json_data["genesisBlockHeader"]) @@ -202,6 +202,16 @@ def is_format(cls, test_dict: Dict[str, Any]) -> bool: return False if "network" not in test_dict: return False - if test_dict["network"] not in FORKS: - return False return True + + @classmethod + def has_desired_fork( + cls, test_dict: Dict[str, Any], config: Config + ) -> bool: + """ + Check if the item fork is in the desired forks list. + """ + desired_forks = config.stash.get(desired_forks_key, None) + if desired_forks is None or test_dict["network"] in desired_forks: + return True + return False diff --git a/tests/json_infra/helpers/load_state_tests.py b/tests/json_infra/helpers/load_state_tests.py index dece8307eb..f53360b3e5 100644 --- a/tests/json_infra/helpers/load_state_tests.py +++ b/tests/json_infra/helpers/load_state_tests.py @@ -3,9 +3,10 @@ import json import sys from io import StringIO -from typing import Any, Dict, Iterable +from typing import Any, Dict, Iterable, List import pytest +from _pytest.config import Config from _pytest.nodes import Item from pytest import Collector @@ -16,6 +17,7 @@ from ethereum_spec_tools.evm_tools.t8n import T8N from .. 
import FORKS +from ..stash_keys import desired_forks_key from .exceptional_test_patterns import ( exceptional_state_test_patterns, ) @@ -45,7 +47,7 @@ def __init__( self.add_marker(pytest.mark.fork(self.fork_name)) self.add_marker("evm_tools") self.add_marker("json_state_tests") - eels_fork = FORKS[fork_name]["eels_fork"] + eels_fork = FORKS[fork_name].short_name test_patterns = exceptional_state_test_patterns(fork_name, eels_fork) if any(x.search(key) for x in test_patterns.slow): self.add_marker("slow") @@ -144,6 +146,10 @@ def runtest(self) -> None: t8n.run_state_test() + if "expectException" in post: + assert 0 in t8n.txs.rejected_txs + return + assert hex_to_bytes(post_hash) == t8n.result.state_root @@ -182,12 +188,18 @@ def test_dict(self) -> Dict[str, Any]: def collect(self) -> Iterable[Item | Collector]: """Collect state test cases inside of this fixture.""" + desired_forks: List[str] = self.config.stash.get(desired_forks_key, []) for test_case in read_test_case( test_file_path=self.test_file, key=self.test_key, test=self.test_dict, ): - if test_case.fork_name not in FORKS: + # The has_desired_fork method is used to skip the entire + # fixture file if it does not feature any of the desired + # forks. The below check is performed on the individual + # test cases within a fixture file in order to keep + # nothing other than the desired forks. + if test_case.fork_name not in desired_forks: continue name = f"{test_case.index}" yield StateTest.from_parent( @@ -197,3 +209,20 @@ def collect(self) -> Iterable[Item | Collector]: fork_name=test_case.fork_name, key=self.test_key, ) + + @classmethod + def has_desired_fork( + cls, test_dict: Dict[str, Any], config: Config + ) -> bool: + """ + Check if the collector fork list has at least + one fork in the desired fork list. + """ + desired_forks = config.stash.get(desired_forks_key, None) + if desired_forks is None: + return True + + for network in test_dict["post"].keys(): + if network in desired_forks: + return True + return False diff --git a/tests/json_infra/helpers/select_tests.py b/tests/json_infra/helpers/select_tests.py new file mode 100644 index 0000000000..95f1b90ab4 --- /dev/null +++ b/tests/json_infra/helpers/select_tests.py @@ -0,0 +1,88 @@ +""" +Targeted test selection based on changed files. + +This module reads a list of changed files and determines which fork +folders have been modified, then provides functions to generate targeted +pytest commands. +""" + +from pathlib import Path +from typing import List + +from .. import TestHardfork + +FORK_MAPPING = { + fork.short_name: fork.json_test_name for fork in TestHardfork.discover() +} + + +def extract_affected_forks(files_path: str) -> List[str]: + """ + Extract fork names from changed file paths read from disk. 
+ + Args: + files_path: Path to file containing changed file paths + (one per line) + + Returns: + List of fork json_test_names that have been affected + + """ + all_forks = [fork.json_test_name for fork in TestHardfork.discover()] + # Read changed files from disk + changed_files_file = Path(files_path) + if not changed_files_file.exists(): + print(f"File list file {files_path} does not exist or is empty!!") + return all_forks + + with open(changed_files_file, "r") as f: + changed_files = [line.strip() for line in f if line.strip()] + + # Extract affected forks + affected_forks = set() + repo_root = Path.cwd() # Assuming running from repo root + + for file_path_str in changed_files: + if not file_path_str or file_path_str.startswith("#"): + # Skip empty lines and comments + continue + + try: + # Normalize the path + file_path = Path(file_path_str) + + # Convert absolute paths to relative + if file_path.is_absolute(): + try: + file_path = file_path.relative_to(repo_root) + except ValueError: + # Path is outside repo, skip it + continue + + except (TypeError, ValueError, OSError): + # Skip invalid paths + continue + + if file_path.is_relative_to("tests/json_infra/"): + # Run all forks if something changes in the test + # framework + return all_forks + if file_path.is_relative_to("src/ethereum_spec_tools/evm_tools"): + # Run all forks if something changes in the evm + # tools + return all_forks + + if file_path.is_relative_to("src/ethereum/"): + parts = Path(file_path).parts + if len(parts) < 4 or parts[2] != "forks": + # Run all tests if something changes in the + # non fork-specific part of src/ethereum + return all_forks + + # Run tests for specific forks + fork_short_name = parts[3] + fork_json_name = FORK_MAPPING.get(fork_short_name) + if fork_json_name: + affected_forks.add(fork_json_name) + + return list(affected_forks) diff --git a/tests/json_infra/stash_keys.py b/tests/json_infra/stash_keys.py new file mode 100644 index 0000000000..021d9a83b5 --- /dev/null +++ b/tests/json_infra/stash_keys.py @@ -0,0 +1,9 @@ +"""Shared StashKey definitions for json_infra tests.""" + +from typing import Optional + +from filelock import FileLock +from pytest import StashKey + +desired_forks_key = StashKey[list[str]]() +fixture_lock = StashKey[Optional[FileLock]]() diff --git a/tests/json_infra/test_ethash.py b/tests/json_infra/test_ethash.py index 86b52432b2..ec2368083c 100644 --- a/tests/json_infra/test_ethash.py +++ b/tests/json_infra/test_ethash.py @@ -44,7 +44,7 @@ @pytest.mark.parametrize("json_fork", POW_FORKS) def test_ethtest_fixtures(json_fork: str) -> None: """Tests ethash proof-of-work validation against ethereum test fixtures.""" - eels_fork = FORKS[json_fork]["eels_fork"] + eels_fork = FORKS[json_fork].short_name fork_module = importlib.import_module(f"ethereum.forks.{eels_fork}.fork") ethereum_tests = load_pow_test_fixtures(json_fork) @@ -79,7 +79,7 @@ def load_pow_test_fixtures(json_fork: str) -> List[Dict[str, Any]]: Loads proof-of-work test fixtures for a specific fork from JSON files. """ - eels_fork = FORKS[json_fork]["eels_fork"] + eels_fork = FORKS[json_fork].short_name header = importlib.import_module( f"ethereum.forks.{eels_fork}.blocks" ).Header @@ -122,7 +122,7 @@ def test_pow_validation_block_headers( Tests proof-of-work validation on real block headers for specific forks. 
""" - eels_fork = FORKS[json_fork]["eels_fork"] + eels_fork = FORKS[json_fork].short_name fork_module = importlib.import_module(f"ethereum.forks.{eels_fork}.fork") block_str_data = cast( diff --git a/tests/json_infra/test_trie.py b/tests/json_infra/test_trie.py index c0de3f1709..e950fbf486 100644 --- a/tests/json_infra/test_trie.py +++ b/tests/json_infra/test_trie.py @@ -31,7 +31,7 @@ def test_trie_secure_hex(fork: str) -> None: """Tests secure trie implementation with hex-encoded test data.""" tests = load_tests("hex_encoded_securetrie_test.json") - eels_fork = FORKS[fork]["eels_fork"] + eels_fork = FORKS[fork].short_name trie_module = importlib.import_module(f"ethereum.forks.{eels_fork}.trie") for name, test in tests.items(): @@ -48,7 +48,7 @@ def test_trie_secure(fork: str) -> None: """Tests secure trie implementation with standard test data.""" tests = load_tests("trietest_secureTrie.json") - eels_fork = FORKS[fork]["eels_fork"] + eels_fork = FORKS[fork].short_name trie_module = importlib.import_module(f"ethereum.forks.{eels_fork}.trie") for name, test in tests.items(): @@ -65,7 +65,7 @@ def test_trie_secure_any_order(fork: str) -> None: """Tests secure trie implementation with any-order test data.""" tests = load_tests("trieanyorder_secureTrie.json") - eels_fork = FORKS[fork]["eels_fork"] + eels_fork = FORKS[fork].short_name trie_module = importlib.import_module(f"ethereum.forks.{eels_fork}.trie") for name, test in tests.items(): @@ -82,7 +82,7 @@ def test_trie(fork: str) -> None: """Tests non-secure trie implementation with standard test data.""" tests = load_tests("trietest.json") - eels_fork = FORKS[fork]["eels_fork"] + eels_fork = FORKS[fork].short_name trie_module = importlib.import_module(f"ethereum.forks.{eels_fork}.trie") for name, test in tests.items(): @@ -99,7 +99,7 @@ def test_trie_any_order(fork: str) -> None: """Tests non-secure trie implementation with any-order test data.""" tests = load_tests("trieanyorder.json") - eels_fork = FORKS[fork]["eels_fork"] + eels_fork = FORKS[fork].short_name trie_module = importlib.import_module(f"ethereum.forks.{eels_fork}.trie") for name, test in tests.items(): diff --git a/tox.ini b/tox.ini index 14e3019322..8534561055 100644 --- a/tox.ini +++ b/tox.ini @@ -49,13 +49,8 @@ commands = pytest \ -m "not slow" \ -n auto --maxprocesses 6 --dist=loadfile \ - --cov-config=pyproject.toml \ - --cov=ethereum \ - --cov-report=term \ - --cov-report "xml:{toxworkdir}/coverage.xml" \ - --no-cov-on-fail \ - --cov-branch \ --basetemp="{temp_dir}/pytest" \ + {posargs} \ tests/json_infra [testenv:py3] From ef89584bd5b1a0aa40052dc49c4ae6d63a74f58d Mon Sep 17 00:00:00 2001 From: Guruprasad Kamath Date: Fri, 21 Nov 2025 15:25:21 -0300 Subject: [PATCH 14/18] fix(tests): ignore expectSection tests and add coverage --- tests/json_infra/helpers/load_blockchain_tests.py | 15 +++++++++++++++ tox.ini | 6 ++++++ 2 files changed, 21 insertions(+) diff --git a/tests/json_infra/helpers/load_blockchain_tests.py b/tests/json_infra/helpers/load_blockchain_tests.py index ec0e9929e0..7da3c2d99f 100644 --- a/tests/json_infra/helpers/load_blockchain_tests.py +++ b/tests/json_infra/helpers/load_blockchain_tests.py @@ -113,6 +113,20 @@ def runtest(self) -> None: f"{self.test_file}[{self.test_key}] doesn't have post state" ) + # Currently, there are 5 tests in the ethereum/tests fixtures + # where we have non block specific exceptions. + # For example: All the blocks process correctly but the final + # block hash provided in the test is not correct. 
Or all the + # blocks process correctly but the post state provided is not + # right. Since these tests do not directly have anything to do + # with the state teansition itself, we skip these + # See src/BlockchainTestsFiller/InvalidBlocks/bcExpectSection + # in ethereum/tests + if "exceptions" in json_data: + pytest.xfail( + f"{self.test_file}[{self.test_key}] has unrelated exceptions" + ) + load = Load( self.fork_name, self.eels_fork, @@ -171,6 +185,7 @@ def runtest(self) -> None: # of all of them. with pytest.raises((EthereumException, RLPException)): add_block_to_chain(chain, json_block, load, mock_pow) + load.fork.close_state(chain.state) return else: add_block_to_chain(chain, json_block, load, mock_pow) diff --git a/tox.ini b/tox.ini index 8534561055..0208dda6df 100644 --- a/tox.ini +++ b/tox.ini @@ -49,6 +49,12 @@ commands = pytest \ -m "not slow" \ -n auto --maxprocesses 6 --dist=loadfile \ + --cov-config=pyproject.toml \ + --cov=ethereum \ + --cov-report=term \ + --cov-report "xml:{toxworkdir}/coverage.xml" \ + --no-cov-on-fail \ + --cov-branch \ --basetemp="{temp_dir}/pytest" \ {posargs} \ tests/json_infra From 7697bf6a3bafa3e6e4addb63a3aaaa262627fc33 Mon Sep 17 00:00:00 2001 From: Guruprasad Kamath Date: Sun, 23 Nov 2025 09:42:26 -0300 Subject: [PATCH 15/18] enhance(tests): refactor exception markers This commit refactors exception markers and marks the EEST static tests as slow --- .../json_infra/helpers/exceptional_test_patterns.py | 4 ++++ tests/json_infra/helpers/load_blockchain_tests.py | 13 +++++-------- tests/json_infra/helpers/load_state_tests.py | 6 +++--- 3 files changed, 12 insertions(+), 11 deletions(-) diff --git a/tests/json_infra/helpers/exceptional_test_patterns.py b/tests/json_infra/helpers/exceptional_test_patterns.py index cebb1a9ef5..658f32d581 100644 --- a/tests/json_infra/helpers/exceptional_test_patterns.py +++ b/tests/json_infra/helpers/exceptional_test_patterns.py @@ -68,6 +68,8 @@ def exceptional_blockchain_test_patterns( f"tests/{ef}/eip2537_bls_12_381_precompiles/test_bls12_pairing\\.py::test_valid\\[fork_{jf}-blockchain_test-inf_pair-\\]", f"tests/{ef}/eip2537_bls_12_381_precompiles/test_bls12_pairing\\.py::test_valid\\[fork_{jf}-blockchain_test-multi_inf_pair-\\]", f"tests/{ef}/eip2935_historical_block_hashes_from_state/test_block_hashes\\.py::test_block_hashes_history\\[fork_{jf}-blockchain_test-full_history_plus_one_check_blockhash_first\\]", + # Static tests from EEST + "tests/json_infra/fixtures/latest_fork_tests/fixtures/blockchain_tests/static", ) # These are tests that are considered to be incorrect, @@ -129,6 +131,8 @@ def exceptional_state_test_patterns( f"tests/{ef}/eip2537_bls_12_381_precompiles/test_bls12_pairing\\.py::test_valid\\[fork_{jf}-state_test-bls_pairing_e(aG1,bG2)=e(G1,abG2)-\\]", f"tests/{ef}/eip2537_bls_12_381_precompiles/test_bls12_pairing\\.py::test_valid\\[fork_{jf}-state_test-inf_pair-\\]", f"tests/{ef}/eip2537_bls_12_381_precompiles/test_bls12_pairing\\.py::test_valid\\[fork_{jf}-state_test-multi_inf_pair-\\]", + # Static tests from EEST + "tests/json_infra/fixtures/latest_fork_tests/fixtures/state_tests/static", ) return TestPatterns( diff --git a/tests/json_infra/helpers/load_blockchain_tests.py b/tests/json_infra/helpers/load_blockchain_tests.py index 7da3c2d99f..90c0a68cda 100644 --- a/tests/json_infra/helpers/load_blockchain_tests.py +++ b/tests/json_infra/helpers/load_blockchain_tests.py @@ -76,19 +76,16 @@ def __init__( self.add_marker(pytest.mark.fork(self.fork_name)) self.add_marker("json_blockchain_tests") 
self.eels_fork = FORKS[self.fork_name].short_name + + # Mark tests with exceptional markers test_patterns = exceptional_blockchain_test_patterns( self.fork_name, self.eels_fork ) - assert self.test_file is not None - assert self.test_key is not None - _identifier = "(" + self.test_file + "|" + self.test_key + ")" - if any( - x.search(self.test_file) for x in test_patterns.expected_fail - ) or any(x.search(_identifier) for x in test_patterns.expected_fail): + if any(x.search(self.nodeid) for x in test_patterns.expected_fail): self.add_marker(pytest.mark.skip("Expected to fail")) - if any(x.search(_identifier) for x in test_patterns.slow): + if any(x.search(self.nodeid) for x in test_patterns.slow): self.add_marker("slow") - if any(x.search(_identifier) for x in test_patterns.big_memory): + if any(x.search(self.nodeid) for x in test_patterns.big_memory): self.add_marker("bigmem") @property diff --git a/tests/json_infra/helpers/load_state_tests.py b/tests/json_infra/helpers/load_state_tests.py index f53360b3e5..e92bfc7f39 100644 --- a/tests/json_infra/helpers/load_state_tests.py +++ b/tests/json_infra/helpers/load_state_tests.py @@ -37,7 +37,6 @@ def __init__( *args: Any, index: int, fork_name: str, - key: str, **kwargs: Any, ) -> None: """Initialize a single test case item.""" @@ -48,8 +47,10 @@ def __init__( self.add_marker("evm_tools") self.add_marker("json_state_tests") eels_fork = FORKS[fork_name].short_name + + # Mark tests with exceptional markers test_patterns = exceptional_state_test_patterns(fork_name, eels_fork) - if any(x.search(key) for x in test_patterns.slow): + if any(x.search(self.nodeid) for x in test_patterns.slow): self.add_marker("slow") @property @@ -207,7 +208,6 @@ def collect(self) -> Iterable[Item | Collector]: name=name, index=test_case.index, fork_name=test_case.fork_name, - key=self.test_key, ) @classmethod From 579955929aabfb015d84d542c79134454b809925 Mon Sep 17 00:00:00 2001 From: Guruprasad Kamath Date: Mon, 24 Nov 2025 11:47:01 -0300 Subject: [PATCH 16/18] fix(tests): provide unique name to tests --- tests/json_infra/helpers/fixtures.py | 5 +---- tests/json_infra/helpers/load_state_tests.py | 2 +- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/tests/json_infra/helpers/fixtures.py b/tests/json_infra/helpers/fixtures.py index f9b257f31d..c1ab5e666a 100644 --- a/tests/json_infra/helpers/fixtures.py +++ b/tests/json_infra/helpers/fixtures.py @@ -108,12 +108,9 @@ def collect( test_dict, self.config ): continue - name = key - if "::" in name: - name = name.split("::")[1] yield fixture_type.from_parent( # type: ignore parent=self, - name=name, + name=key, test_file=str(self.path), test_key=key, ) diff --git a/tests/json_infra/helpers/load_state_tests.py b/tests/json_infra/helpers/load_state_tests.py index e92bfc7f39..cefed17182 100644 --- a/tests/json_infra/helpers/load_state_tests.py +++ b/tests/json_infra/helpers/load_state_tests.py @@ -202,7 +202,7 @@ def collect(self) -> Iterable[Item | Collector]: # nothing other than the desired forks. 
if test_case.fork_name not in desired_forks: continue - name = f"{test_case.index}" + name = f"{test_case.fork_name}::{test_case.index}" yield StateTest.from_parent( parent=self, name=name, From 6a455509f4ff17a6eb52ec1459e57e7a81b6ba86 Mon Sep 17 00:00:00 2001 From: Guruprasad Kamath Date: Thu, 27 Nov 2025 11:11:30 +0100 Subject: [PATCH 17/18] fix(tests): post review changes --- .github/workflows/test.yaml | 2 +- tests/json_infra/conftest.py | 17 ++++++++++------- tests/json_infra/helpers/fixtures.py | 3 ++- tests/json_infra/helpers/select_tests.py | 4 ++-- 4 files changed, 15 insertions(+), 11 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index f421be78f0..8003771214 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -133,7 +133,7 @@ jobs: echo "List of files changed in the PR" cat $FILE_LIST - name: Run json infra tests - run: tox -e json_infra -- --file-list=${{ steps.get-changed-files.outputs.file_list }} + run: tox -e json_infra -- --file-list="${{ steps.get-changed-files.outputs.file_list }}" - name: Upload coverage reports to Codecov uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 with: diff --git a/tests/json_infra/conftest.py b/tests/json_infra/conftest.py index 135a0f8dd4..7c3bae1171 100644 --- a/tests/json_infra/conftest.py +++ b/tests/json_infra/conftest.py @@ -14,12 +14,11 @@ import git import requests_cache -from _pytest.config import Config from _pytest.config.argparsing import Parser from _pytest.nodes import Item from filelock import FileLock from git.exc import GitCommandError, InvalidGitRepositoryError -from pytest import Collector, Session, fixture +from pytest import Collector, Config, Session, fixture from requests_cache import CachedSession from requests_cache.backends.sqlite import SQLiteCache @@ -79,7 +78,7 @@ def pytest_addoption(parser: Parser) -> None: dest="forks_from", default="", type=str, - help="Fill tests from and including the specified fork.", + help="Run tests from and including the specified fork.", ) parser.addoption( "--until", @@ -87,20 +86,24 @@ def pytest_addoption(parser: Parser) -> None: dest="forks_until", default="", type=str, - help="Fill tests until and including the specified fork.", + help="Run tests until and including the specified fork.", ) parser.addoption( "--fork", action="store", dest="single_fork", default="", - help="Only fill tests for the specified fork.", + help="Only run tests for the specified fork.", ) parser.addoption( "--file-list", action="store", dest="file_list", - help="Only fill tests for the specified fork.", + help=( + "Only run tests relevant to a list of file paths in the " + "repository. This option specifies the path to a file which " + "contains a list of relevant paths." 
+ ), ) @@ -159,7 +162,7 @@ def pytest_configure(config: Config) -> None: # Extract the fork range desired_forks = all_forks[start_idx:end_idx] elif file_list: - desired_forks = extract_affected_forks(file_list) + desired_forks = extract_affected_forks(config.rootpath, file_list) else: desired_forks = all_forks diff --git a/tests/json_infra/helpers/fixtures.py b/tests/json_infra/helpers/fixtures.py index c1ab5e666a..38c6b2f308 100644 --- a/tests/json_infra/helpers/fixtures.py +++ b/tests/json_infra/helpers/fixtures.py @@ -86,7 +86,8 @@ def data(self) -> Dict[str, Any]: def clear_data_cache(self) -> None: """Drop the data cache.""" - self.__dict__.pop("data", None) + if hasattr(self, "data"): + del self.data def collect( self: Self, diff --git a/tests/json_infra/helpers/select_tests.py b/tests/json_infra/helpers/select_tests.py index 95f1b90ab4..5ff1c25c25 100644 --- a/tests/json_infra/helpers/select_tests.py +++ b/tests/json_infra/helpers/select_tests.py @@ -16,11 +16,12 @@ } -def extract_affected_forks(files_path: str) -> List[str]: +def extract_affected_forks(repo_root: Path, files_path: str) -> List[str]: """ Extract fork names from changed file paths read from disk. Args: + repo_root: Root directory of the repository config. files_path: Path to file containing changed file paths (one per line) @@ -40,7 +41,6 @@ def extract_affected_forks(files_path: str) -> List[str]: # Extract affected forks affected_forks = set() - repo_root = Path.cwd() # Assuming running from repo root for file_path_str in changed_files: if not file_path_str or file_path_str.startswith("#"): From 09bb76cd62b55f2b44cd986c40019c3b0d030a98 Mon Sep 17 00:00:00 2001 From: Guruprasad Kamath Date: Fri, 28 Nov 2025 12:25:59 +0100 Subject: [PATCH 18/18] fix(tests): set BASE_SHA to merge base --- .github/workflows/test.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 8003771214..55f62754ed 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -115,11 +115,11 @@ jobs: id: get-changed-files run: | if [ "${{ github.event_name }}" = "pull_request" ]; then - BASE_SHA="${{ github.event.pull_request.base.sha }}" + BASE_SHA=$(git merge-base "${{ github.event.pull_request.base.sha }}" "${{ github.event.pull_request.head.sha }}") HEAD_SHA="${{ github.event.pull_request.head.sha }}" else # On push or force push to the feature branch - BASE_SHA="${{ github.event.before }}" + BASE_SHA=$(git merge-base "${{ github.event.before }}" "${{ github.sha }}") HEAD_SHA="${{ github.sha }}" fi
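
As a companion to the selective test runs introduced above, the following is a minimal, standalone sketch of the idea behind extract_affected_forks in tests/json_infra/helpers/select_tests.py: changes under the test framework or the evm tools select every fork, while changes under src/ethereum/forks/<fork>/ select only that fork's JSON test name. The FORK_NAMES table and the example paths are illustrative stand-ins, not the repository's actual fork list or API.

    from pathlib import PurePosixPath
    from typing import Iterable, List

    # Illustrative subset of the short-name -> JSON-test-name mapping that the
    # real helper builds from TestHardfork.discover(); not the full fork list.
    FORK_NAMES = {
        "frontier": "Frontier",
        "tangerine_whistle": "EIP150",
        "spurious_dragon": "EIP158",
        "constantinople": "ConstantinopleFix",
        "prague": "Prague",
    }
    ALL_FORKS = list(FORK_NAMES.values())


    def affected_forks(changed_paths: Iterable[str]) -> List[str]:
        """Reduce changed repository paths to the forks whose tests must run."""
        selected = set()
        for raw in changed_paths:
            # Touching the test framework or the evm tools invalidates all forks.
            if raw.startswith("tests/json_infra/") or raw.startswith(
                "src/ethereum_spec_tools/evm_tools"
            ):
                return ALL_FORKS
            if raw.startswith("src/ethereum/"):
                parts = PurePosixPath(raw).parts
                # Fork-specific code lives under src/ethereum/forks/<fork>/...;
                # anything else in src/ethereum affects every fork.
                if len(parts) < 4 or parts[2] != "forks":
                    return ALL_FORKS
                json_name = FORK_NAMES.get(parts[3])
                if json_name is not None:
                    selected.add(json_name)
        return sorted(selected)


    if __name__ == "__main__":
        print(affected_forks(["src/ethereum/forks/prague/vm/gas.py"]))  # ['Prague']
        print(affected_forks(["tests/json_infra/conftest.py"]))         # all forks
        print(affected_forks(["docs/readme.md"]))                       # []

An empty result corresponds to the "No fork specific tests will be run" case reported by conftest.py.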