diff --git a/.github/scripts/check-hive-results.sh b/.github/scripts/check-hive-results.sh new file mode 100755 index 00000000000..d07efd1afcd --- /dev/null +++ b/.github/scripts/check-hive-results.sh @@ -0,0 +1,192 @@ +#!/usr/bin/env bash + +# Verifies Hive JSON results, prints failing tests, copies related logs, +# and updates the GitHub summary to surface the failures in the workflow UI. + +set -euo pipefail + +if ! command -v jq >/dev/null 2>&1; then + echo "jq is required to parse Hive results but was not found in PATH" + exit 1 +fi + +results_dir="${1:-src/results}" + +if [ ! -d "$results_dir" ]; then + echo "Hive results directory '${results_dir}' not found" + exit 1 +fi + +if ! results_dir="$(cd "${results_dir}" >/dev/null 2>&1 && pwd -P)"; then + echo "Failed to resolve absolute path for Hive results directory" + exit 1 +fi + +results_parent="$(dirname "${results_dir}")" +workspace_logs_dir="" +if [ -d "${results_parent}/workspace/logs" ]; then + workspace_logs_dir="$(cd "${results_parent}/workspace/logs" >/dev/null 2>&1 && pwd -P)" +fi + +shopt -s nullglob +json_files=("${results_dir}"/*.json) +shopt -u nullglob + +if [ ${#json_files[@]} -eq 0 ]; then + echo "No Hive JSON result files found in ${results_dir}" + exit 1 +fi + +failures=0 +failed_logs_root="${results_dir}/failed_logs" +rm -rf "${failed_logs_root}" +mkdir -p "${failed_logs_root}" + +for json_file in "${json_files[@]}"; do + if [[ "${json_file}" == *"hive.json" ]]; then + continue + fi + + suite_name="$(jq -r '.name // empty' "${json_file}")" + failed_cases="$(jq '[.testCases[]? | select(.summaryResult.pass != true)] | length' "${json_file}")" + + if [ "${failed_cases}" -gt 0 ]; then + echo "Detected ${failed_cases} failing test case(s) in ${suite_name:-$(basename "${json_file}")}" + failure_list="$( + jq -r ' + .testCases[]? + | select(.summaryResult.pass != true) + | . as $case + | ($case.summaryResult // {}) as $summary + | ($summary.message // $summary.reason // $summary.error // "") as $message + | (if $summary.log? + then "log lines " + + (($summary.log.begin // "?") | tostring) + + "-" + + (($summary.log.end // "?") | tostring) + else "" + end) as $log_hint + | (if $message != "" then $message else $log_hint end) as $detail + | (if $case.clientInfo? + then ($case.clientInfo + | to_entries + | map((.value.name // .key) + ": " + (.value.logFile // "unknown log")) + | join("; ")) + else "" + end) as $clients + | "- " + ($case.name // "unknown test") + + (if $detail != "" then ": " + $detail else "" end) + + (if $clients != "" then " (client logs: " + $clients + ")" else "" end) + ' "${json_file}" + )" + + printf '%s\n' "${failure_list}" + + if [ -n "${GITHUB_STEP_SUMMARY:-}" ]; then + { + echo "### Hive failures: ${suite_name:-$(basename "${json_file}" .json)}" + printf '%s\n' "${failure_list}" + echo + } >> "${GITHUB_STEP_SUMMARY}" + fi + + suite_slug_raw="${suite_name:-$(basename "${json_file}" .json)}" + suite_slug="$(printf '%s' "${suite_slug_raw}" | tr '[:upper:]' '[:lower:]')" + suite_slug="$(printf '%s' "${suite_slug}" | sed -E 's/[^a-z0-9._-]+/-/g')" + suite_slug="${suite_slug#-}" + suite_slug="${suite_slug%-}" + suite_dir="${failed_logs_root}/${suite_slug:-suite}" + mkdir -p "${suite_dir}" + + { + printf '%s\n' "Detected ${failed_cases} failing test case(s) in ${suite_name:-$(basename "${json_file}")}" + printf '%s\n' "${failure_list}" + echo + } >> "${suite_dir}/failed-tests.txt" + + cp "${json_file}" "${suite_dir}/" + + suite_logs_output="$( + jq -r ' + [ + .simLog?, + .testDetailsLog?, + (.testCases[]? | select(.summaryResult.pass != true) | .clientInfo? | to_entries? // [] | map(.value.logFile? // empty) | .[]), + (.testCases[]? | select(.summaryResult.pass != true) | .summaryResult.logFile?), + (.testCases[]? | select(.summaryResult.pass != true) | .logFile?) + ] + | map(select(. != null and . != "")) + | unique + | .[] + ' "${json_file}" 2>/dev/null || true + )" + + if [ -n "${suite_logs_output}" ]; then + while IFS= read -r log_rel; do + [ -z "${log_rel}" ] && continue + + log_path="" + if [[ "${log_rel}" == /* ]]; then + if [ -f "${log_rel}" ]; then + log_path="${log_rel}" + fi + else + candidate_paths=( + "${results_dir}/${log_rel}" + "${results_dir}/logs/${log_rel}" + ) + if [ -n "${workspace_logs_dir}" ]; then + candidate_paths+=("${workspace_logs_dir}/${log_rel}") + fi + + for candidate in "${candidate_paths[@]}"; do + if [ -f "${candidate}" ]; then + log_path="${candidate}" + break + fi + done + fi + + if [ -z "${log_path}" ] && [[ "${log_rel}" != /* ]]; then + search_roots=("${results_dir}") + if [ -d "${results_dir}/logs" ]; then + search_roots+=("${results_dir}/logs") + fi + if [ -n "${workspace_logs_dir}" ]; then + search_roots+=("${workspace_logs_dir}") + fi + + for search_root in "${search_roots[@]}"; do + [ -d "${search_root}" ] || continue + found_log="$(find "${search_root}" -type f -name "$(basename "${log_rel}")" -print -quit 2>/dev/null || true)" + if [ -n "${found_log}" ]; then + log_path="${found_log}" + break + fi + done + fi + + if [ -n "${log_path}" ]; then + target_path="${suite_dir}/${log_rel}" + mkdir -p "$(dirname "${target_path}")" + if [ ! -f "${target_path}" ]; then + cp "${log_path}" "${target_path}" + fi + else + echo "Referenced log '${log_rel}' not found for suite ${suite_name:-$(basename "${json_file}")}" + fi + done <<< "${suite_logs_output}" + fi + + echo "Saved Hive failure artifacts to ${suite_dir}" + + failures=$((failures + failed_cases)) + fi +done + +if [ "${failures}" -gt 0 ]; then + echo "Hive reported ${failures} failing test cases in total" + exit 1 +fi + +echo "Hive reported no failing test cases." diff --git a/.github/workflows/pr-main_l1.yaml b/.github/workflows/pr-main_l1.yaml index c352ab07c63..ee880531e82 100644 --- a/.github/workflows/pr-main_l1.yaml +++ b/.github/workflows/pr-main_l1.yaml @@ -64,6 +64,7 @@ jobs: make test - name: Run Blockchain EF tests + if: ${{ github.event_name != 'merge_group' }} run: | make -C tooling/ef_tests/blockchain test @@ -143,55 +144,49 @@ jobs: include: - name: "Rpc Compat tests" simulation: ethereum/rpc-compat - limit: "" + limit: "rpc-compat/(debug_[^/]+/.*|eth_blobBaseFee/.*|eth_blockNumber/.*|eth_call/.*|eth_chainId/.*|eth_createAccessList/.*|eth_estimateGas/.*|eth_feeHistory/.*|eth_getBalance/.*|eth_getBlockByHash/.*|eth_getBlockByNumber/.*|eth_getBlockReceipts/.*|eth_getBlockTransactionCountByHash/.*|eth_getBlockTransactionCountByNumber/.*|eth_getCode/.*|eth_getLogs/.*|eth_getProof/.*|eth_getStorageAt/.*|eth_getTransactionByBlockHashAndIndex/.*|eth_getTransactionByBlockNumberAndIndex/.*|eth_getTransactionByHash/.*|eth_getTransactionCount/.*|eth_getTransactionReceipt/.*|eth_sendRawTransaction/.*)" hive_repository: lambdaclass/hive hive_version: 115f4d6ef1bdd2bfcabe29ec60424f6327e92f43 artifact_prefix: rpc_compat - name: "Devp2p tests" simulation: devp2p - limit: discv4|eth|snap/Ping|Findnode/WithoutEndpointProof|Findnode/PastExpiration|Amplification|Status|StorageRanges|ByteCodes|GetBlockHeaders|SimultaneousRequests|SameRequestID|ZeroRequestID|GetBlockBodies|MaliciousHandshake|MaliciousStatus|Transaction|NewPooledTxs|GetBlockReceipts|LargeTxRequest|InvalidTxs|BlockRangeUpdate - # AccountRange and GetTrieNodes don't pass anymore. + limit: discv4|eth|snap/Ping|Findnode/WithoutEndpointProof|Findnode/PastExpiration|Amplification|Status|StorageRanges|ByteCodes|GetBlockHeaders|SimultaneousRequests|SameRequestID|ZeroRequestID|GetBlockBodies|MaliciousHandshake|MaliciousStatus|NewPooledTxs|GetBlockReceipts|BlockRangeUpdate|GetTrieNodes # Findnode/BasicFindnode fails due to packets being processed out of order # Findnode/UnsolicitedNeighbors flaky in CI very occasionally. When fixed replace all "Findnode/" with "Findnode" - hive_repository: lambdaclass/hive - hive_version: 115f4d6ef1bdd2bfcabe29ec60424f6327e92f43 + hive_repository: ethereum/hive + hive_version: c7deebe0c604248e90f27de51f6037d4b7b8c5b5 artifact_prefix: devp2p - name: "Engine Auth and EC tests" simulation: ethereum/engine limit: engine-(auth|exchange-capabilities)/ hive_repository: ethereum/hive - hive_version: 2b7a9c007770b10cb1a242a6c6de88c87a383e5a + hive_version: c7deebe0c604248e90f27de51f6037d4b7b8c5b5 artifact_prefix: engine_auth_ec - - name: "Cancun Engine tests" - simulation: ethereum/engine - limit: "engine-cancun" - hive_repository: ethereum/hive - hive_version: 2b7a9c007770b10cb1a242a6c6de88c87a383e5a - artifact_prefix: engine_cancun + # - name: "Cancun Engine tests" + # simulation: ethereum/engine + # limit: "engine-cancun" + # hive_repository: ethereum/hive + # hive_version: c7deebe0c604248e90f27de51f6037d4b7b8c5b5 + # artifact_prefix: engine_cancun - name: "Paris Engine tests" simulation: ethereum/engine limit: "engine-api" hive_repository: ethereum/hive - hive_version: 2b7a9c007770b10cb1a242a6c6de88c87a383e5a + hive_version: c7deebe0c604248e90f27de51f6037d4b7b8c5b5 artifact_prefix: engine_paris - name: "Engine withdrawal tests" simulation: ethereum/engine limit: "engine-withdrawals/Corrupted Block Hash Payload|Empty Withdrawals|engine-withdrawals test loader|GetPayloadBodies|GetPayloadV2 Block Value|Max Initcode Size|Sync after 2 blocks - Withdrawals on Genesis|Withdraw many accounts|Withdraw to a single account|Withdraw to two accounts|Withdraw zero amount|Withdraw many accounts|Withdrawals Fork on Block 1 - 1 Block Re-Org|Withdrawals Fork on Block 1 - 8 Block Re-Org NewPayload|Withdrawals Fork on Block 2|Withdrawals Fork on Block 3|Withdrawals Fork on Block 8 - 10 Block Re-Org NewPayload|Withdrawals Fork on Canonical Block 8 / Side Block 7 - 10 Block Re-Org [^S]|Withdrawals Fork on Canonical Block 8 / Side Block 9 - 10 Block Re-Org [^S]" hive_repository: ethereum/hive - hive_version: 2b7a9c007770b10cb1a242a6c6de88c87a383e5a + hive_version: c7deebe0c604248e90f27de51f6037d4b7b8c5b5 artifact_prefix: engine_withdrawals - - name: "Sync full" - simulation: ethereum/sync - limit: "" - hive_repository: ethereum/hive - hive_version: 2b7a9c007770b10cb1a242a6c6de88c87a383e5a - artifact_prefix: sync_full - - name: "Sync snap" - simulation: ethereum/sync - limit: "" - hive_repository: ethereum/hive - hive_version: 2b7a9c007770b10cb1a242a6c6de88c87a383e5a - artifact_prefix: sync_snap + # Investigate this test + # - name: "Sync" + # simulation: ethereum/sync + # limit: "" + # hive_repository: ethereum/hive + # hive_version: c7deebe0c604248e90f27de51f6037d4b7b8c5b5 + # artifact_prefix: sync steps: - name: Free Disk Space (Ubuntu) uses: jlumbroso/free-disk-space@v1.3.1 @@ -245,8 +240,20 @@ jobs: client: ethrex client_config: ${{ steps.client-config.outputs.config }} extra_flags: ${{ steps.hive-flags.outputs.flags }} - workflow_artifact_upload: true - workflow_artifact_prefix: ${{ matrix.artifact_prefix }} + + - name: Check Hive Results For Failures + id: verify-hive-results + if: ${{ success() }} + shell: bash + run: ./.github/scripts/check-hive-results.sh src/results + + - name: Upload Hive Failure Logs + if: ${{ failure() && steps.verify-hive-results.conclusion == 'failure' }} + uses: actions/upload-artifact@v4 + with: + name: hive_failed_logs_${{ matrix.artifact_prefix }} + path: src/results/failed_logs + if-no-files-found: warn # The purpose of this job is to add it as a required check in GitHub so that we don't have to add every individual job as a required check all-tests: