
Commit 48f7751

ci(l1): reenable engine-cancun hive simulation (#4876)
**Motivation**

Improve Hive test coverage.

**Description**

- Adds back the Cancun engine tests. This required lowering the simulation parallelism to 4; not sure why.
1 parent e193d3c commit 48f7751

File tree

- .github/scripts/check-hive-results.sh
- .github/workflows/pr-main_l1.yaml

2 files changed (+216 -31 lines)

.github/scripts/check-hive-results.sh

Lines changed: 209 additions & 4 deletions
@@ -10,6 +10,21 @@ if ! command -v jq >/dev/null 2>&1; then
   exit 1
 fi
 
+if ! command -v python3 >/dev/null 2>&1; then
+  echo "python3 is required to process Hive client logs but was not found in PATH"
+  exit 1
+fi
+
+slugify() {
+  local input="${1:-}"
+  local lowered trimmed
+  lowered="$(printf '%s' "${input}" | tr '[:upper:]' '[:lower:]')"
+  trimmed="$(printf '%s' "${lowered}" | sed -E 's/[^a-z0-9._-]+/-/g')"
+  trimmed="${trimmed#-}"
+  trimmed="${trimmed%-}"
+  printf '%s' "${trimmed}"
+}
+
 results_dir="${1:-src/results}"
 
 if [ ! -d "$results_dir" ]; then
@@ -86,15 +101,13 @@ for json_file in "${json_files[@]}"; do
     {
       echo "### Hive failures: ${suite_name:-$(basename "${json_file}" .json)}"
       printf '%s\n' "${failure_list}"
+      echo "Note: Hive scenarios may include multiple ethrex clients, so each failing case can have more than one log snippet."
       echo
     } >> "${GITHUB_STEP_SUMMARY}"
   fi
 
   suite_slug_raw="${suite_name:-$(basename "${json_file}" .json)}"
-  suite_slug="$(printf '%s' "${suite_slug_raw}" | tr '[:upper:]' '[:lower:]')"
-  suite_slug="$(printf '%s' "${suite_slug}" | sed -E 's/[^a-z0-9._-]+/-/g')"
-  suite_slug="${suite_slug#-}"
-  suite_slug="${suite_slug%-}"
+  suite_slug="$(slugify "${suite_slug_raw}")"
   suite_dir="${failed_logs_root}/${suite_slug:-suite}"
   mkdir -p "${suite_dir}"
 
@@ -178,6 +191,198 @@ for json_file in "${json_files[@]}"; do
     done <<< "${suite_logs_output}"
   fi
 
+  client_case_entries="$(
+    jq -r '
+      .testCases
+      | to_entries[]
+      | select(.value.summaryResult.pass != true)
+      | . as $case_entry
+      | ($case_entry.value.clientInfo? // {}) | to_entries[]
+      | [
+          .value.logFile // "",
+          ($case_entry.value.name // ("case-" + $case_entry.key)),
+          $case_entry.key,
+          ($case_entry.value.start // ""),
+          ($case_entry.value.end // ""),
+          .key
+        ]
+      | @tsv
+    ' "${json_file}" 2>/dev/null || true
+  )"
+  generated_client_snippets=0
+  if [ -n "${client_case_entries}" ]; then
+    client_logs_dir="${suite_dir}/client_logs"
+    mkdir -p "${client_logs_dir}"
+
+    while IFS= read -r client_entry; do
+      [ -n "${client_entry}" ] || continue
+      IFS=$'\t' read -r client_log_rel raw_case_name case_id case_start case_end client_id <<< "${client_entry}"
+
+      if [ -z "${client_log_rel}" ] || [ -z "${case_start}" ] || [ -z "${case_end}" ]; then
+        continue
+      fi
+
+      log_copy_path="${suite_dir}/${client_log_rel}"
+      if [ ! -f "${log_copy_path}" ]; then
+        continue
+      fi
+
+      case_slug="$(slugify "${raw_case_name}")"
+      if [ -n "${case_slug}" ]; then
+        case_slug="${case_slug}-case-${case_id}"
+      else
+        case_slug="case-${case_id}"
+      fi
+
+      client_slug="$(slugify "${client_id}")"
+      if [ -z "${client_slug}" ]; then
+        client_slug="client"
+      fi
+
+      case_dir="${client_logs_dir}/${case_slug}"
+      mkdir -p "${case_dir}"
+      snippet_path="${case_dir}/${client_slug}.log"
+
+      python3 - "${log_copy_path}" "${snippet_path}" "${raw_case_name}" "${case_start}" "${case_end}" "${client_id}" "${client_log_rel}" <<'PY'
+import sys
+from datetime import datetime, timedelta
+from pathlib import Path
+
+FORMATS = ("%Y-%m-%dT%H:%M:%S.%fZ", "%Y-%m-%dT%H:%M:%SZ")
+CONTEXT_SECONDS = 2
+PREFETCH_LIMIT = 50
+
+def normalise_timestamp_str(value):
+    if not value or not value.endswith("Z"):
+        return value
+    prefix = value[:-1]
+    if "." not in prefix:
+        return value
+    base, frac = prefix.split(".", 1)
+    frac_digits = "".join(ch for ch in frac if ch.isdigit())
+    if not frac_digits:
+        return f"{base}.000000Z"
+    frac_digits = (frac_digits + "000000")[:6]
+    return f"{base}.{frac_digits}Z"
+
+def parse_timestamp(value):
+    if not value:
+        return None
+    value = normalise_timestamp_str(value)
+    for fmt in FORMATS:
+        try:
+            return datetime.strptime(value, fmt)
+        except ValueError:
+            continue
+    return None
+
+def timestamp_from_line(line):
+    if not line:
+        return None
+    token = line.split(" ", 1)[0]
+    if not token or not token[0].isdigit():
+        return None
+    token = normalise_timestamp_str(token)
+    for fmt in FORMATS:
+        try:
+            return datetime.strptime(token, fmt)
+        except ValueError:
+            continue
+    return None
+
+log_path = Path(sys.argv[1])
+output_path = Path(sys.argv[2])
+case_name = sys.argv[3]
+case_start_raw = sys.argv[4]
+case_end_raw = sys.argv[5]
+client_id = sys.argv[6] or "unknown"
+client_log_rel = sys.argv[7]
+
+try:
+    log_content = log_path.read_text(encoding="utf-8", errors="replace").splitlines(keepends=True)
+except Exception as exc:
+    output_path.parent.mkdir(parents=True, exist_ok=True)
+    output_path.write_text(f"# Failed to read log '{log_path}': {exc}\n", encoding="utf-8")
+    sys.exit(0)
+
+start_ts = parse_timestamp(case_start_raw)
+end_ts = parse_timestamp(case_end_raw)
+
+fallback_reason = None
+if not start_ts or not end_ts or end_ts < start_ts:
+    fallback_reason = "Unable to determine reliable time window from test metadata."
+else:
+    start_ts = start_ts - timedelta(seconds=CONTEXT_SECONDS)
+    end_ts = end_ts + timedelta(seconds=CONTEXT_SECONDS)
+
+captured_lines = []
+prefetch = []
+current_ts = None
+capturing = False
+
+if not fallback_reason:
+    for line in log_content:
+        ts = timestamp_from_line(line)
+        if ts is not None:
+            current_ts = ts
+
+        if not capturing:
+            prefetch.append(line)
+            if len(prefetch) > PREFETCH_LIMIT:
+                prefetch.pop(0)
+
+        in_window = current_ts is not None and start_ts <= current_ts <= end_ts
+
+        if in_window:
+            if not capturing:
+                captured_lines.extend(prefetch)
+                capturing = True
+            captured_lines.append(line)
+        elif capturing and current_ts is not None and current_ts > end_ts:
+            break
+        elif capturing:
+            captured_lines.append(line)
+
+    if not captured_lines:
+        fallback_reason = "No timestamped log lines matched the computed time window."
+
+if fallback_reason:
+    captured_lines = log_content
+
+header_lines = [
+    f"# Test: {case_name}\n",
+    f"# Client ID: {client_id}\n",
+    f"# Source log: {client_log_rel}\n",
+]
+
+if start_ts and end_ts and not fallback_reason:
+    header_lines.append(
+        f"# Time window (UTC): {case_start_raw} .. {case_end_raw} (with ±{CONTEXT_SECONDS}s context)\n"
+    )
+else:
+    header_lines.append("# Time window (UTC): unavailable\n")
+
+if fallback_reason:
+    header_lines.append(f"# NOTE: {fallback_reason}\n")
+
+header_lines.append("\n")
+
+output_path.parent.mkdir(parents=True, exist_ok=True)
+with output_path.open("w", encoding="utf-8") as dst:
+    dst.writelines(header_lines)
+    dst.writelines(captured_lines)
+PY
+
+      if [ -s "${snippet_path}" ]; then
+        generated_client_snippets=$((generated_client_snippets + 1))
+      fi
+    done <<< "${client_case_entries}"
+  fi
+
+  if [ "${generated_client_snippets}" -gt 0 ]; then
+    echo "Generated ${generated_client_snippets} client log snippet(s) in ${client_logs_dir}"
+  fi
+
   echo "Saved Hive failure artifacts to ${suite_dir}"
 
   failures=$((failures + failed_cases))
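
For orientation, the new jq + python3 pipeline above assumes each suite JSON exposes its test cases under `.testCases`, keyed by case id, with `summaryResult.pass`, `name`, `start`/`end` timestamps, and a `clientInfo` map whose entries carry a `logFile` path. The sketch below is not part of the commit: the layout is inferred from the jq query above and the field values are purely illustrative. It condenses the two stages (case selection in jq, log slicing in the python3 heredoc) into one loop to show the data flow, including the ±2 s padding applied around each failing case.

```python
# Minimal sketch (assumed, simplified Hive suite JSON; values are illustrative).
import json
from datetime import datetime, timedelta

SUITE_JSON = """
{
  "testCases": {
    "1": {
      "name": "engine-cancun example case",
      "start": "2024-01-01T12:00:00.123Z",
      "end": "2024-01-01T12:00:05.456Z",
      "summaryResult": {"pass": false},
      "clientInfo": {
        "abc123": {"logFile": "ethrex/client-abc123.log"}
      }
    }
  }
}
"""

CONTEXT_SECONDS = 2  # same +/-2 s padding the script applies around each case


def parse_ts(value: str) -> datetime:
    # Hive-style timestamps look like 2024-01-01T12:00:00.123Z; drop the Z and parse.
    return datetime.strptime(value.rstrip("Z"), "%Y-%m-%dT%H:%M:%S.%f")


suite = json.loads(SUITE_JSON)
for case_id, case in suite["testCases"].items():
    if case.get("summaryResult", {}).get("pass") is True:
        continue  # only failing cases get log snippets
    window_start = parse_ts(case["start"]) - timedelta(seconds=CONTEXT_SECONDS)
    window_end = parse_ts(case["end"]) + timedelta(seconds=CONTEXT_SECONDS)
    for client_id, info in case.get("clientInfo", {}).items():
        print(
            f"case {case_id} ({case['name']}) -> {info['logFile']}: "
            f"keep log lines between {window_start} and {window_end}"
        )
```

In the actual script the selection happens in jq (emitting one TSV row per failing case/client pair) and the slicing happens in the embedded Python, which also keeps up to 50 prefetched lines of context and falls back to the full log when no timestamps match.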

.github/workflows/pr-main_l1.yaml

Lines changed: 7 additions & 27 deletions
@@ -146,55 +146,35 @@ jobs:
             simulation: ethereum/rpc-compat
             # https://github.com/ethereum/execution-apis/pull/627 changed the simulation to use a pre-merge genesis block, so we need to pin to a commit before that
             buildarg: "branch=d08382ae5c808680e976fce4b73f4ba91647199b"
-            hive_repository: ethereum/hive
-            hive_version: 7709e5892146c793307da072e1593f48039a7e4b
             artifact_prefix: rpc_compat
           - name: "Devp2p tests"
             simulation: devp2p
             limit: discv4|eth|snap/Ping|Findnode/WithoutEndpointProof|Findnode/PastExpiration|Amplification|Status|StorageRanges|ByteCodes|GetBlockHeaders|SimultaneousRequests|SameRequestID|ZeroRequestID|GetBlockBodies|MaliciousHandshake|MaliciousStatus|NewPooledTxs|GetBlockReceipts|BlockRangeUpdate|GetTrieNodes
             # Findnode/BasicFindnode fails due to packets being processed out of order
             # Findnode/UnsolicitedNeighbors flaky in CI very occasionally. When fixed replace all "Findnode/<test>" with "Findnode"
-            hive_repository: ethereum/hive
-            hive_version: 7709e5892146c793307da072e1593f48039a7e4b
             artifact_prefix: devp2p
           - name: "Engine Auth and EC tests"
             simulation: ethereum/engine
             limit: engine-(auth|exchange-capabilities)/
-            hive_repository: ethereum/hive
-            hive_version: 7709e5892146c793307da072e1593f48039a7e4b
             artifact_prefix: engine_auth_ec
-          # - name: "Cancun Engine tests"
-          #   simulation: ethereum/engine
-          #   limit: "engine-cancun"
-          #   hive_repository: ethereum/hive
-          #   hive_version: 7709e5892146c793307da072e1593f48039a7e4b
-          #   artifact_prefix: engine_cancun
+          - name: "Cancun Engine tests"
+            simulation: ethereum/engine
+            limit: "engine-cancun"
+            artifact_prefix: engine_cancun
           - name: "Paris Engine tests"
             simulation: ethereum/engine
             limit: "engine-api"
-            hive_repository: ethereum/hive
-            hive_version: 7709e5892146c793307da072e1593f48039a7e4b
             artifact_prefix: engine_paris
           - name: "Engine withdrawal tests"
             simulation: ethereum/engine
             limit: "engine-withdrawals/Corrupted Block Hash Payload|Empty Withdrawals|engine-withdrawals test loader|GetPayloadBodies|GetPayloadV2 Block Value|Max Initcode Size|Sync after 2 blocks - Withdrawals on Genesis|Withdraw many accounts|Withdraw to a single account|Withdraw to two accounts|Withdraw zero amount|Withdraw many accounts|Withdrawals Fork on Block 1 - 1 Block Re-Org|Withdrawals Fork on Block 1 - 8 Block Re-Org NewPayload|Withdrawals Fork on Block 2|Withdrawals Fork on Block 3|Withdrawals Fork on Block 8 - 10 Block Re-Org NewPayload|Withdrawals Fork on Canonical Block 8 / Side Block 7 - 10 Block Re-Org [^S]|Withdrawals Fork on Canonical Block 8 / Side Block 9 - 10 Block Re-Org [^S]"
-            hive_repository: ethereum/hive
-            hive_version: 7709e5892146c793307da072e1593f48039a7e4b
             artifact_prefix: engine_withdrawals
           # Investigate this test
           # - name: "Sync"
           #   simulation: ethereum/sync
           #   limit: ""
-          #   hive_repository: ethereum/hive
-          #   hive_version: 7709e5892146c793307da072e1593f48039a7e4b
           #   artifact_prefix: sync
     steps:
-      - name: Free Disk Space (Ubuntu)
-        uses: jlumbroso/[email protected]
-        with:
-          tool-cache: false
-          large-packages: false
-
       - name: Checkout sources
         uses: actions/checkout@v4
 
@@ -225,7 +205,7 @@ jobs:
           SIM_LIMIT: ${{ matrix.limit }}
           SIM_BUILDARG: ${{ matrix.buildarg }}
         run: |
-          FLAGS='--sim.parallelism 16 --sim.loglevel 1'
+          FLAGS='--sim.parallelism 4 --sim.loglevel 3'
           if [[ -n "$SIM_LIMIT" ]]; then
             escaped_limit=${SIM_LIMIT//\'/\'\\\'\'}
             FLAGS+=" --sim.limit '$escaped_limit'"
@@ -239,8 +219,8 @@ jobs:
         id: run-hive-action
        uses: ethpandaops/[email protected]
         with:
-          hive_repository: ${{ matrix.hive_repository }}
-          hive_version: ${{ matrix.hive_version }}
+          hive_repository: ethereum/hive
+          hive_version: 7709e5892146c793307da072e1593f48039a7e4b
           simulator: ${{ matrix.simulation }}
           client: ethrex
           client_config: ${{ steps.client-config.outputs.config }}

0 commit comments
