From 14d597e07858baf1fe18ff67605ed674c118a33d Mon Sep 17 00:00:00 2001 From: andri lim Date: Sat, 10 May 2025 18:54:21 +0700 Subject: [PATCH 001/138] Update FC.baseTxFrame after txFrame persisted (#3272) If the `baseTxFrame` is not updated, and `updateBase` yield to async event loop. Other module will access expired `baseTxFrame`. e.g. `getStatus` of eth/68 will crash the program. --- execution_chain/core/chain/forked_chain.nim | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/execution_chain/core/chain/forked_chain.nim b/execution_chain/core/chain/forked_chain.nim index 44f41242af..6af4a03ad9 100644 --- a/execution_chain/core/chain/forked_chain.nim +++ b/execution_chain/core/chain/forked_chain.nim @@ -418,7 +418,6 @@ proc updateBase(c: ForkedChainRef, newBase: BlockPos): # the blocks newBaseHash = newBase.hash nextIndex = int(newBase.number - branch.tailNumber) - baseTxFrame = newBase.txFrame # Persist the new base block - this replaces the base tx in coredb! for x in newBase.everyNthBlock(4): @@ -432,7 +431,9 @@ proc updateBase(c: ForkedChainRef, newBase: BlockPos): discard await idleAsync().withTimeout(idleTimeout) c.com.db.persist(x.txFrame, Opt.some(x.stateRoot)) - c.baseTxFrame = baseTxFrame + # Update baseTxFrame when we about to yield to the event loop + # and prevent other modules accessing expired baseTxFrame. 
+ c.baseTxFrame = x.txFrame disposeBlocks(number, branch) From 8c8a176ccebf2a9509e4c717e248ec0cacd85ad7 Mon Sep 17 00:00:00 2001 From: tersec Date: Tue, 13 May 2025 01:10:22 +0000 Subject: [PATCH 002/138] fix some Nim 2.2 warnings (#3276) * fix some Nim 2.2 warnings * copyright year linting * macOS Sonoma doesn't change oldest support x86 CPU type --- config.nims | 4 ++-- execution_chain/makefile | 12 +++++++++++- tests/test_rpc.nim | 3 --- 3 files changed, 13 insertions(+), 6 deletions(-) diff --git a/config.nims b/config.nims index abeee5d639..52549f8d0f 100644 --- a/config.nims +++ b/config.nims @@ -84,8 +84,8 @@ if defined(windows): if defined(disableMarchNative): if defined(i386) or defined(amd64): if defined(macosx): - # https://support.apple.com/en-us/102861 - # "macOS Ventura is compatible with these computers" lists current oldest + # https://support.apple.com/en-us/105113 + # "macOS Sonoma is compatible with these computers" lists current oldest # supported x86 models, all of which have Kaby Lake or newer CPUs. switch("passC", "-march=skylake -mtune=generic") switch("passL", "-march=skylake -mtune=generic") diff --git a/execution_chain/makefile b/execution_chain/makefile index fa1a9496be..699cf30548 100644 --- a/execution_chain/makefile +++ b/execution_chain/makefile @@ -1,5 +1,15 @@ #! /usr/bin/make -f +# Nimbus +# Copyright (c) 2021-2025 Status Research & Development GmbH +# Licensed under either of +# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or +# http://www.apache.org/licenses/LICENSE-2.0) +# * MIT license ([LICENSE-MIT](LICENSE-MIT) or +# http://opensource.org/licenses/MIT) +# at your option. This file may not be copied, modified, or distributed except +# according to those terms. 
+ SAVED_PATH := $(PATH) PWD := $(shell pwd) @@ -52,7 +62,7 @@ NIMDOC_FLAGS += -d:debug -d:disable_libbacktrace NIMDOC_FLAGS += $(NIMFLAGS) # Nim check flags -NIMCHK_FLAGS := c -r --verbosity:0 --hints:off --warnings:off +NIMCHK_FLAGS := c -r --verbosity:0 --warnings:off # Markdown compiler (test for discount tool with tables support) MD_CMD := markdown diff --git a/tests/test_rpc.nim b/tests/test_rpc.nim index 629c567caf..28074715f5 100644 --- a/tests/test_rpc.nim +++ b/tests/test_rpc.nim @@ -18,7 +18,6 @@ import ../execution_chain/[constants, transaction, config, version], ../execution_chain/db/[ledger, storage_types], ../execution_chain/sync/wire_protocol, - ../execution_chain/portal/portal, ../execution_chain/core/[tx_pool, chain, pow/difficulty], ../execution_chain/utils/utils, ../execution_chain/[common, rpc], @@ -30,8 +29,6 @@ import ./test_block_fixture type - Hash32 = common.Hash32 - TestEnv = object conf : NimbusConf com : CommonRef From d3215efbe9783b05711409c257b1658862fcb348 Mon Sep 17 00:00:00 2001 From: Jacek Sieka Date: Tue, 13 May 2025 10:25:49 +0200 Subject: [PATCH 003/138] metrics: bump (#3274) * removes `_created` metrics from gauges (they should never have been there) * allow labelled metrics to be created from any thread --- .../db/aristo/aristo_init/rocks_db/rdb_get.nim | 11 +++++++++-- vendor/nim-eth | 2 +- vendor/nim-metrics | 2 +- 3 files changed, 11 insertions(+), 4 deletions(-) diff --git a/execution_chain/db/aristo/aristo_init/rocks_db/rdb_get.nim b/execution_chain/db/aristo/aristo_init/rocks_db/rdb_get.nim index b63837a73e..4c058c7082 100644 --- a/execution_chain/db/aristo/aristo_init/rocks_db/rdb_get.nim +++ b/execution_chain/db/aristo/aristo_init/rocks_db/rdb_get.nim @@ -42,12 +42,19 @@ when defined(metrics): "aristo_rdb_vtx_lru_total", "Vertex LRU lookup (hit/miss, world/account, branch/leaf)", labels = ["state", "vtype", "hit"], + standardType = "counter", ) rdbKeyLruStatsMetric {.used.} = RdbKeyLruCounter.newCollector( - 
"aristo_rdb_key_lru_total", "HashKey LRU lookup", labels = ["state", "hit"] + "aristo_rdb_key_lru_total", + "HashKey LRU lookup", + labels = ["state", "hit"], + standardType = "counter", ) rdbBranchLruStatsMetric {.used.} = RdbBranchLruCounter.newCollector( - "aristo_rdb_branch_lru_total", "Branch LRU lookup", labels = ["state", "hit"] + "aristo_rdb_branch_lru_total", + "Branch LRU lookup", + labels = ["state", "hit"], + standardType = "counter", ) method collect*(collector: RdbVtxLruCounter, output: MetricHandler) = diff --git a/vendor/nim-eth b/vendor/nim-eth index 3b35e445aa..49bd8f5974 160000 --- a/vendor/nim-eth +++ b/vendor/nim-eth @@ -1 +1 @@ -Subproject commit 3b35e445aa49b1eaef0ed68378a3ff3e1eb1dd7a +Subproject commit 49bd8f59740049f6f90117c61cdb03fe727ddcdb diff --git a/vendor/nim-metrics b/vendor/nim-metrics index f1f8869578..ecf64c6078 160000 --- a/vendor/nim-metrics +++ b/vendor/nim-metrics @@ -1 +1 @@ -Subproject commit f1f886957831e9f2f16ffa728abb4bf44bfd5b98 +Subproject commit ecf64c6078d1276d3b7d9b3d931fbdb70004db11 From 55a661c006e31fee84e24186b2efab2e5c414669 Mon Sep 17 00:00:00 2001 From: Jordan Hrycaj Date: Tue, 13 May 2025 10:33:24 +0000 Subject: [PATCH 004/138] Beaon sync avoid import slowdown with last slow sync peer (#3279) * Discard peer immediately after `PeerDisconnected` exception why Otherwise it would follow the drill which is to be repeatedly tried again unless the maximum number of failures is reached. * Register last slow sync peer why: Previously (before PR #3204) the sync peer simply would have been zombified and discarded so that there were no sync peers left. Then PR #3204 introduced a concept of ignoring any error of the last peer via the `infectedByTVirus()` function. This opened a can of worms which was mitigated by PR #3269 by only keeping the last sync peer non-zombified if it was labelled `slow`. The last measure can lead to a heavy syncer slow down while queuing blocks if there is only a slow peer available. 
It will try to fill the queue first while it makes more sense to import blocks allowing the syncer to collect more sync peers. This patch registers the current peer as the last one labelled slow. It is up to other functions to exploit that fact. * Also start import while there is only one slow sync peer left. why: See explanation on previous patch. * Remove stray debugging statement --- .../sync/beacon/worker/blocks_staged.nim | 8 ++++++++ .../sync/beacon/worker/blocks_staged/bodies.nim | 17 +++++++++++++++-- .../beacon/worker/headers_staged/headers.nim | 7 +++++++ .../sync/beacon/worker/start_stop.nim | 4 ++-- execution_chain/sync/beacon/worker_desc.nim | 4 ++-- 5 files changed, 34 insertions(+), 6 deletions(-) diff --git a/execution_chain/sync/beacon/worker/blocks_staged.nim b/execution_chain/sync/beacon/worker/blocks_staged.nim index fa12eb4f60..935219f036 100644 --- a/execution_chain/sync/beacon/worker/blocks_staged.nim +++ b/execution_chain/sync/beacon/worker/blocks_staged.nim @@ -159,6 +159,14 @@ func blocksStagedCanImportOk*(ctx: BeaconCtxRef): bool = if ctx.pool.nBuddies == 0: return true + # If the last peer is labelled `slow` it will be ignored for the sake + # of deciding whether to execute blocks. + # + # As a consequence, the syncer will import blocks immediately allowing + # the syncer to collect more sync peers. + if ctx.pool.nBuddies == 1 and ctx.pool.blkLastSlowPeer.isSome: + return true + # If importing starts while peers are actively downloading, the system # tends to loose download peers, most probably due to high system # activity. 
diff --git a/execution_chain/sync/beacon/worker/blocks_staged/bodies.nim b/execution_chain/sync/beacon/worker/blocks_staged/bodies.nim index 5ee8adb09c..c4a81da9fe 100644 --- a/execution_chain/sync/beacon/worker/blocks_staged/bodies.nim +++ b/execution_chain/sync/beacon/worker/blocks_staged/bodies.nim @@ -27,7 +27,13 @@ func bdyErrors*(buddy: BeaconBuddyRef): string = proc fetchRegisterError*(buddy: BeaconBuddyRef, slowPeer = false) = buddy.only.nBdyRespErrors.inc if fetchBodiesReqErrThresholdCount < buddy.only.nBdyRespErrors: - if 1 < buddy.ctx.pool.nBuddies or not slowPeer: + if buddy.ctx.pool.nBuddies == 1 and slowPeer: + # Remember that the current peer is the last one and is lablelled slow. + # It would have been zombified if it were not the last one. This can be + # used in functions -- depending on context -- that will trigger if the + # if the pool of available sync peers becomes empty. + buddy.ctx.pool.blkLastSlowPeer = Opt.some(buddy.peerID) + else: buddy.ctrl.zombie = true # abandon slow peer unless last one proc bodiesFetch*( @@ -47,6 +53,12 @@ proc bodiesFetch*( var resp: Opt[BlockBodiesPacket] try: resp = await peer.getBlockBodies(request) + except PeerDisconnected as e: + buddy.only.nBdyRespErrors.inc + buddy.ctrl.zombie = true + `info` info & " error", peer, nReq, elapsed=(Moment.now() - start).toStr, + error=($e.name), msg=e.msg, bdyErrors=buddy.bdyErrors + return err() except CatchableError as e: buddy.fetchRegisterError() `info` info & " error", peer, nReq, elapsed=(Moment.now() - start).toStr, @@ -83,7 +95,8 @@ proc bodiesFetch*( b.len.uint64 * 100 < nReq.uint64 * fetchBodiesReqMinResponsePC: buddy.fetchRegisterError(slowPeer=true) else: - buddy.only.nBdyRespErrors = 0 # reset error count + buddy.only.nBdyRespErrors = 0 # reset error count + buddy.ctx.pool.blkLastSlowPeer = Opt.none(Hash) # not last one or not error trace trEthRecvReceivedBlockBodies, peer, nReq, nResp=b.len, elapsed=elapsed.toStr, ctrl=buddy.ctrl.state, 
bdyErrors=buddy.bdyErrors diff --git a/execution_chain/sync/beacon/worker/headers_staged/headers.nim b/execution_chain/sync/beacon/worker/headers_staged/headers.nim index 8fcaabb7a1..a8eeb4ab12 100644 --- a/execution_chain/sync/beacon/worker/headers_staged/headers.nim +++ b/execution_chain/sync/beacon/worker/headers_staged/headers.nim @@ -79,6 +79,13 @@ proc headersFetchReversed*( # reliably be used in a `withTimeout()` directive. It would rather crash # in `rplx` with a violated `req.timeoutAt <= Moment.now()` assertion. resp = await peer.getBlockHeaders(req) + except PeerDisconnected as e: + buddy.only.nBdyRespErrors.inc + buddy.ctrl.zombie = true + `info` info & " error", peer, ivReq, nReq=req.maxResults, + hash=topHash.toStr, elapsed=(Moment.now() - start).toStr, + error=($e.name), msg=e.msg, hdrErrors=buddy.hdrErrors + return err() except CatchableError as e: buddy.registerError() `info` info & " error", peer, ivReq, nReq=req.maxResults, diff --git a/execution_chain/sync/beacon/worker/start_stop.nim b/execution_chain/sync/beacon/worker/start_stop.nim index 54d73ef1ae..68726c0705 100644 --- a/execution_chain/sync/beacon/worker/start_stop.nim +++ b/execution_chain/sync/beacon/worker/start_stop.nim @@ -11,8 +11,7 @@ {.push raises:[].} import - pkg/chronicles, - pkg/eth/common, + pkg/[chronicles, eth/common, results], ../../../networking/p2p, ../../wire_protocol, ../worker_desc, @@ -98,6 +97,7 @@ proc startBuddy*(buddy: BeaconBuddyRef): bool = if peer.supports(wire_protocol.eth) and peer.state(wire_protocol.eth).initialized: ctx.pool.nBuddies.inc + ctx.pool.blkLastSlowPeer = Opt.none(Hash) buddy.initHdrProcErrors() return true diff --git a/execution_chain/sync/beacon/worker_desc.nim b/execution_chain/sync/beacon/worker_desc.nim index 01cef7a39a..d406f7bf27 100644 --- a/execution_chain/sync/beacon/worker_desc.nim +++ b/execution_chain/sync/beacon/worker_desc.nim @@ -12,8 +12,7 @@ import std/sets, - pkg/chronos, - pkg/eth/common, + pkg/[chronos, eth/common, 
results], pkg/stew/[interval_set, sorted_set], ../../core/chain, ../sync_desc, @@ -113,6 +112,7 @@ type # Info, debugging, and error handling stuff nReorg*: int ## Number of reorg invocations (info only) hdrProcError*: Table[Hash,uint8] ## Some globally accessible header errors + blkLastSlowPeer*: Opt[Hash] ## Register slow peer when last one failedPeers*: HashSet[Hash] ## Detect dead end sync by collecting peers seenData*: bool ## Set `true` is data were fetched, already From 61687a1f26643b6f2975b5d0f2ed684ec8845d7b Mon Sep 17 00:00:00 2001 From: kdeme <7857583+kdeme@users.noreply.github.com> Date: Tue, 13 May 2025 13:20:32 +0200 Subject: [PATCH 005/138] Add access from History network to historical summaries (#3281) Add access from History network to historical summaries for the verification of Capella and onwards block proofs. Access is provided by adding the BeaconDbCache to the history network, more specifical to the HeaderVerifier (before called Accumulators). This approach is taken, over providing callbacks, as it is more in sync with how StateNetwork accesses the HistoryNetwork. It might be still be considered to move to callbacks in the future though as that could provide a more "oracle" agnostic way of providing this data. The BeaconDbCache is created because for Ephemeral headers verification we will also need access to the Light client updates. Aside from the Light client updates, the historical summaries are also added to the cache in its decoded form for easy and fast access on block verification. Some changes are likely to be still required to avoid to many copies of the summaries, TBI. 
--- fluffy/network/beacon/beacon_db.nim | 123 +++++++++++++----- fluffy/network/beacon/beacon_network.nim | 6 + fluffy/network/history/history_network.nim | 19 +-- fluffy/network/history/history_validation.nim | 39 +++--- fluffy/portal_node.nim | 5 + .../test_beacon_network.nim | 6 + .../test_history_content.nim | 16 ++- 7 files changed, 156 insertions(+), 58 deletions(-) diff --git a/fluffy/network/beacon/beacon_db.nim b/fluffy/network/beacon/beacon_db.nim index 55bc82b10c..b4758ba81c 100644 --- a/fluffy/network/beacon/beacon_db.nim +++ b/fluffy/network/beacon/beacon_db.nim @@ -25,7 +25,7 @@ import from beacon_chain/spec/helpers import is_better_update, toMeta -export kvstore_sqlite3 +export kvstore_sqlite3, beacon_chain_historical_summaries, beacon_content type BestLightClientUpdateStore = ref object @@ -43,6 +43,7 @@ type HistoricalSummariesStore = ref object getStmt: SqliteStmt[int64, seq[byte]] + getLatestStmt: SqliteStmt[NoParams, seq[byte]] putStmt: SqliteStmt[(int64, seq[byte]), void] keepFromStmt: SqliteStmt[int64, void] @@ -55,19 +56,23 @@ type historicalSummaries: HistoricalSummariesStore forkDigests: ForkDigests cfg*: RuntimeConfig - finalityUpdateCache: Opt[LightClientFinalityUpdateCache] - optimisticUpdateCache: Opt[LightClientOptimisticUpdateCache] + beaconDbCache*: BeaconDbCache + + BeaconDbCache* = ref object + finalityUpdateCache*: Opt[LightClientFinalityUpdateCache] + optimisticUpdateCache*: Opt[LightClientOptimisticUpdateCache] + historicalSummariesCache*: Opt[HistoricalSummariesWithProof] # Storing the content encoded here. Could also store decoded and access the # slot directly. However, that would require is to have access to the # fork digests here to be able the re-encode the data. 
LightClientFinalityUpdateCache = object - lastFinalityUpdate: seq[byte] - lastFinalityUpdateSlot: uint64 + latestFinalityUpdate: seq[byte] + latestFinalityUpdateSlot: uint64 LightClientOptimisticUpdateCache = object - lastOptimisticUpdate: seq[byte] - lastOptimisticUpdateSlot: uint64 + latestOptimisticUpdate: seq[byte] + latestOptimisticUpdateSlot: uint64 template expectDb(x: auto): untyped = # There's no meaningful error handling implemented for a corrupt database or @@ -265,6 +270,20 @@ proc initHistoricalSummariesStore( managed = false, ) .expect("SQL query OK") + getLatestStmt = backend + .prepareStmt( + """ + SELECT `summaries` + FROM `""" & name & + """` + WHERE `epoch` = (SELECT MAX(epoch) FROM `""" & name & + """`); + """, + NoParams, + seq[byte], + managed = false, + ) + .expect("SQL query OK") putStmt = backend .prepareStmt( """ @@ -292,7 +311,10 @@ proc initHistoricalSummariesStore( .expect("SQL query OK") ok HistoricalSummariesStore( - getStmt: getStmt, putStmt: putStmt, keepFromStmt: keepFromStmt + getStmt: getStmt, + getLatestStmt: getLatestStmt, + putStmt: putStmt, + keepFromStmt: keepFromStmt, ) func close(store: var BestLightClientUpdateStore) = @@ -310,6 +332,7 @@ func close(store: var BootstrapStore) = func close(store: var HistoricalSummariesStore) = store.getStmt.disposeSafe() + store.getLatestStmt.disposeSafe() store.putStmt.disposeSafe() store.keepFromStmt.disposeSafe() @@ -340,6 +363,7 @@ proc new*( historicalSummaries: historicalSummaries, cfg: networkData.metadata.cfg, forkDigests: (newClone networkData.forks)[], + beaconDbCache: BeaconDbCache(), ) proc close*(db: BeaconDb) = @@ -348,6 +372,15 @@ proc close*(db: BeaconDb) = db.historicalSummaries.close() discard db.kv.close() +template finalityUpdateCache(db: BeaconDb): Opt[LightClientFinalityUpdateCache] = + db.beaconDbCache.finalityUpdateCache + +template optimisticUpdateCache(db: BeaconDb): Opt[LightClientOptimisticUpdateCache] = + db.beaconDbCache.optimisticUpdateCache + +template 
historicalSummariesCache(db: BeaconDb): Opt[HistoricalSummariesWithProof] = + db.beaconDbCache.historicalSummariesCache + ## Private KvStoreRef Calls proc get(kv: KvStoreRef, key: openArray[byte]): results.Opt[seq[byte]] = var res: results.Opt[seq[byte]] = Opt.none(seq[byte]) @@ -502,7 +535,7 @@ proc putUpdateIfBetter*(db: BeaconDb, period: SyncCommitteePeriod, update: seq[b proc getLastFinalityUpdate*(db: BeaconDb): Opt[ForkedLightClientFinalityUpdate] = db.finalityUpdateCache.map( proc(x: LightClientFinalityUpdateCache): ForkedLightClientFinalityUpdate = - decodeLightClientFinalityUpdateForked(db.forkDigests, x.lastFinalityUpdate).valueOr: + decodeLightClientFinalityUpdateForked(db.forkDigests, x.latestFinalityUpdate).valueOr: raiseAssert "Stored finality update must be valid" ) @@ -514,20 +547,45 @@ func keepBootstrapsFrom*(db: BeaconDb, minSlot: Slot) = let res = db.bootstraps.keepFromStmt.exec(minSlot.int64) res.expect("SQL query OK") -func getHistoricalSummaries*(db: BeaconDb, epoch: Epoch): Opt[seq[byte]] = - doAssert distinctBase(db.historicalSummaries.getStmt) != nil +proc getLatestHistoricalSummaries*(db: BeaconDb): Opt[seq[byte]] = + doAssert distinctBase(db.historicalSummaries.getLatestStmt) != nil var summaries: seq[byte] - for res in db.historicalSummaries.getStmt.exec(epoch.int64, summaries): + for res in db.historicalSummaries.getLatestStmt.exec(summaries): res.expect("SQL query OK") return ok(summaries) -func putHistoricalSummaries*(db: BeaconDb, summaries: seq[byte], epoch: Epoch) = +func loadHistoricalSummariesCache*(db: BeaconDb) = + let summariesEncoded = db.getLatestHistoricalSummaries().valueOr: + return + + let summariesWithProof = decodeSsz( + db.forkDigests, summariesEncoded, HistoricalSummariesWithProof + ).valueOr: + raiseAssert "Stored historical summaries must be valid" + + db.beaconDbCache.historicalSummariesCache = Opt.some(summariesWithProof) + +func putHistoricalSummaries(db: BeaconDb, summaries: seq[byte], epoch: Epoch) = 
db.historicalSummaries.putStmt.exec((epoch.int64, summaries)).expect("SQL query OK") -func keepHistoricalSummariesFrom*(db: BeaconDb, epoch: Epoch) = - let res = db.historicalSummaries.keepFromStmt.exec(epoch.int64) - res.expect("SQL query OK") +func keepHistoricalSummariesFrom(db: BeaconDb, epoch: Epoch) = + db.historicalSummaries.keepFromStmt.exec(epoch.int64).expect("SQL query OK") + +func putLatestHistoricalSummaries(db: BeaconDb, summaries: seq[byte]) = + let summariesWithProof = decodeSsz( + db.forkDigests, summaries, HistoricalSummariesWithProof + ).valueOr: + raiseAssert "Stored historical summaries must have been validated" + + if db.historicalSummariesCache.isNone() or + db.historicalSummariesCache.value().epoch < summariesWithProof.epoch: + # Store in cache in its decoded form + db.beaconDbCache.historicalSummariesCache = Opt.some(summariesWithProof) + # Store in db + db.putHistoricalSummaries(summaries, Epoch(summariesWithProof.epoch)) + # Delete old summaries + db.keepHistoricalSummariesFrom(Epoch(summariesWithProof.epoch)) proc getHandlerImpl( db: BeaconDb, contentKey: ContentKeyByteList, contentId: ContentId @@ -571,8 +629,8 @@ proc getHandlerImpl( if db.finalityUpdateCache.isSome(): let slot = contentKey.lightClientFinalityUpdateKey.finalizedSlot let cache = db.finalityUpdateCache.get() - if cache.lastFinalityUpdateSlot >= slot: - Opt.some(cache.lastFinalityUpdate) + if cache.latestFinalityUpdateSlot >= slot: + Opt.some(cache.latestFinalityUpdate) else: Opt.none(seq[byte]) else: @@ -582,14 +640,19 @@ proc getHandlerImpl( if db.optimisticUpdateCache.isSome(): let slot = contentKey.lightClientOptimisticUpdateKey.optimisticSlot let cache = db.optimisticUpdateCache.get() - if cache.lastOptimisticUpdateSlot >= slot: - Opt.some(cache.lastOptimisticUpdate) + if cache.latestOptimisticUpdateSlot >= slot: + Opt.some(cache.latestOptimisticUpdate) else: Opt.none(seq[byte]) else: Opt.none(seq[byte]) of beacon_content.ContentType.historicalSummaries: - 
db.getHistoricalSummaries(Epoch(contentKey.historicalSummariesKey.epoch)) + if db.historicalSummariesCache.isSome() and + db.historicalSummariesCache.value().epoch >= + contentKey.historicalSummariesKey.epoch: + db.getLatestHistoricalSummaries() + else: + Opt.none(seq[byte]) proc createGetHandler*(db: BeaconDb): DbGetHandler = return ( @@ -630,26 +693,23 @@ proc createStoreHandler*(db: BeaconDb): DbStoreHandler = db.putUpdateIfBetter(SyncCommitteePeriod(period), update.asSeq()) inc period of lightClientFinalityUpdate: - db.finalityUpdateCache = Opt.some( + db.beaconDbCache.finalityUpdateCache = Opt.some( LightClientFinalityUpdateCache( - lastFinalityUpdateSlot: + latestFinalityUpdateSlot: contentKey.lightClientFinalityUpdateKey.finalizedSlot, - lastFinalityUpdate: content, + latestFinalityUpdate: content, ) ) of lightClientOptimisticUpdate: - db.optimisticUpdateCache = Opt.some( + db.beaconDbCache.optimisticUpdateCache = Opt.some( LightClientOptimisticUpdateCache( - lastOptimisticUpdateSlot: + latestOptimisticUpdateSlot: contentKey.lightClientOptimisticUpdateKey.optimisticSlot, - lastOptimisticUpdate: content, + latestOptimisticUpdate: content, ) ) of beacon_content.ContentType.historicalSummaries: - db.putHistoricalSummaries( - content, Epoch(contentKey.historicalSummariesKey.epoch) - ) - db.keepHistoricalSummariesFrom(Epoch(contentKey.historicalSummariesKey.epoch)) + db.putLatestHistoricalSummaries(content) return false # No data pruned ) @@ -657,6 +717,7 @@ proc createStoreHandler*(db: BeaconDb): DbStoreHandler = proc createContainsHandler*(db: BeaconDb): DbContainsHandler = return ( proc(contentKey: ContentKeyByteList, contentId: ContentId): bool = + # TODO: Implement cheaper `contains` handlers db.getHandlerImpl(contentKey, contentId).isSome() ) diff --git a/fluffy/network/beacon/beacon_network.nim b/fluffy/network/beacon/beacon_network.nim index 5964aa1596..ddcbb14949 100644 --- a/fluffy/network/beacon/beacon_network.nim +++ 
b/fluffy/network/beacon/beacon_network.nim @@ -225,6 +225,9 @@ proc new*( else: trustedBlockRoot + # load from db to cache + beaconDb.loadHistoricalSummariesCache() + BeaconNetwork( portalProtocol: portalProtocol, beaconDb: beaconDb, @@ -340,6 +343,9 @@ proc validateContent( let summariesWithProof = ?decodeSsz(n.forkDigests, content, HistoricalSummariesWithProof) + if key.historicalSummariesKey.epoch != summariesWithProof.epoch: + return err("Epoch mismatch in historical_summaries") + n.validateHistoricalSummaries(summariesWithProof) proc validateContent( diff --git a/fluffy/network/history/history_network.nim b/fluffy/network/history/history_network.nim index a66132c2e4..76c10c2499 100644 --- a/fluffy/network/history/history_network.nim +++ b/fluffy/network/history/history_network.nim @@ -37,7 +37,7 @@ type contentDB*: ContentDB contentQueue*: AsyncQueue[(Opt[NodeId], ContentKeysList, seq[seq[byte]])] cfg*: RuntimeConfig - accumulators*: HistoryAccumulators + verifier*: HeaderVerifier processContentLoop: Future[void] statusLogLoop: Future[void] contentRequestRetries: int @@ -147,7 +147,7 @@ proc getVerifiedBlockHeader*( return Opt.none(Header) header = validateCanonicalHeaderBytes( - headerContent.content, id, n.accumulators, n.cfg + headerContent.content, id, n.verifier, n.cfg ).valueOr: n.portalProtocol.banNode( headerContent.receivedFrom.id, NodeBanDurationContentLookupFailedValidation @@ -306,7 +306,7 @@ proc validateContent( case contentKey.contentType of blockHeader: let _ = validateCanonicalHeaderBytes( - content, contentKey.blockHeaderKey.blockHash, n.accumulators, n.cfg + content, contentKey.blockHeaderKey.blockHash, n.verifier, n.cfg ).valueOr: return err("Failed validating block header: " & error) @@ -329,7 +329,7 @@ proc validateContent( ok() of blockNumber: let _ = validateCanonicalHeaderBytes( - content, contentKey.blockNumberKey.blockNumber, n.accumulators, n.cfg + content, contentKey.blockNumberKey.blockNumber, n.verifier, n.cfg ).valueOr: return 
err("Failed validating block header: " & error) @@ -348,6 +348,7 @@ proc new*( cfg: RuntimeConfig, accumulator: FinishedHistoricalHashesAccumulator = loadAccumulator(), historicalRoots: HistoricalRoots = loadHistoricalRoots(), + beaconDbCache: BeaconDbCache = BeaconDbCache(), bootstrapRecords: openArray[Record] = [], portalConfig: PortalProtocolConfig = defaultPortalProtocolConfig, contentRequestRetries = 1, @@ -376,8 +377,10 @@ proc new*( contentDB: contentDB, contentQueue: contentQueue, cfg: cfg, - accumulators: HistoryAccumulators( - historicalHashes: accumulator, historicalRoots: historicalRoots + verifier: HeaderVerifier( + historicalHashes: accumulator, + historicalRoots: historicalRoots, + beaconDbCache: beaconDbCache, ), contentRequestRetries: contentRequestRetries, ) @@ -441,8 +444,8 @@ proc statusLogLoop(n: HistoryNetwork) {.async: (raises: []).} = proc start*(n: HistoryNetwork) = info "Starting Portal execution history network", protocolId = n.portalProtocol.protocolId, - historicalHashesAccumulatorRoot = hash_tree_root(n.accumulators.historicalHashes), - historiricalRootsRoot = hash_tree_root(n.accumulators.historicalRoots) + historicalHashesAccumulatorRoot = hash_tree_root(n.verifier.historicalHashes), + historicalRootsRoot = hash_tree_root(n.verifier.historicalRoots) n.portalProtocol.start() diff --git a/fluffy/network/history/history_validation.nim b/fluffy/network/history/history_validation.nim index 51710d90a4..3c1413d9b4 100644 --- a/fluffy/network/history/history_validation.nim +++ b/fluffy/network/history/history_validation.nim @@ -12,6 +12,7 @@ import eth/trie/ordered_trie, beacon_chain/spec/presets, ../../network_metadata, + ../beacon/beacon_db, ./history_type_conversions, ./validation/[ block_proof_historical_hashes_accumulator, block_proof_historical_roots, @@ -20,12 +21,17 @@ import from eth/rlp import computeRlpHash -export block_proof_historical_hashes_accumulator +export block_proof_historical_hashes_accumulator, beacon_db.BeaconDbCache 
-type HistoryAccumulators* = object +type HeaderVerifier* = object historicalHashes*: FinishedHistoricalHashesAccumulator historicalRoots*: HistoricalRoots - historicalSummaries*: HistoricalSummaries + beaconDbCache*: BeaconDbCache + +template getHistoricalSummaries( + verifier: HeaderVerifier +): Opt[HistoricalSummariesWithProof] = + verifier.beaconDbCache.historicalSummariesCache func validateHeader(header: Header, blockHash: Hash32): Result[void, string] = if not (header.computeRlpHash() == blockHash): @@ -59,30 +65,36 @@ func validateHeaderBytes*( ok(header) func verifyBlockHeaderProof*( - a: HistoryAccumulators, + v: HeaderVerifier, header: Header, proof: ByteList[MAX_HEADER_PROOF_LENGTH], cfg: RuntimeConfig, ): Result[void, string] = let timestamp = Moment.init(header.timestamp.int64, Second) - # Note: As long as no up to date historical_summaries list is provided Capella - # and onwards will always fail verification. + # Note: If no up to date historical_summaries list is provided verification + # will still fail for the most recent headers. 
if isCancun(chainConfig, timestamp): + let summaries = v.getHistoricalSummaries().valueOr: + return err("No historical_summaries available for verification") + let proof = decodeSsz(proof.asSeq(), BlockProofHistoricalSummariesDeneb).valueOr: return err("Failed decoding historical_summaries based block proof: " & error) - if a.historicalSummaries.verifyProof( + if summaries.historical_summaries.verifyProof( proof, Digest(data: header.computeRlpHash().data), cfg ): ok() else: err("Block proof verification failed (historical_summaries)") elif isShanghai(chainConfig, timestamp): + let summaries = v.getHistoricalSummaries().valueOr: + return err("No historical_summaries available for verification") + let proof = decodeSsz(proof.asSeq(), BlockProofHistoricalSummaries).valueOr: return err("Failed decoding historical_summaries based block proof: " & error) - if a.historicalSummaries.verifyProof( + if summaries.historical_summaries.verifyProof( proof, Digest(data: header.computeRlpHash().data), cfg ): ok() @@ -92,7 +104,7 @@ func verifyBlockHeaderProof*( let proof = decodeSsz(proof.asSeq(), BlockProofHistoricalRoots).valueOr: return err("Failed decoding historical_roots based block proof: " & error) - if a.historicalRoots.verifyProof(proof, Digest(data: header.computeRlpHash().data)): + if v.historicalRoots.verifyProof(proof, Digest(data: header.computeRlpHash().data)): ok() else: err("Block proof verification failed (historical roots)") @@ -101,22 +113,19 @@ func verifyBlockHeaderProof*( return err("Failed decoding historical hashes accumulator based block proof: " & error) - if a.historicalHashes.verifyProof(header, accumulatorProof): + if v.historicalHashes.verifyProof(header, accumulatorProof): ok() else: err("Block proof verification failed (historical hashes accumulator)") func validateCanonicalHeaderBytes*( - bytes: openArray[byte], - id: uint64 | Hash32, - accumulators: HistoryAccumulators, - cfg: RuntimeConfig, + bytes: openArray[byte], id: uint64 | Hash32, v: 
HeaderVerifier, cfg: RuntimeConfig ): Result[Header, string] = let headerWithProof = decodeSsz(bytes, BlockHeaderWithProof).valueOr: return err("Failed decoding header with proof: " & error) let header = ?validateHeaderBytes(headerWithProof.header.asSeq(), id) - ?accumulators.verifyBlockHeaderProof(header, headerWithProof.proof, cfg) + ?v.verifyBlockHeaderProof(header, headerWithProof.proof, cfg) ok(header) diff --git a/fluffy/portal_node.nim b/fluffy/portal_node.nim index b186389cbf..b4697f1046 100644 --- a/fluffy/portal_node.nim +++ b/fluffy/portal_node.nim @@ -134,6 +134,11 @@ proc new*( streamManager, networkData.metadata.cfg, accumulator, + beaconDbCache = + if beaconNetwork.isSome(): + beaconNetwork.value().beaconDb.beaconDbCache + else: + BeaconDbCache(), bootstrapRecords = bootstrapRecords, portalConfig = config.portalConfig, contentRequestRetries = config.contentRequestRetries, diff --git a/fluffy/tests/beacon_network_tests/test_beacon_network.nim b/fluffy/tests/beacon_network_tests/test_beacon_network.nim index 74d4d3ab5e..6a7b41cec9 100644 --- a/fluffy/tests/beacon_network_tests/test_beacon_network.nim +++ b/fluffy/tests/beacon_network_tests/test_beacon_network.nim @@ -220,6 +220,12 @@ procSuite "Beacon Network": historical_summaries: historical_summaries, proof: proof, ) + + # Note that this is not the slot deduced forkDigest, as that one would + # cause issues for this custom chain. + # TODO: If we were to encode the historical summaries in the db code + # it would fail due to slot based fork digest until we allow for + # custom networks. 
forkDigest = atConsensusFork(forkDigests, consensusFork) content = encodeSsz(historicalSummariesWithProof, forkDigest) diff --git a/fluffy/tests/history_network_tests/test_history_content.nim b/fluffy/tests/history_network_tests/test_history_content.nim index c0480e8914..0d99954b29 100644 --- a/fluffy/tests/history_network_tests/test_history_content.nim +++ b/fluffy/tests/history_network_tests/test_history_content.nim @@ -22,6 +22,9 @@ import from std/os import walkDir, splitFile, PathComponent +from ../../network/beacon/beacon_chain_historical_summaries import + HistoricalSummariesWithProof + suite "History Content Values": test "HeaderWithProof Building and Encoding": const @@ -39,7 +42,7 @@ suite "History Content Values": raiseAssert "Invalid epoch accumulator file: " & accumulatorFile blockHeadersWithProof = buildHeadersWithProof(blockHeaders, epochRecord).valueOr: raiseAssert "Could not build headers with proof" - accumulators = HistoryAccumulators(historicalHashes: loadAccumulator()) + accumulators = HeaderVerifier(historicalHashes: loadAccumulator()) networkData = loadNetworkData("mainnet") cfg = networkData.metadata.cfg @@ -99,10 +102,15 @@ suite "History Content Values": let content = YamlPortalContent.loadFromYaml(path).valueOr: raiseAssert "Invalid data file: " & error - accumulators = HistoryAccumulators( + verifier = HeaderVerifier( historicalHashes: loadAccumulator(), historicalRoots: loadHistoricalRoots(), - historicalSummaries: historicalSummaries, + beaconDbCache: BeaconDbCache( + historicalSummariesCache: Opt.some( + # Note: incomplete but sufficient for the test + HistoricalSummariesWithProof(historical_summaries: historicalSummaries) + ) + ), ) contentKeyEncoded = content.content_key.hexToSeqByte() contentValueEncoded = content.content_value.hexToSeqByte() @@ -120,7 +128,7 @@ suite "History Content Values": # Verifies if block header is canonical and if it matches the hash # of provided content key. 
check validateCanonicalHeaderBytes( - contentValueEncoded, contentKey.blockHeaderKey.blockHash, accumulators, cfg + contentValueEncoded, contentKey.blockHeaderKey.blockHash, verifier, cfg ) .isOk() From 59df5bdde4fb02f14e8d1f9a8a4d727c627792ce Mon Sep 17 00:00:00 2001 From: kdeme <7857583+kdeme@users.noreply.github.com> Date: Tue, 13 May 2025 13:27:15 +0200 Subject: [PATCH 006/138] Fix number field not accessible when block header by hash (#3282) --- execution_chain/sync/wire_protocol/handler.nim | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/execution_chain/sync/wire_protocol/handler.nim b/execution_chain/sync/wire_protocol/handler.nim index 04a814c5a5..0a66a33f8a 100644 --- a/execution_chain/sync/wire_protocol/handler.nim +++ b/execution_chain/sync/wire_protocol/handler.nim @@ -152,10 +152,10 @@ proc getBlockHeaders*(ctx: EthWireRef, # EIP-4444 limit if chain.isHistoryExpiryActive: if req.reverse: - if req.startBlock.number > chain.portal.limit: + if header.number > chain.portal.limit: return move(list) else: - if req.startBlock.number + req.maxResults > chain.portal.limit: + if header.number + req.maxResults > chain.portal.limit: return move(list) totalBytes += getEncodedLength(header) From 9d896dc00a6cdcd807369a946f7a3879ded78539 Mon Sep 17 00:00:00 2001 From: bhartnett <51288821+bhartnett@users.noreply.github.com> Date: Wed, 14 May 2025 13:33:44 +0800 Subject: [PATCH 007/138] Bump nim-eth to 5957dce55a4bfe00899ecc14006f72c6608b43df (#3285) * Bump nim-eth to latest. 
--- vendor/nim-eth | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vendor/nim-eth b/vendor/nim-eth index 49bd8f5974..5957dce55a 160000 --- a/vendor/nim-eth +++ b/vendor/nim-eth @@ -1 +1 @@ -Subproject commit 49bd8f59740049f6f90117c61cdb03fe727ddcdb +Subproject commit 5957dce55a4bfe00899ecc14006f72c6608b43df From 4c4f6d9d50e5d345f5a04907899732b019cac483 Mon Sep 17 00:00:00 2001 From: bhartnett <51288821+bhartnett@users.noreply.github.com> Date: Wed, 14 May 2025 15:11:09 +0800 Subject: [PATCH 008/138] Fluffy: Use neighboursInRange in neighborhoodGossip and filter out src node in handleFindContent (#3273) --- fluffy/network/wire/portal_protocol.nim | 55 +++++++++++++------ .../test_portal_wire_protocol.nim | 18 ++++-- 2 files changed, 51 insertions(+), 22 deletions(-) diff --git a/fluffy/network/wire/portal_protocol.nim b/fluffy/network/wire/portal_protocol.nim index 3b798530bd..cb40901f2b 100644 --- a/fluffy/network/wire/portal_protocol.nim +++ b/fluffy/network/wire/portal_protocol.nim @@ -360,11 +360,6 @@ func getNode*(p: PortalProtocol, id: NodeId): Opt[Node] = func localNode*(p: PortalProtocol): Node = p.baseProtocol.localNode -template neighbours*( - p: PortalProtocol, id: NodeId, k: int = BUCKET_SIZE, seenOnly = false -): seq[Node] = - p.routingTable.neighbours(id, k, seenOnly) - func distance(p: PortalProtocol, a, b: NodeId): UInt256 = p.routingTable.distance(a, b) @@ -380,6 +375,34 @@ func inRange( template inRange*(p: PortalProtocol, contentId: ContentId): bool = p.inRange(p.localNode.id, p.dataRadius(), contentId) +func neighbours*( + p: PortalProtocol, + id: NodeId, + k: int = BUCKET_SIZE, + seenOnly = false, + excluding = initHashSet[NodeId](), +): seq[Node] = + func nodeNotExcluded(nodeId: NodeId): bool = + not excluding.contains(nodeId) + + p.routingTable.neighbours(id, k, seenOnly, nodeNotExcluded) + +func neighboursInRange*( + p: PortalProtocol, + id: ContentId, + k: int = BUCKET_SIZE, + seenOnly = false, + excluding = 
initHashSet[NodeId](), +): seq[Node] = + func nodeNotExcludedAndInRange(nodeId: NodeId): bool = + if excluding.contains(nodeId): + return false + let radius = p.radiusCache.get(nodeId).valueOr: + return false + p.inRange(nodeId, radius, id) + + p.routingTable.neighbours(id, k, seenOnly, nodeNotExcludedAndInRange) + func truncateEnrs( nodes: seq[Node], maxSize: int, enrOverhead: int ): List[ByteList[2048], 32] = @@ -542,8 +565,10 @@ proc handleFindContent( # Node does not have the content, or content is not even in radius, # send closest neighbours to the requested content id. + let - closestNodes = p.neighbours(NodeId(contentId), seenOnly = true) + closestNodes = + p.neighbours(contentId, seenOnly = true, excluding = toHashSet([srcId])) enrs = truncateEnrs(closestNodes, maxPayloadSize, enrOverhead) portal_content_enrs_packed.observe(enrs.len().int64, labelValues = [$p.protocolId]) @@ -1751,28 +1776,24 @@ proc neighborhoodGossip*( # table, but at the same time avoid unnecessary node lookups. # It might still cause issues in data getting propagated in a wider id range. + var excluding: HashSet[NodeId] + if srcNodeId.isSome(): + excluding.incl(srcNodeId.get()) + var closestLocalNodes = - p.routingTable.neighbours(NodeId(contentId), BUCKET_SIZE, seenOnly = true) + p.neighboursInRange(contentId, BUCKET_SIZE, seenOnly = true, excluding) # Shuffling the order of the nodes in order to not always hit the same node # first for the same request. 
p.baseProtocol.rng[].shuffle(closestLocalNodes) - var gossipNodes: seq[Node] - for node in closestLocalNodes: - let radius = p.radiusCache.get(node.id).valueOr: - continue - if p.inRange(node.id, radius, contentId): - if srcNodeId.isNone() or node.id != srcNodeId.get(): - gossipNodes.add(node) - var numberOfGossipedNodes = 0 - if not enableNodeLookup or gossipNodes.len() >= p.config.maxGossipNodes: + if not enableNodeLookup or closestLocalNodes.len() >= p.config.maxGossipNodes: # use local nodes for gossip portal_gossip_without_lookup.inc(labelValues = [$p.protocolId]) - for node in gossipNodes: + for node in closestLocalNodes: let req = OfferRequest(dst: node, kind: Direct, contentList: contentList) await p.offerQueue.addLast(req) inc numberOfGossipedNodes diff --git a/fluffy/tests/wire_protocol_tests/test_portal_wire_protocol.nim b/fluffy/tests/wire_protocol_tests/test_portal_wire_protocol.nim index 3ac303fc24..be3d5a93bc 100644 --- a/fluffy/tests/wire_protocol_tests/test_portal_wire_protocol.nim +++ b/fluffy/tests/wire_protocol_tests/test_portal_wire_protocol.nim @@ -157,16 +157,24 @@ procSuite "Portal Wire Protocol Tests": await proto2.stopPortalProtocol() asyncTest "FindContent/Content - send enrs": - let (proto1, proto2) = defaultTestSetup(rng) + let + proto1 = initPortalProtocol(rng, PrivateKey.random(rng[]), localAddress(20402)) + proto2 = initPortalProtocol(rng, PrivateKey.random(rng[]), localAddress(20403)) + proto3 = initPortalProtocol(rng, PrivateKey.random(rng[]), localAddress(20404)) + + # Make node1 know about node2, and node2 about node3 + check proto1.addNode(proto2.localNode) == Added + check proto2.addNode(proto3.localNode) == Added - # ping in one direction to add, ping in the other to update as seen. 
- check (await proto1.baseProtocol.ping(proto2.localNode)).isOk() - check (await proto2.baseProtocol.ping(proto1.localNode)).isOk() + # node1 needs to know the radius of the nodes to determine if they are + # interested in content, so a ping is done. + check (await proto1.ping(proto2.localNode)).isOk() + check (await proto2.ping(proto3.localNode)).isOk() let contentKey = ContentKeyByteList.init(@[1'u8]) # content does not exist so this should provide us with the closest nodes - # to the content, which is the only node in the routing table. + # to the content, which should only be node 3 because node 1 should be excluded let content = await proto1.findContentImpl(proto2.localNode, contentKey) check: From 6341f5f0369b0c746a0653747d6b528a932b8705 Mon Sep 17 00:00:00 2001 From: bhartnett <51288821+bhartnett@users.noreply.github.com> Date: Wed, 14 May 2025 15:23:33 +0800 Subject: [PATCH 009/138] Fluffy: Update the state bridge to send each content offer to the closest connected portal client (#3278) * Refactor state bridge to support sending each content to any of the connected portal clients sorted by distance from the content key. 
--- .../portal_bridge/portal_bridge_state.nim | 278 ++++++++++-------- 1 file changed, 152 insertions(+), 126 deletions(-) diff --git a/fluffy/tools/portal_bridge/portal_bridge_state.nim b/fluffy/tools/portal_bridge/portal_bridge_state.nim index 80cfb49903..12a9f8aab8 100644 --- a/fluffy/tools/portal_bridge/portal_bridge_state.nim +++ b/fluffy/tools/portal_bridge/portal_bridge_state.nim @@ -37,7 +37,7 @@ type stateRoot: Hash32 stateDiffs: seq[TransactionDiff] - BlockOffersRef = ref object + BlockOffers = ref object blockNumber: uint64 accountTrieOffers: seq[AccountTrieOfferWithKey] contractTrieOffers: seq[ContractTrieOfferWithKey] @@ -45,10 +45,9 @@ type PortalStateGossipWorker = ref object id: int - portalClient: RpcClient - portalUrl: JsonRpcUrl - nodeId: NodeId - blockOffersQueue: AsyncQueue[BlockOffersRef] + portalClients: OrderedTable[NodeId, RpcClient] + portalEndpoints: seq[(JsonRpcUrl, NodeId)] + blockOffersQueue: AsyncQueue[BlockOffers] gossipBlockOffersLoop: Future[void] PortalStateBridge* = ref object @@ -56,7 +55,7 @@ type web3Url: JsonRpcUrl db: DatabaseRef blockDataQueue: AsyncQueue[BlockData] - blockOffersQueue: AsyncQueue[BlockOffersRef] + blockOffersQueue: AsyncQueue[BlockOffers] gossipWorkers: seq[PortalStateGossipWorker] collectBlockDataLoop: Future[void] buildBlockOffersLoop: Future[void] @@ -92,27 +91,6 @@ proc putLastPersistedBlockNumber(db: DatabaseRef, blockNumber: uint64) {.inline. 
if blockNumber > db.getLastPersistedBlockNumber().valueOr(0): db.put(rlp.encode("lastPersistedBlockNumber"), rlp.encode(blockNumber)) -proc collectOffer( - offersMap: OrderedTableRef[seq[byte], seq[byte]], - offerWithKey: - AccountTrieOfferWithKey | ContractTrieOfferWithKey | ContractCodeOfferWithKey, -) {.inline.} = - let keyBytes = offerWithKey.key.toContentKey().encode().asSeq() - offersMap[keyBytes] = offerWithKey.offer.encode() - -proc recursiveCollectOffer( - offersMap: OrderedTableRef[seq[byte], seq[byte]], - offerWithKey: AccountTrieOfferWithKey | ContractTrieOfferWithKey, -) = - offersMap.collectOffer(offerWithKey) - - # root node, recursive collect is finished - if offerWithKey.key.path.unpackNibbles().len() == 0: - return - - # continue the recursive collect - offersMap.recursiveCollectOffer(offerWithKey.getParent()) - proc runCollectBlockDataLoop( bridge: PortalStateBridge, startBlockNumber: uint64 ) {.async: (raises: []).} = @@ -237,7 +215,7 @@ proc runBuildBlockOffersLoop( builder.buildBlockOffers() await bridge.blockOffersQueue.addLast( - BlockOffersRef( + BlockOffers( blockNumber: 0.uint64, accountTrieOffers: builder.getAccountTrieOffers(), contractTrieOffers: builder.getContractTrieOffers(), @@ -284,7 +262,7 @@ proc runBuildBlockOffersLoop( builder.buildBlockOffers() await bridge.blockOffersQueue.addLast( - BlockOffersRef( + BlockOffers( blockNumber: blockData.blockNumber, accountTrieOffers: builder.getAccountTrieOffers(), contractTrieOffers: builder.getContractTrieOffers(), @@ -299,7 +277,107 @@ proc runBuildBlockOffersLoop( except CancelledError: trace "buildBlockOffersLoop canceled" -proc runGossipBlockOffersLoop( +proc collectOffer( + offersMap: OrderedTableRef[seq[byte], seq[byte]], + offerWithKey: + AccountTrieOfferWithKey | ContractTrieOfferWithKey | ContractCodeOfferWithKey, +) {.inline.} = + let keyBytes = offerWithKey.key.toContentKey().encode().asSeq() + offersMap[keyBytes] = offerWithKey.offer.encode() + +proc recursiveCollectOffer( + 
offersMap: OrderedTableRef[seq[byte], seq[byte]], + offerWithKey: AccountTrieOfferWithKey | ContractTrieOfferWithKey, +) = + offersMap.collectOffer(offerWithKey) + + # root node, recursive collect is finished + if offerWithKey.key.path.unpackNibbles().len() == 0: + return + + # continue the recursive collect + offersMap.recursiveCollectOffer(offerWithKey.getParent()) + +func buildOffersMap(blockOffers: BlockOffers): auto = + let offersMap = newOrderedTable[seq[byte], seq[byte]]() + + for offerWithKey in blockOffers.accountTrieOffers: + offersMap.recursiveCollectOffer(offerWithKey) + for offerWithKey in blockOffers.contractTrieOffers: + offersMap.recursiveCollectOffer(offerWithKey) + for offerWithKey in blockOffers.contractCodeOffers: + offersMap.collectOffer(offerWithKey) + + offersMap + +proc orderPortalClientsByDistanceFromContent( + worker: PortalStateGossipWorker, contentKey: seq[byte] +) = + let contentId = ContentKeyByteList.init(contentKey).toContentId() + + # Closure to sort the portal clients using their nodeIds + # and comparing them to the contentId to be gossipped + proc portalClientsCmp(x, y: (NodeId, RpcClient)): int = + let + xDistance = contentId xor x[0] + yDistance = contentId xor y[0] + + if xDistance == yDistance: + 0 + elif xDistance > yDistance: + 1 + else: + -1 + + # Sort the portalClients based on distance from the content so that + # we gossip each piece of content to the closest node first + worker.portalClients.sort(portalClientsCmp) + +proc contentFoundInNetwork( + worker: PortalStateGossipWorker, contentKey: seq[byte] +): Future[bool] {.async: (raises: [CancelledError]).} = + for nodeId, client in worker.portalClients: + try: + let contentInfo = await client.portal_stateGetContent(contentKey.to0xHex()) + if contentInfo.content.len() > 0: + trace "Found existing content in network", + contentKey = contentKey.to0xHex(), nodeId, workerId = worker.id + return true + except CancelledError as e: + raise e + except CatchableError as e: + debug 
"Unable to find existing content in network", + contentKey = contentKey.to0xHex(), nodeId, error = e.msg, workerId = worker.id + return false + +proc gossipContentIntoNetwork( + worker: PortalStateGossipWorker, + minGossipPeers: int, + contentKey: seq[byte], + contentOffer: seq[byte], +): Future[bool] {.async: (raises: [CancelledError]).} = + for nodeId, client in worker.portalClients: + try: + let + putContentResult = await client.portal_statePutContent( + contentKey.to0xHex(), contentOffer.to0xHex() + ) + numPeers = putContentResult.peerCount + if numPeers >= minGossipPeers: + trace "Offer successfully gossipped to peers", + contentKey = contentKey.to0xHex(), nodeId, numPeers, workerId = worker.id + return true + else: + warn "Offer not gossiped to enough peers", + contentKey = contentKey.to0xHex(), nodeId, numPeers, workerId = worker.id + except CancelledError as e: + raise e + except CatchableError as e: + error "Failed to gossip offer to peers", + contentKey = contentKey.to0xHex(), nodeId, error = e.msg, workerId = worker.id + return false + +proc runGossipLoop( worker: PortalStateGossipWorker, verifyGossip: bool, skipGossipForExisting: bool, @@ -308,106 +386,52 @@ proc runGossipBlockOffersLoop( debug "Starting gossip block offers loop", workerId = worker.id try: - # Create one client per worker in order to improve performance. + # Create separate clients in each worker in order to improve performance. # WebSocket connections don't perform well when shared by many # concurrent workers. - worker.portalClient = newRpcClientConnect(worker.portalUrl) + for (rpcUrl, nodeId) in worker.portalEndpoints: + worker.portalClients[nodeId] = newRpcClientConnect(rpcUrl) - var blockOffers = await worker.blockOffersQueue.popFirst() - - while true: + var + blockOffers = await worker.blockOffersQueue.popFirst() # A table of offer key, value pairs is used to filter out duplicates so # that we don't gossip the same offer multiple times. 
- let offersMap = newOrderedTable[seq[byte], seq[byte]]() - - for offerWithKey in blockOffers.accountTrieOffers: - offersMap.recursiveCollectOffer(offerWithKey) - for offerWithKey in blockOffers.contractTrieOffers: - offersMap.recursiveCollectOffer(offerWithKey) - for offerWithKey in blockOffers.contractCodeOffers: - offersMap.collectOffer(offerWithKey) - - # We need to use a closure here because nodeId is required to calculate the - # distance of each content id from the node - proc offersMapCmp(x, y: (seq[byte], seq[byte])): int = - let - xId = ContentKeyByteList.init(x[0]).toContentId() - yId = ContentKeyByteList.init(y[0]).toContentId() - xDistance = worker.nodeId xor xId - yDistance = worker.nodeId xor yId - - if xDistance == yDistance: - 0 - elif xDistance > yDistance: - 1 - else: - -1 - - # Sort the offers based on the distance from the node so that we will gossip - # content that is closest to the node first - offersMap.sort(offersMapCmp) + offersMap = buildOffersMap(blockOffers) + while true: var retryGossip = false - for k, v in offersMap: + + for contentKey, contentOffer in offersMap: + worker.orderPortalClientsByDistanceFromContent(contentKey) + # Check if we need to gossip the content - var gossipContent = true - - if skipGossipForExisting: - try: - let contentInfo = - await worker.portalClient.portal_stateGetContent(k.to0xHex()) - if contentInfo.content.len() > 0: - gossipContent = false - except CancelledError as e: - raise e - except CatchableError as e: - debug "Unable to find existing content. 
Will attempt to gossip content: ", - contentKey = k.to0xHex(), error = e.msg, workerId = worker.id + if skipGossipForExisting and (await worker.contentFoundInNetwork(contentKey)): + continue # move on to the next content key # Gossip the content into the network - if gossipContent: - try: - let - putContentResult = await worker.portalClient.portal_statePutContent( - k.to0xHex(), v.to0xHex() - ) - numPeers = putContentResult.peerCount - if numPeers >= minGossipPeers: - debug "Offer successfully gossipped to peers", - numPeers, workerId = worker.id - else: - warn "Offer not gossiped to enough peers", numPeers, workerId = worker.id - retryGossip = true - break - except CancelledError as e: - raise e - except CatchableError as e: - error "Failed to gossip offer to peers", error = e.msg, workerId = worker.id - retryGossip = true - break + let gossipCompleted = await worker.gossipContentIntoNetwork( + minGossipPeers, contentKey, contentOffer + ) + if not gossipCompleted: + # Retry gossip of this block + retryGossip = true + break # Check if the content can be found in the network var foundContentKeys = newSeq[seq[byte]]() if verifyGossip and not retryGossip: - # wait for the peers to be updated + # Wait for the peers to be updated. + # Wait time is proportional to the number of offers let waitTimeMs = 200 + (offersMap.len() * 20) await sleepAsync(waitTimeMs.milliseconds) - # wait time is proportional to the number of offers - - for k, _ in offersMap: - try: - let contentInfo = - await worker.portalClient.portal_stateGetContent(k.to0xHex()) - if contentInfo.content.len() == 0: - error "Found empty contentValue", workerId = worker.id - retryGossip = true - break - foundContentKeys.add(k) - except CancelledError as e: - raise e - except CatchableError as e: - warn "Unable to find content with key. 
Will retry gossipping content:", - contentKey = k.to0xHex(), error = e.msg, workerId = worker.id + + for contentKey, _ in offersMap: + worker.orderPortalClientsByDistanceFromContent(contentKey) + + if await worker.contentFoundInNetwork(contentKey): + foundContentKeys.add(contentKey) + else: + # Retry gossip of this block retryGossip = true break @@ -418,13 +442,17 @@ proc runGossipBlockOffersLoop( # Don't retry gossip for content that was found in the network for key in foundContentKeys: offersMap.del(key) + warn "Retrying state gossip for block: ", blockNumber = blockOffers.blockNumber, remainingOffers = offersMap.len(), workerId = worker.id - # We might need to reconnect if using a WebSocket client - await worker.portalClient.tryReconnect(worker.portalUrl) + # We might need to reconnect if using WebSocket clients + for (rpcUrl, nodeId) in worker.portalEndpoints: + await worker.portalClients.getOrDefault(nodeId).tryReconnect(rpcUrl) + + # Jump back to the top of while loop to retry processing the current block continue if blockOffers.blockNumber mod 1000 == 0: @@ -439,6 +467,7 @@ proc runGossipBlockOffersLoop( workerId = worker.id blockOffers = await worker.blockOffersQueue.popFirst() + offersMap = buildOffersMap(blockOffers) except CancelledError: trace "gossipBlockOffersLoop canceled" @@ -528,7 +557,7 @@ proc start*(bridge: PortalStateBridge, config: PortalBridgeConf) = info "Starting concurrent gossip workers", workerCount = bridge.gossipWorkers.len() for worker in bridge.gossipWorkers: - worker.gossipBlockOffersLoop = worker.runGossipBlockOffersLoop( + worker.gossipBlockOffersLoop = worker.runGossipLoop( config.verifyGossip, config.skipGossipForExisting, config.minGossipPeers.int ) @@ -580,19 +609,16 @@ proc runState*( web3Url: config.web3RpcUrl, db: db, blockDataQueue: newAsyncQueue[BlockData](queueSize), - blockOffersQueue: newAsyncQueue[BlockOffersRef](queueSize), + blockOffersQueue: newAsyncQueue[BlockOffers](queueSize), gossipWorkers: 
newSeq[PortalStateGossipWorker](), ) for i in 0 ..< config.gossipWorkers.int: - let - (rpcUrl, nodeId) = portalEndpoints[i mod config.portalRpcEndpoints.int] - worker = PortalStateGossipWorker( - id: i + 1, - portalUrl: rpcUrl, - nodeId: nodeId, - blockOffersQueue: bridge.blockOffersQueue, - ) + let worker = PortalStateGossipWorker( + id: i + 1, + portalEndpoints: portalEndpoints, + blockOffersQueue: bridge.blockOffersQueue, + ) bridge.gossipWorkers.add(worker) bridge.start(config) From 152f3cdd05c228f7875fdd538ca923f3527b62db Mon Sep 17 00:00:00 2001 From: Jacek Sieka Date: Thu, 15 May 2025 07:53:53 +0200 Subject: [PATCH 010/138] Track missing vertex lookups in metrics (#3277) When looking up a VertexID, the entry might not be present in the database - this is currently not tracked since the functionality is not commonly used - with path-based vertex id generation, we'll be making guesses however where empty lookups become "normal" - the same would happen for incomplete databases as well. 
--- .../db/aristo/aristo_desc/desc_structural.nim | 5 +++-- .../db/aristo/aristo_init/memory_db.nim | 2 +- execution_chain/db/aristo/aristo_init/rocks_db.nim | 2 +- .../db/aristo/aristo_init/rocks_db/rdb_desc.nim | 14 +++++++++++++- .../db/aristo/aristo_init/rocks_db/rdb_get.nim | 13 ++++++++----- .../db/aristo/aristo_init/rocks_db/rdb_init.nim | 2 +- .../db/aristo/aristo_walk/memory_only.nim | 2 +- .../db/aristo/aristo_walk/persistent.nim | 2 +- 8 files changed, 29 insertions(+), 13 deletions(-) diff --git a/execution_chain/db/aristo/aristo_desc/desc_structural.nim b/execution_chain/db/aristo/aristo_desc/desc_structural.nim index 0572c9813c..0ff7d2bbdb 100644 --- a/execution_chain/db/aristo/aristo_desc/desc_structural.nim +++ b/execution_chain/db/aristo/aristo_desc/desc_structural.nim @@ -90,8 +90,9 @@ type ## unfriendly to caches const - Leaves* = {AccLeaf, StoLeaf} - Branches* = {Branch, ExtBranch} + Leaves* = {VertexType.AccLeaf, VertexType.StoLeaf} + Branches* = {VertexType.Branch, VertexType.ExtBranch} + VertexTypes* = Leaves + Branches # ------------------------------------------------------------------------------ # Public helpers (misc) diff --git a/execution_chain/db/aristo/aristo_init/memory_db.nim b/execution_chain/db/aristo/aristo_init/memory_db.nim index 4c9bd40df3..1e0379a4ef 100644 --- a/execution_chain/db/aristo/aristo_init/memory_db.nim +++ b/execution_chain/db/aristo/aristo_init/memory_db.nim @@ -208,7 +208,7 @@ func memoryBackend*(): AristoDbRef = iterator walkVtx*( be: MemBackendRef; - kinds = {Branch, ExtBranch, AccLeaf, StoLeaf}; + kinds = VertexTypes; ): tuple[rvid: RootedVertexID, vtx: VertexRef] = ## Iteration over the vertex sub-table. 
for n,rvid in be.sTab.keys.toSeq.sorted: diff --git a/execution_chain/db/aristo/aristo_init/rocks_db.nim b/execution_chain/db/aristo/aristo_init/rocks_db.nim index bc6e7c3442..240c6aa4e2 100644 --- a/execution_chain/db/aristo/aristo_init/rocks_db.nim +++ b/execution_chain/db/aristo/aristo_init/rocks_db.nim @@ -245,7 +245,7 @@ proc rocksDbBackend*( iterator walkVtx*( be: RdbBackendRef; - kinds = {Branch, ExtBranch, AccLeaf, StoLeaf}; + kinds = VertexTypes; ): tuple[evid: RootedVertexID, vtx: VertexRef] = ## Variant of `walk()` iteration over the vertex sub-table. for (rvid, vtx) in be.rdb.walkVtx(kinds): diff --git a/execution_chain/db/aristo/aristo_init/rocks_db/rdb_desc.nim b/execution_chain/db/aristo/aristo_init/rocks_db/rdb_desc.nim index 9517e44efa..612a7276f5 100644 --- a/execution_chain/db/aristo/aristo_init/rocks_db/rdb_desc.nim +++ b/execution_chain/db/aristo/aristo_init/rocks_db/rdb_desc.nim @@ -73,6 +73,12 @@ type Account World + RdbVertexType* = enum + Empty + Leaf + Branch + ExtBranch + var # Hit/miss counters for LRU cache - global so as to integrate easily with # nim-metrics and `uint64` to ensure that increasing them is fast - collection @@ -80,7 +86,7 @@ var # TODO maybe turn this into more general framework for LRU reporting since # we have lots of caches of this sort rdbBranchLruStats*: array[RdbStateType, RdbLruCounter] - rdbVtxLruStats*: array[RdbStateType, array[VertexType, RdbLruCounter]] + rdbVtxLruStats*: array[RdbStateType, array[RdbVertexType, RdbLruCounter]] rdbKeyLruStats*: array[RdbStateType, RdbLruCounter] # ------------------------------------------------------------------------------ @@ -93,6 +99,12 @@ template toOpenArray*(xid: AdminTabID): openArray[byte] = template to*(v: RootedVertexID, T: type RdbStateType): RdbStateType = if v.root == VertexID(1): RdbStateType.World else: RdbStateType.Account +template to*(v: VertexType, T: type RdbVertexType): RdbVertexType = + case v + of VertexType.AccLeaf, VertexType.StoLeaf: 
RdbVertexType.Leaf + of VertexType.Branch: RdbVertexType.Branch + of VertexType.ExtBranch: RdbVertexType.ExtBranch + template inc*(v: var RdbLruCounter, hit: bool) = discard v[hit].fetchAdd(1, moRelaxed) diff --git a/execution_chain/db/aristo/aristo_init/rocks_db/rdb_get.nim b/execution_chain/db/aristo/aristo_init/rocks_db/rdb_get.nim index 4c058c7082..15ec67e5a2 100644 --- a/execution_chain/db/aristo/aristo_init/rocks_db/rdb_get.nim +++ b/execution_chain/db/aristo/aristo_init/rocks_db/rdb_get.nim @@ -63,7 +63,7 @@ when defined(metrics): # We don't care about synchronization between each type of metric or between # the metrics thread and others since small differences like this don't matter for state in RdbStateType: - for vtype in VertexType: + for vtype in RdbVertexType: for hit in [false, true]: output( name = "aristo_rdb_vtx_lru_total", @@ -198,7 +198,9 @@ proc getVtx*( rdb.rdVtxLru.get(rvid.vid) if rc.isOk: - rdbVtxLruStats[rvid.to(RdbStateType)][rc.value().vType].inc(true) + rdbVtxLruStats[rvid.to(RdbStateType)][rc.value().vType.to(RdbVertexType)].inc( + true + ) return ok(move(rc.value)) # Otherwise fetch from backend database @@ -214,8 +216,7 @@ proc getVtx*( return err((errSym, error)) if not gotData: - # As a hack, we count missing data as leaf nodes - rdbVtxLruStats[rvid.to(RdbStateType)][VertexType.StoLeaf].inc(false) + rdbVtxLruStats[rvid.to(RdbStateType)][RdbVertexType.Empty].inc(false) return ok(VertexRef(nil)) if res.isErr(): @@ -224,7 +225,9 @@ proc getVtx*( if res.value.vType == Branch: rdbBranchLruStats[rvid.to(RdbStateType)].inc(false) else: - rdbVtxLruStats[rvid.to(RdbStateType)][res.value().vType].inc(false) + rdbVtxLruStats[rvid.to(RdbStateType)][res.value().vType.to(RdbVertexType)].inc( + false + ) # Update cache and return - in peek mode, avoid evicting cache items if GetVtxFlag.PeekCache notin flags: diff --git a/execution_chain/db/aristo/aristo_init/rocks_db/rdb_init.nim b/execution_chain/db/aristo/aristo_init/rocks_db/rdb_init.nim index 
90dbbc9210..b875846d58 100644 --- a/execution_chain/db/aristo/aristo_init/rocks_db/rdb_init.nim +++ b/execution_chain/db/aristo/aristo_init/rocks_db/rdb_init.nim @@ -27,7 +27,7 @@ proc dumpCacheStats(keySize, vtxSize, branchSize: int) = echo "vtxLru(", vtxSize, ")" echo " state vtype miss hit total hitrate" for state in RdbStateType: - for vtype in VertexType: + for vtype in RdbVertexType: let (miss, hit) = ( rdbVtxLruStats[state][vtype].get(false), diff --git a/execution_chain/db/aristo/aristo_walk/memory_only.nim b/execution_chain/db/aristo/aristo_walk/memory_only.nim index 06e573351a..cca6a567a4 100644 --- a/execution_chain/db/aristo/aristo_walk/memory_only.nim +++ b/execution_chain/db/aristo/aristo_walk/memory_only.nim @@ -29,7 +29,7 @@ export iterator walkVtxBe*[T: MemBackendRef]( _: type T; db: AristoDbRef; - kinds = {Branch, ExtBranch, AccLeaf, StoLeaf}; + kinds = VertexTypes; ): tuple[rvid: RootedVertexID, vtx: VertexRef] = ## Iterate over filtered memory backend or backend-less vertices. This ## function depends on the particular backend type name which must match diff --git a/execution_chain/db/aristo/aristo_walk/persistent.nim b/execution_chain/db/aristo/aristo_walk/persistent.nim index 61fc4421c7..d09698dfcd 100644 --- a/execution_chain/db/aristo/aristo_walk/persistent.nim +++ b/execution_chain/db/aristo/aristo_walk/persistent.nim @@ -34,7 +34,7 @@ export iterator walkVtxBe*[T: RdbBackendRef]( _: type T; db: AristoDbRef; - kinds = {Branch, ExtBranch, AccLeaf, StoLeaf}; + kinds = VertexTypes; ): tuple[rvid: RootedVertexID, vtx: VertexRef] = ## Iterate over RocksDB backend vertices. This function depends on ## the particular backend type name which must match the backend descriptor. 
From 443da8e7cb0f8c96fb0ac6468aa0f35c0755daf6 Mon Sep 17 00:00:00 2001 From: andri lim Date: Thu, 15 May 2025 14:58:52 +0700 Subject: [PATCH 011/138] Support multiple wire protocol version and implement eth/68 + eth/69 (#3283) * Remove old wire protocol implementation * eth68 status isolated * eth69 preparation * Fix typo * Register protocol * Add BlockRangeUpdate * Use new receipt format for eth69 * Fix tests * Update wire protocol setup * Update syncer addObserver * Update peer observer * Handle blockRangeUpdate using peer state * Add receipt69 roundtrip test * Replace Receipt69 with StoredReceipt from nim-eth * Bump nim-eth * Bump nim-eth to master branch --- execution_chain/networking/p2p_types.nim | 2 +- execution_chain/networking/peer_pool.nim | 18 +- execution_chain/networking/rlpx.nim | 33 +- .../sync/beacon/worker/start_stop.nim | 11 +- execution_chain/sync/peers.nim | 3 +- execution_chain/sync/sync_sched.nim | 7 +- execution_chain/sync/wire_protocol.nim | 3 - .../sync/wire_protocol/handler.nim | 43 ++- .../sync/wire_protocol/implementation.nim | 282 ------------------ .../sync/wire_protocol/requester.nim | 134 +++++++-- .../sync/wire_protocol/responder.nim | 251 +++++++++++----- execution_chain/sync/wire_protocol/setup.nim | 8 +- .../sync/wire_protocol/trace_config.nim | 14 +- execution_chain/sync/wire_protocol/types.nim | 19 +- hive_integration/nodocker/rpc/test_env.nim | 2 +- tests/test_rpc.nim | 2 +- vendor/nim-eth | 2 +- 17 files changed, 409 insertions(+), 425 deletions(-) delete mode 100644 execution_chain/sync/wire_protocol/implementation.nim diff --git a/execution_chain/networking/p2p_types.nim b/execution_chain/networking/p2p_types.nim index 2cac17f9bf..1d9e8dd237 100644 --- a/execution_chain/networking/p2p_types.nim +++ b/execution_chain/networking/p2p_types.nim @@ -78,7 +78,7 @@ type PeerObserver* = object onPeerConnected*: proc(p: Peer) {.gcsafe, raises: [].} onPeerDisconnected*: proc(p: Peer) {.gcsafe, raises: [].} - protocol*: ProtocolInfo + 
protocols*: seq[ProtocolInfo] Capability* = object name*: string diff --git a/execution_chain/networking/peer_pool.nim b/execution_chain/networking/peer_pool.nim index 789e69db1e..0ec5553dd3 100644 --- a/execution_chain/networking/peer_pool.nim +++ b/execution_chain/networking/peer_pool.nim @@ -87,7 +87,7 @@ proc addObserver*(p: PeerPool, observerId: int, observer: PeerObserver) = p.observers[observerId] = observer if not observer.onPeerConnected.isNil: for peer in p.connectedNodes.values: - if observer.protocol.isNil or peer.supports(observer.protocol): + if observer.protocols.len == 0 or peer.supports(observer.protocols): observer.onPeerConnected(peer) func delObserver*(p: PeerPool, observerId: int) = @@ -99,8 +99,8 @@ proc addObserver*(p: PeerPool, observerId: ref, observer: PeerObserver) = func delObserver*(p: PeerPool, observerId: ref) = p.delObserver(cast[int](observerId)) -template setProtocol*(observer: PeerObserver, Protocol: type) = - observer.protocol = Protocol.protocolInfo +template addProtocol*(observer: PeerObserver, Protocol: type) = + observer.protocols.add Protocol.protocolInfo proc stopAllPeers(p: PeerPool) {.async.} = debug "Stopping all peers ..." @@ -108,10 +108,6 @@ proc stopAllPeers(p: PeerPool) {.async.} = # await asyncio.gather( # *[peer.stop() for peer in self.connected_nodes.values()]) -# async def stop(self) -> None: -# self.cancel_token.trigger() -# await self.stop_all_peers() - proc connect(p: PeerPool, remote: Node): Future[Peer] {.async.} = ## Connect to the given remote and return a Peer instance when successful. ## Returns nil if the remote is unreachable, times out or is useless. 
@@ -166,10 +162,10 @@ proc addPeer*(pool: PeerPool, peer: Peer) {.gcsafe.} = doAssert(peer.remote notin pool.connectedNodes) pool.connectedNodes[peer.remote] = peer rlpx_connected_peers.inc() - for o in pool.observers.values: - if not o.onPeerConnected.isNil: - if o.protocol.isNil or peer.supports(o.protocol): - o.onPeerConnected(peer) + for observer in pool.observers.values: + if not observer.onPeerConnected.isNil: + if observer.protocols.len == 0 or peer.supports(observer.protocols): + observer.onPeerConnected(peer) proc connectToNode*(p: PeerPool, n: Node) {.async.} = let peer = await p.connect(n) diff --git a/execution_chain/networking/rlpx.nim b/execution_chain/networking/rlpx.nim index 24d7ae011a..b23ac52935 100644 --- a/execution_chain/networking/rlpx.nim +++ b/execution_chain/networking/rlpx.nim @@ -365,6 +365,11 @@ proc supports*(peer: Peer, Protocol: type): bool = ## Checks whether a Peer supports a particular protocol peer.supports(Protocol.protocolInfo) +proc supports*(peer: Peer, protos: openArray[ProtocolInfo]): bool = + for proto in protos: + if peer.supports(proto): + return true + template perPeerMsgId(peer: Peer, MsgType: type): uint64 = perPeerMsgIdImpl(peer, MsgType.msgProtocol.protocolInfo, MsgType.msgId) @@ -981,7 +986,7 @@ proc removePeer(network: EthereumNode, peer: Peer) = if not peer.dispatcher.isNil: for observer in network.peerPool.observers.values: if not observer.onPeerDisconnected.isNil: - if observer.protocol.isNil or peer.supports(observer.protocol): + if observer.protocols.len == 0 or peer.supports(observer.protocols): observer.onPeerDisconnected(peer) proc callDisconnectHandlers( @@ -1557,6 +1562,26 @@ template rlpxWithFutureHandler*(PROTO: distinct type; resolveResponseFuture(peer, perPeerMsgId, addr(packet), reqId) +template rlpxWithFutureHandler*(PROTO: distinct type; + MSGTYPE: distinct type; + PROTYPE: distinct type; + msgId: static[uint64]; + peer: Peer; + data: Rlp, + fields: untyped): untyped = + 
wrapRlpxWithPacketException(MSGTYPE, peer): + var + rlp = data + packet: MSGTYPE + + tryEnterList(rlp) + let + reqId = read(rlp, uint64) + perPeerMsgId = msgIdImpl(PROTO, peer, msgId) + checkedRlpFields(peer, rlp, packet, fields) + var proType = packet.to(PROTYPE) + resolveResponseFuture(peer, + perPeerMsgId, addr(proType), reqId) proc nextMsg*(PROTO: distinct type, peer: Peer, @@ -1595,6 +1620,9 @@ func initResponder*(peer: Peer, reqId: uint64): Responder = template state*(response: Responder, PROTO: type): auto = state(response.peer, PROTO) +template supports*(response: Responder, Protocol: type): bool = + response.peer.supports(Protocol.protocolInfo) + template networkState*(response: Responder, PROTO: type): auto = networkState(response.peer, PROTO) @@ -1618,6 +1646,9 @@ template defineProtocol*(PROTO: untyped, template NetworkState*(_: type PROTO): type = networkState + template protocolVersion*(_: type PROTO): int = + version + func initProtocol*(_: type PROTO): auto = initProtocol(rlpxName, version, diff --git a/execution_chain/sync/beacon/worker/start_stop.nim b/execution_chain/sync/beacon/worker/start_stop.nim index 68726c0705..d7e488655d 100644 --- a/execution_chain/sync/beacon/worker/start_stop.nim +++ b/execution_chain/sync/beacon/worker/start_stop.nim @@ -94,8 +94,15 @@ proc startBuddy*(buddy: BeaconBuddyRef): bool = let ctx = buddy.ctx peer = buddy.peer - if peer.supports(wire_protocol.eth) and - peer.state(wire_protocol.eth).initialized: + + if peer.supports(eth69) and + peer.state(eth69).initialized: + ctx.pool.nBuddies.inc + buddy.initHdrProcErrors() + return true + + if peer.supports(eth68) and + peer.state(eth68).initialized: ctx.pool.nBuddies.inc ctx.pool.blkLastSlowPeer = Opt.none(Hash) buddy.initHdrProcErrors() diff --git a/execution_chain/sync/peers.nim b/execution_chain/sync/peers.nim index 6cd4014064..7f152646e3 100644 --- a/execution_chain/sync/peers.nim +++ b/execution_chain/sync/peers.nim @@ -92,7 +92,8 @@ proc setupManager(pm: 
PeerManagerRef, enodes: openArray[ENode]) = pm.state = Running pm.reconnectFut = pm.runReconnectLoop() - po.setProtocol eth + po.addProtocol eth68 + po.addProtocol eth69 pm.pool.addObserver(pm, po) for enode in enodes: diff --git a/execution_chain/sync/sync_sched.nim b/execution_chain/sync/sync_sched.nim index 3fc78f547d..96328f14a1 100644 --- a/execution_chain/sync/sync_sched.nim +++ b/execution_chain/sync/sync_sched.nim @@ -88,7 +88,8 @@ import chronos, ../networking/[p2p, peer_pool], stew/keyed_queue, - ./sync_desc + ./sync_desc, + ./wire_protocol type ActiveBuddies[S,W] = ##\ @@ -503,8 +504,10 @@ proc startSync*[S,W](dsc: RunnerSyncRef[S,W]): bool = onPeerDisconnected: proc(p: Peer) {.gcsafe.} = dsc.onPeerDisconnected(p)) - po.setProtocol eth + po.addProtocol eth68 + po.addProtocol eth69 dsc.pool.addObserver(dsc, po) + asyncSpawn dsc.tickerLoop() return true diff --git a/execution_chain/sync/wire_protocol.nim b/execution_chain/sync/wire_protocol.nim index 254bf8be20..9a86ea5038 100644 --- a/execution_chain/sync/wire_protocol.nim +++ b/execution_chain/sync/wire_protocol.nim @@ -18,6 +18,3 @@ export responder, types, setup - -type - eth* = eth68 diff --git a/execution_chain/sync/wire_protocol/handler.nim b/execution_chain/sync/wire_protocol/handler.nim index 0a66a33f8a..7a6ec54adc 100644 --- a/execution_chain/sync/wire_protocol/handler.nim +++ b/execution_chain/sync/wire_protocol/handler.nim @@ -44,14 +44,14 @@ proc new*(_: type EthWireRef, # Public functions: eth wire protocol handlers # ------------------------------------------------------------------------------ -proc getStatus*(ctx: EthWireRef): EthState = +proc getStatus68*(ctx: EthWireRef): Eth68State = let com = ctx.chain.com bestBlock = ctx.chain.latestHeader txFrame = ctx.chain.baseTxFrame forkId = com.forkId(bestBlock.number, bestBlock.timestamp) - EthState( + Eth68State( totalDifficulty: txFrame.headTotalDifficulty, genesisHash: com.genesisHash, bestBlockHash: bestBlock.computeBlockHash, @@ -60,6 
+60,23 @@ proc getStatus*(ctx: EthWireRef): EthState = forkNext: forkId.nextFork )) +proc getStatus69*(ctx: EthWireRef): Eth69State = + let + com = ctx.chain.com + bestBlock = ctx.chain.latestHeader + forkId = com.forkId(bestBlock.number, bestBlock.timestamp) + + Eth69State( + genesisHash: com.genesisHash, + forkId: ChainForkId( + forkHash: forkId.crc.toBytesBE, + forkNext: forkId.nextFork + ), + earliest: 0, + latest: bestBlock.number, + latestHash: bestBlock.computeBlockHash, + ) + proc getReceipts*(ctx: EthWireRef, hashes: openArray[Hash32]): seq[seq[Receipt]] = @@ -80,6 +97,26 @@ proc getReceipts*(ctx: EthWireRef, move(list) +proc getStoredReceipts*(ctx: EthWireRef, + hashes: openArray[Hash32]): + seq[seq[StoredReceipt]] = + var + list: seq[seq[StoredReceipt]] + totalBytes = 0 + + for blockHash in hashes: + var receiptList = ctx.chain.receiptsByBlockHash(blockHash).valueOr: + continue + + totalBytes += getEncodedLength(receiptList) + list.add(receiptList.to(seq[StoredReceipt])) + + if list.len >= MAX_RECEIPTS_SERVE or + totalBytes > SOFT_RESPONSE_LIMIT: + break + + move(list) + proc getPooledTransactions*(ctx: EthWireRef, hashes: openArray[Hash32]): seq[PooledTransaction] = @@ -127,7 +164,7 @@ proc getBlockBodies*(ctx: EthWireRef, if blk.header.number > ctx.chain.portal.limit: trace "handlers.getBlockBodies: blockBody older than expiry limit", blockHash continue - + totalBytes += getEncodedLength(blk.body) list.add blk.body diff --git a/execution_chain/sync/wire_protocol/implementation.nim b/execution_chain/sync/wire_protocol/implementation.nim deleted file mode 100644 index c71c3c23fb..0000000000 --- a/execution_chain/sync/wire_protocol/implementation.nim +++ /dev/null @@ -1,282 +0,0 @@ -# Nimbus - Ethereum Wire Protocol -# -# Copyright (c) 2018-2025 Status Research & Development GmbH -# Licensed under either of -# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or -# http://www.apache.org/licenses/LICENSE-2.0) -# * MIT license 
([LICENSE-MIT](LICENSE-MIT) or -# http://opensource.org/licenses/MIT) -# at your option. This file may not be copied, modified, or distributed -# except according to those terms. - -## This module implements Ethereum Wire Protocol version 68, `eth/68`. -## Specification: -## `eth/68 `_ - -import - stint, - chronicles, - chronos, - eth/common, - stew/byteutils, - ./trace_config, - ./types, - ./handler, - ../../networking/[p2p, p2p_types], - ../../utils/utils, - ../../common/logging - -export - types - -logScope: - topics = "eth68" - -const - protocolVersion* = 68 - prettyEthProtoName* = "[eth/" & $protocolVersion & "]" - - # Pickeled tracer texts - trEthRecvReceived* = - "<< " & prettyEthProtoName & " Received " - trEthRecvReceivedBlockHeaders* = - trEthRecvReceived & "BlockHeaders (0x04)" - trEthRecvReceivedBlockBodies* = - trEthRecvReceived & "BlockBodies (0x06)" - - trEthRecvProtocolViolation* = - "<< " & prettyEthProtoName & " Protocol violation, " - trEthRecvError* = - "<< " & prettyEthProtoName & " Error " - trEthRecvTimeoutWaiting* = - "<< " & prettyEthProtoName & " Timeout waiting " - trEthRecvDiscarding* = - "<< " & prettyEthProtoName & " Discarding " - - trEthSendSending* = - ">> " & prettyEthProtoName & " Sending " - trEthSendSendingGetBlockHeaders* = - trEthSendSending & "GetBlockHeaders (0x03)" - trEthSendSendingGetBlockBodies* = - trEthSendSending & "GetBlockBodies (0x05)" - - trEthSendReplying* = - ">> " & prettyEthProtoName & " Replying " - - trEthSendDelaying* = - ">> " & prettyEthProtoName & " Delaying " - - trEthRecvNewBlock* = - "<< " & prettyEthProtoName & " Received NewBlock" - trEthRecvNewBlockHashes* = - "<< " & prettyEthProtoName & " Received NewBlockHashes" - trEthSendNewBlock* = - ">> " & prettyEthProtoName & " Sending NewBlock" - trEthSendNewBlockHashes* = - ">> " & prettyEthProtoName & " Sending NewBlockHashes" - -when trEthTraceGossipOk: - import std/[sequtils,strutils] - - func toStr(w: openArray[int]): string = - func toStr(n: int): 
string = - if n == 0: "0" - else: n.toHex.strip(trailing=false,chars={'0'}).toLowerAscii - w.mapIt(it.toStr).join(":") - -p2pProtocol eth68(version = protocolVersion, - rlpxName = "eth", - peerState = EthPeerState, - networkState = EthWireRef, - useRequestIds = true): - - onPeerConnected do (peer: Peer): - let - network = peer.network - ctx = peer.networkState - status = ctx.getStatus() - - trace trEthSendSending & "Status (0x00)", peer, - td = status.totalDifficulty, - bestHash = short(status.bestBlockHash), - networkId = network.networkId, - genesis = short(status.genesisHash), - forkHash = status.forkId.forkHash.toHex, - forkNext = status.forkId.forkNext - - let m = await peer.status(protocolVersion, - network.networkId, - status.totalDifficulty, - status.bestBlockHash, - status.genesisHash, - status.forkId, - timeout = chronos.seconds(10)) - - when trEthTraceHandshakesOk: - trace "Handshake: Local and remote networkId", - local=network.networkId, remote=m.networkId - trace "Handshake: Local and remote genesisHash", - local=short(status.genesisHash), remote=short(m.genesisHash) - trace "Handshake: Local and remote forkId", - local=(status.forkId.forkHash.toHex & "/" & $status.forkId.forkNext), - remote=(m.forkId.forkHash.toHex & "/" & $m.forkId.forkNext) - - if m.networkId != network.networkId: - trace "Peer for a different network (networkId)", peer, - expectNetworkId=network.networkId, gotNetworkId=m.networkId - raise newException( - UselessPeerError, "Eth handshake for different network") - - if m.genesisHash != status.genesisHash: - trace "Peer for a different network (genesisHash)", peer, - expectGenesis=short(status.genesisHash), gotGenesis=short(m.genesisHash) - raise newException( - UselessPeerError, "Eth handshake for different network") - - trace "Peer matches our network", peer - peer.state.initialized = true - peer.state.bestDifficulty = m.totalDifficulty - peer.state.bestBlockHash = m.bestHash - - handshake: - # User message 0x00: Status. 
- proc status(peer: Peer, - ethVersionArg: uint64, - networkId: NetworkId, - totalDifficulty: DifficultyInt, - bestHash: Hash32, - genesisHash: Hash32, - forkId: ChainForkId) = - trace trEthRecvReceived & "Status (0x00)", peer, - networkId, totalDifficulty, bestHash=short(bestHash), genesisHash=short(genesisHash), - forkHash=forkId.forkHash.toHex, forkNext=forkId.forkNext - - # User message 0x01: NewBlockHashes. - proc newBlockHashes(peer: Peer, hashes: openArray[NewBlockHashesAnnounce]) = - when trEthTraceGossipOk: - trace trEthRecvReceived & "NewBlockHashes (0x01)", peer, - hashes=hashes.len - - raise newException(EthP2PError, "block broadcasts disallowed") - - # User message 0x02: Transactions. - proc transactions(peer: Peer, transactions: openArray[Transaction]) = - when trEthTraceGossipOk: - trace trEthRecvReceived & "Transactions (0x02)", peer, - transactions=transactions.len - - let ctx = peer.networkState() - ctx.handleAnnouncedTxs(transactions) - - requestResponse: - # User message 0x03: GetBlockHeaders. - proc getBlockHeaders(peer: Peer, request: BlockHeadersRequest) = - when trEthTracePacketsOk: - trace trEthRecvReceived & "GetBlockHeaders (0x03)", peer, - count=request.maxResults - - let ctx = peer.networkState() - let headers = ctx.getBlockHeaders(request) - if headers.len > 0: - trace trEthSendReplying & "with BlockHeaders (0x04)", peer, - sent=headers.len, requested=request.maxResults - else: - trace trEthSendReplying & "EMPTY BlockHeaders (0x04)", peer, - sent=0, requested=request.maxResults - - await response.send(headers) - - # User message 0x04: BlockHeaders. - proc blockHeaders(p: Peer, headers: openArray[Header]) - - requestResponse: - # User message 0x05: GetBlockBodies. 
- proc getBlockBodies(peer: Peer, hashes: openArray[Hash32]) = - trace trEthRecvReceived & "GetBlockBodies (0x05)", peer, - hashes=hashes.len - - let ctx = peer.networkState() - let bodies = ctx.getBlockBodies(hashes) - if bodies.len > 0: - trace trEthSendReplying & "with BlockBodies (0x06)", peer, - sent=bodies.len, requested=hashes.len - else: - trace trEthSendReplying & "EMPTY BlockBodies (0x06)", peer, - sent=0, requested=hashes.len - - await response.send(bodies) - - # User message 0x06: BlockBodies. - proc blockBodies(peer: Peer, blocks: openArray[BlockBody]) - - # User message 0x07: NewBlock. - proc newBlock(peer: Peer, blk: EthBlock, totalDifficulty: DifficultyInt) = - # (Note, needs to use `EthBlock` instead of its alias `NewBlockAnnounce` - # because either `p2pProtocol` or RLPx doesn't work with an alias.) - when trEthTraceGossipOk: - trace trEthRecvReceived & "NewBlock (0x07)", peer, - totalDifficulty, - blockNumber = blk.header.number, - blockDifficulty = blk.header.difficulty - - raise newException(EthP2PError, "block broadcasts disallowed") - - # User message 0x08: NewPooledTransactionHashes. - proc newPooledTransactionHashes( - peer: Peer, - txTypes: seq[byte], - txSizes: openArray[uint64], - txHashes: openArray[Hash32] - ) = - when trEthTraceGossipOk: - trace trEthRecvReceived & "NewPooledTransactionHashes (0x08)", peer, - txTypes=txTypes.toHex, txSizes=txSizes.toStr, - hashes=txHashes.len - - # TODO: implementation - - requestResponse: - # User message 0x09: GetPooledTransactions. 
- proc getPooledTransactions(peer: Peer, txHashes: openArray[Hash32]) = - trace trEthRecvReceived & "GetPooledTransactions (0x09)", peer, - hashes=txHashes.len - - let ctx = peer.networkState() - let txs = ctx.getPooledTransactions(txHashes) - if txs.len > 0: - trace trEthSendReplying & "with PooledTransactions (0x0a)", peer, - sent=txs.len, requested=txHashes.len - else: - trace trEthSendReplying & "EMPTY PooledTransactions (0x0a)", peer, - sent=0, requested=txHashes.len - - await response.send(txs) - - # User message 0x0a: PooledTransactions. - proc pooledTransactions( - peer: Peer, transactions: openArray[PooledTransaction]) - - # User message 0x0d: GetNodeData -- removed, was so 66ish - # User message 0x0e: NodeData -- removed, was so 66ish - - nextId 0x0f - - requestResponse: - # User message 0x0f: GetReceipts. - proc getReceipts(peer: Peer, hashes: openArray[Hash32]) = - trace trEthRecvReceived & "GetReceipts (0x0f)", peer, - hashes=hashes.len - - let ctx = peer.networkState() - let rec = ctx.getReceipts(hashes) - if rec.len > 0: - trace trEthSendReplying & "with Receipts (0x10)", peer, - sent=rec.len, requested=hashes.len - else: - trace trEthSendReplying & "EMPTY Receipts (0x10)", peer, - sent=0, requested=hashes.len - - await response.send(rec) - - # User message 0x10: Receipts. 
- proc receipts(peer: Peer, receipts: openArray[seq[Receipt]]) diff --git a/execution_chain/sync/wire_protocol/requester.nim b/execution_chain/sync/wire_protocol/requester.nim index 26c06eeab1..970572571a 100644 --- a/execution_chain/sync/wire_protocol/requester.nim +++ b/execution_chain/sync/wire_protocol/requester.nim @@ -21,24 +21,37 @@ export rlpx, p2p_types -const - protocolVersion* = 68 - defineProtocol(PROTO = eth68, - version = protocolVersion, + version = 68, rlpxName = "eth", peerState = EthPeerState, networkState = EthWireRef) +defineProtocol(PROTO = eth69, + version = 69, + rlpxName = "eth", + peerState = Eth69PeerState, + networkState = EthWireRef) + type - StatusPacket* = object - ethVersion*: uint64 + Status68Packet* = object + version*: uint64 networkId*: NetworkId totalDifficulty*: DifficultyInt bestHash*: Hash32 genesisHash*: Hash32 forkId*: ChainForkId + # https://github.com/ethereum/devp2p/blob/b0c213de97978053a0f62c3ea4d23c0a3d8784bc/caps/eth.md#status-0x00 + Status69Packet* = object + version*: uint64 + networkId*: NetworkId + genesisHash*: Hash32 + forkId*: ChainForkId + earliest*: uint64 # earliest available full block + latest*: uint64 # latest available full block + latestHash*: Hash32 # hash of latest available full block + BlockHeadersPacket* = object headers*: seq[Header] @@ -51,6 +64,9 @@ type ReceiptsPacket* = object receipts*: seq[seq[Receipt]] + StoredReceiptsPacket* = object + receipts*: seq[seq[StoredReceipt]] + NewBlockHashesPacket* = object hashes*: seq[NewBlockHashesAnnounce] @@ -66,6 +82,11 @@ type txSizes*: seq[uint64] txHashes*: seq[Hash32] + BlockRangeUpdatePacket* = object + earliest*: uint64 + latest*: uint64 + latestHash*: Hash32 + const StatusMsg* = 0'u64 NewBlockHashesMsg* = 1'u64 @@ -80,73 +101,144 @@ const PooledTransactionsMsg* = 10'u64 GetReceiptsMsg* = 15'u64 ReceiptsMsg* = 16'u64 + # 
https://github.com/ethereum/devp2p/blob/b0c213de97978053a0f62c3ea4d23c0a3d8784bc/caps/eth.md#blockrangeupdate-0x11 + BlockRangeUpdateMsg* = 0x11'u64 + +func to*(list: openArray[Receipt], _: type seq[StoredReceipt]): seq[StoredReceipt] = + for x in list: + result.add x.to(StoredReceipt) + +func to*(list: openArray[StoredReceipt], _: type seq[Receipt]): seq[Receipt] = + for x in list: + result.add x.to(Receipt) -proc status*(peer: Peer; packet: StatusPacket; +func to*(rec: StoredReceiptsPacket, _: type ReceiptsPacket): ReceiptsPacket = + for x in rec.receipts: + result.receipts.add x.to(seq[Receipt]) + +proc status68*(peer: Peer; packet: Status68Packet; timeout: Duration = milliseconds(10000'i64)): - Future[StatusPacket] {.async: (raises: [CancelledError, EthP2PError], raw: true).} = + Future[Status68Packet] {.async: (raises: [CancelledError, EthP2PError], raw: true).} = let sendingFut = eth68.rlpxSendMessage(peer, StatusMsg, - packet.ethVersion, + packet.version, packet.networkId, packet.totalDifficulty, packet.bestHash, packet.genesisHash, packet.forkId) - responseFut = eth68.nextMsg(peer, StatusPacket, StatusMsg) - handshakeImpl[StatusPacket](peer, sendingFut, responseFut, timeout) + responseFut = eth68.nextMsg(peer, Status68Packet, StatusMsg) + handshakeImpl[Status68Packet](peer, sendingFut, responseFut, timeout) + +proc status69*(peer: Peer; packet: Status69Packet; + timeout: Duration = milliseconds(10000'i64)): + Future[Status69Packet] {.async: (raises: [CancelledError, EthP2PError], raw: true).} = + let + sendingFut = eth69.rlpxSendMessage(peer, StatusMsg, + packet.version, + packet.networkId, + packet.genesisHash, + packet.forkId, + packet.earliest, + packet.latest, + packet.latestHash) + + responseFut = eth69.nextMsg(peer, Status69Packet, StatusMsg) + handshakeImpl[Status69Packet](peer, sendingFut, responseFut, timeout) proc transactions*(peer: Peer; transactions: openArray[Transaction]): Future[ void] {.async: (raises: [CancelledError, 
EthP2PError], raw: true).} = - eth68.rlpxSendMessage(peer, TransactionMsg, transactions) + if peer.supports(eth69): + eth69.rlpxSendMessage(peer, TransactionMsg, transactions) + else: + eth68.rlpxSendMessage(peer, TransactionMsg, transactions) proc getBlockHeaders*(peer: Peer; request: BlockHeadersRequest; timeout: Duration = milliseconds(10000'i64)): Future[ Opt[BlockHeadersPacket]] {.async: (raises: [CancelledError, EthP2PError], raw: true).} = - eth68.rlpxSendRequest(peer, GetBlockHeadersMsg, request) + if peer.supports(eth69): + eth69.rlpxSendRequest(peer, GetBlockHeadersMsg, request) + else: + eth68.rlpxSendRequest(peer, GetBlockHeadersMsg, request) proc blockHeaders*(responder: Responder; headers: openArray[Header]): Future[void] {. async: (raises: [CancelledError, EthP2PError], raw: true).} = - eth68.rlpxSendMessage(responder, BlockHeadersMsg, headers) + if responder.supports(eth69): + eth69.rlpxSendMessage(responder, BlockHeadersMsg, headers) + else: + eth68.rlpxSendMessage(responder, BlockHeadersMsg, headers) proc getBlockBodies*(peer: Peer; packet: BlockBodiesRequest; timeout: Duration = milliseconds(10000'i64)): Future[ Opt[BlockBodiesPacket]] {.async: (raises: [CancelledError, EthP2PError], raw: true).} = - eth68.rlpxSendRequest(peer, GetBlockBodiesMsg, packet.blockHashes) + if peer.supports(eth69): + eth69.rlpxSendRequest(peer, GetBlockBodiesMsg, packet.blockHashes) + else: + eth68.rlpxSendRequest(peer, GetBlockBodiesMsg, packet.blockHashes) proc blockBodies*(responder: Responder; bodies: openArray[BlockBody]): Future[void] {. async: (raises: [CancelledError, EthP2PError], raw: true).} = - eth68.rlpxSendMessage(responder, BlockBodiesMsg, bodies) + if responder.supports(eth69): + eth69.rlpxSendMessage(responder, BlockBodiesMsg, bodies) + else: + eth68.rlpxSendMessage(responder, BlockBodiesMsg, bodies) proc newPooledTransactionHashes*(peer: Peer; txTypes: seq[byte]; txSizes: openArray[uint64]; txHashes: openArray[Hash32]): Future[void] {. 
async: (raises: [CancelledError, EthP2PError], raw: true).} = - eth68.rlpxSendMessage(peer, NewPooledTransactionHashesMsg, - txTypes, txSizes, txHashes) + if peer.supports(eth69): + eth69.rlpxSendMessage(peer, NewPooledTransactionHashesMsg, + txTypes, txSizes, txHashes) + else: + eth68.rlpxSendMessage(peer, NewPooledTransactionHashesMsg, + txTypes, txSizes, txHashes) proc getPooledTransactions*(peer: Peer; packet: PooledTransactionsRequest; timeout: Duration = milliseconds(10000'i64)): Future[ Opt[PooledTransactionsPacket]] {.async: ( raises: [CancelledError, EthP2PError], raw: true).} = - eth68.rlpxSendRequest(peer, GetPooledTransactionsMsg, packet.txHashes) + if peer.supports(eth69): + eth69.rlpxSendRequest(peer, GetPooledTransactionsMsg, packet.txHashes) + else: + eth68.rlpxSendRequest(peer, GetPooledTransactionsMsg, packet.txHashes) proc pooledTransactions*(responder: Responder; transactions: openArray[PooledTransaction]): Future[ void] {.async: (raises: [CancelledError, EthP2PError], raw: true).} = - eth68.rlpxSendMessage(responder, PooledTransactionsMsg, transactions) + if responder.supports(eth69): + eth69.rlpxSendMessage(responder, PooledTransactionsMsg, transactions) + else: + eth68.rlpxSendMessage(responder, PooledTransactionsMsg, transactions) proc getReceipts*(peer: Peer; packet: ReceiptsRequest; timeout: Duration = milliseconds(10000'i64)): Future[ Opt[ReceiptsPacket]] {.async: (raises: [CancelledError, EthP2PError], raw: true).} = - eth68.rlpxSendRequest(peer, GetReceiptsMsg, packet.blockHashes) + if peer.supports(eth69): + eth69.rlpxSendRequest(peer, GetReceiptsMsg, packet.blockHashes) + else: + eth68.rlpxSendRequest(peer, GetReceiptsMsg, packet.blockHashes) proc receipts*(responder: Responder; receipts: openArray[seq[Receipt]]): Future[void] {. 
async: (raises: [CancelledError, EthP2PError], raw: true).} = + doAssert(responder.supports(eth68), "'receipts' function with 'Receipt' param only available for eth/68") eth68.rlpxSendMessage(responder, ReceiptsMsg, receipts) + +proc receipts*(responder: Responder; + receipts: openArray[seq[StoredReceipt]]): Future[void] {. + async: (raises: [CancelledError, EthP2PError], raw: true).} = + doAssert(responder.supports(eth69), "'receipts' function with 'StoredReceipt' param only available for eth/69") + eth69.rlpxSendMessage(responder, ReceiptsMsg, receipts) + +proc blockRangeUpdate*(peer: Peer; packet: BlockRangeUpdatePacket): Future[void] {. + async: (raises: [CancelledError, EthP2PError], raw: true).} = + doAssert(peer.supports(eth69), "'blockRangeUpdate' function only available for eth/69") + eth69.rlpxSendMessage(peer, BlockRangeUpdateMsg, + packet.earliest, packet.latest, packet.latestHash) diff --git a/execution_chain/sync/wire_protocol/responder.nim b/execution_chain/sync/wire_protocol/responder.nim index cc9583ce96..5a61e495fd 100644 --- a/execution_chain/sync/wire_protocol/responder.nim +++ b/execution_chain/sync/wire_protocol/responder.nim @@ -13,7 +13,7 @@ import stew/byteutils, ./handler, ./requester, - ./trace_config, + ./trace_config, ../../utils/utils, ../../common/logging, ../../networking/p2p_protocol_dsl, @@ -23,10 +23,10 @@ export requester logScope: - topics = "eth68" + topics = "eth68/69" const - prettyEthProtoName* = "[eth/" & $protocolVersion & "]" + prettyEthProtoName* = "[eth/68/69]" # Pickeled tracer texts trEthRecvReceived* = @@ -67,7 +67,7 @@ const trEthSendNewBlockHashes* = ">> " & prettyEthProtoName & " Sending NewBlockHashes" -proc statusUserHandler(peer: Peer; packet: StatusPacket) {. +proc status68UserHandler(peer: Peer; packet: Status68Packet) {. 
async: (raises: [CancelledError, EthP2PError]).} = trace trEthRecvReceived & "Status (0x00)", peer, networkId = packet.networkId, @@ -77,14 +77,32 @@ proc statusUserHandler(peer: Peer; packet: StatusPacket) {. forkHash = packet.forkId.forkHash.toHex, forkNext = packet.forkId.forkNext -proc statusThunk(peer: Peer; data: Rlp) {. +proc status68Thunk(peer: Peer; data: Rlp) {. async: (raises: [CancelledError, EthP2PError]).} = - eth68.rlpxWithPacketHandler(StatusPacket, peer, data, - [ethVersion, networkId, + eth68.rlpxWithPacketHandler(Status68Packet, peer, data, + [version, networkId, totalDifficulty, bestHash, genesisHash, forkId]): - await statusUserHandler(peer, packet) + await status68UserHandler(peer, packet) +proc status69UserHandler(peer: Peer; packet: Status69Packet) {. + async: (raises: [CancelledError, EthP2PError]).} = + trace trEthRecvReceived & "Status (0x00)", peer, + networkId = packet.networkId, + genesisHash = packet.genesisHash.short, + forkHash = packet.forkId.forkHash.toHex, + forkNext = packet.forkId.forkNext, + earliest = packet.earliest, + latest = packet.latest, + latestHash = packet.latestHash.short + +proc status69Thunk(peer: Peer; data: Rlp) {. + async: (raises: [CancelledError, EthP2PError]).} = + eth69.rlpxWithPacketHandler(Status69Packet, peer, data, + [version, networkId, + genesisHash, forkId, + earliest, latest, latestHash]): + await status69UserHandler(peer, packet) proc newBlockHashesUserHandler(peer: Peer; packet: NewBlockHashesPacket) {. async: (raises: [CancelledError, EthP2PError]).} = @@ -92,27 +110,26 @@ proc newBlockHashesUserHandler(peer: Peer; packet: NewBlockHashesPacket) {. trace trEthRecvReceived & "NewBlockHashes (0x01)", peer, hashes = packet.hashes.len raise newException(EthP2PError, "block broadcasts disallowed") -proc newBlockHashesThunk(peer: Peer; data: Rlp) {. +proc newBlockHashesThunk[PROTO](peer: Peer; data: Rlp) {. 
async: (raises: [CancelledError, EthP2PError]).} = - eth68.rlpxWithPacketHandler(NewBlockHashesPacket, peer, data, [hashes]): + PROTO.rlpxWithPacketHandler(NewBlockHashesPacket, peer, data, [hashes]): await newBlockHashesUserHandler(peer, packet) - -proc transactionsUserHandler(peer: Peer; packet: TransactionsPacket) {. +proc transactionsUserHandler[PROTO](peer: Peer; packet: TransactionsPacket) {. async: (raises: [CancelledError, EthP2PError]).} = when trEthTraceGossipOk: trace trEthRecvReceived & "Transactions (0x02)", peer, transactions = packet.transactions.len - let ctx = peer.networkState(eth68) + let ctx = peer.networkState(PROTO) ctx.handleAnnouncedTxs(packet) -proc transactionsThunk(peer: Peer; data: Rlp) {. +proc transactionsThunk[PROTO](peer: Peer; data: Rlp) {. async: (raises: [CancelledError, EthP2PError]).} = - eth68.rlpxWithPacketHandler(TransactionsPacket, peer, data, [transactions]): - await transactionsUserHandler(peer, packet) + PROTO.rlpxWithPacketHandler(TransactionsPacket, peer, data, [transactions]): + await transactionsUserHandler[PROTO](peer, packet) -proc getBlockHeadersUserHandler(response: Responder; +proc getBlockHeadersUserHandler[PROTO](response: Responder; request: BlockHeadersRequest) {. async: (raises: [CancelledError, EthP2PError]).} = @@ -120,7 +137,7 @@ proc getBlockHeadersUserHandler(response: Responder; when trEthTracePacketsOk: trace trEthRecvReceived & "GetBlockHeaders (0x03)", peer, count = request.maxResults - let ctx = peer.networkState(eth68) + let ctx = peer.networkState(PROTO) let headers = ctx.getBlockHeaders(request) if headers.len > 0: trace trEthSendReplying & "with BlockHeaders (0x04)", peer, @@ -130,30 +147,30 @@ proc getBlockHeadersUserHandler(response: Responder; requested = request.maxResults await response.blockHeaders(headers) -proc getBlockHeadersThunk(peer: Peer; data: Rlp) {. +proc getBlockHeadersThunk[PROTO](peer: Peer; data: Rlp) {. 
async: (raises: [CancelledError, EthP2PError]).} = - eth68.rlpxWithPacketResponder(BlockHeadersRequest, peer, data): - await getBlockHeadersUserHandler(response, packet) + PROTO.rlpxWithPacketResponder(BlockHeadersRequest, peer, data): + await getBlockHeadersUserHandler[PROTO](response, packet) -proc blockHeadersThunk(peer: Peer; data: Rlp) {. +proc blockHeadersThunk[PROTO](peer: Peer; data: Rlp) {. async: (raises: [CancelledError, EthP2PError]).} = - eth68.rlpxWithFutureHandler(BlockHeadersPacket, + PROTO.rlpxWithFutureHandler(BlockHeadersPacket, BlockHeadersMsg, peer, data, [headers]) -proc blockBodiesThunk(peer: Peer; data: Rlp) {. +proc blockBodiesThunk[PROTO](peer: Peer; data: Rlp) {. async: (raises: [CancelledError, EthP2PError]).} = - eth68.rlpxWithFutureHandler(BlockBodiesPacket, + PROTO.rlpxWithFutureHandler(BlockBodiesPacket, BlockBodiesMsg, peer, data, [bodies]) -proc getBlockBodiesUserHandler(response: Responder; hashes: seq[Hash32]) {. +proc getBlockBodiesUserHandler[PROTO](response: Responder; hashes: seq[Hash32]) {. async: (raises: [CancelledError, EthP2PError]).} = let peer = response.peer trace trEthRecvReceived & "GetBlockBodies (0x05)", peer, hashes = hashes.len - let ctx = peer.networkState(eth68) + let ctx = peer.networkState(PROTO) let bodies = ctx.getBlockBodies(hashes) if bodies.len > 0: trace trEthSendReplying & "with BlockBodies (0x06)", peer, @@ -163,10 +180,10 @@ proc getBlockBodiesUserHandler(response: Responder; hashes: seq[Hash32]) {. requested = hashes.len await response.blockBodies(bodies) -proc getBlockBodiesThunk(peer: Peer; data: Rlp) {. +proc getBlockBodiesThunk[PROTO](peer: Peer; data: Rlp) {. 
async: (raises: [CancelledError, EthP2PError]).} = - eth68.rlpxWithPacketResponder(seq[Hash32], peer, data): - await getBlockBodiesUserHandler(response, packet) + PROTO.rlpxWithPacketResponder(seq[Hash32], peer, data): + await getBlockBodiesUserHandler[PROTO](response, packet) proc newBlockUserHandler(peer: Peer; packet: NewBlockPacket) {. @@ -177,9 +194,9 @@ proc newBlockUserHandler(peer: Peer; packet: NewBlockPacket) {. blockDifficulty = packet.blk.header.difficulty raise newException(EthP2PError, "block broadcasts disallowed") -proc newBlockThunk(peer: Peer; data: Rlp) {. +proc newBlockThunk[PROTO](peer: Peer; data: Rlp) {. async: (raises: [CancelledError, EthP2PError]).} = - eth68.rlpxWithPacketHandler(NewBlockPacket, peer, data, [blk, totalDifficulty]): + PROTO.rlpxWithPacketHandler(NewBlockPacket, peer, data, [blk, totalDifficulty]): await newBlockUserHandler(peer, packet) @@ -191,21 +208,21 @@ proc newPooledTransactionHashesUserHandler(peer: Peer; packet: NewPooledTransact txTypes = packet.txTypes.toHex, txSizes = packet.txSizes.toStr, hashes = packet.txHashes.len -proc newPooledTransactionHashesThunk(peer: Peer; data: Rlp) {. +proc newPooledTransactionHashesThunk[PROTO](peer: Peer; data: Rlp) {. async: (raises: [CancelledError, EthP2PError]).} = - eth68.rlpxWithPacketHandler(NewPooledTransactionHashesPacket, + PROTO.rlpxWithPacketHandler(NewPooledTransactionHashesPacket, peer, data, [txTypes, txSizes, txHashes]): await newPooledTransactionHashesUserHandler(peer, packet) -proc getPooledTransactionsUserHandler(response: Responder; +proc getPooledTransactionsUserHandler[PROTO](response: Responder; txHashes: seq[Hash32]) {. 
async: (raises: [CancelledError, EthP2PError]).} = let peer = response.peer trace trEthRecvReceived & "GetPooledTransactions (0x09)", peer, hashes = txHashes.len - let ctx = peer.networkState(eth68) + let ctx = peer.networkState(PROTO) let txs = ctx.getPooledTransactions(txHashes) if txs.len > 0: trace trEthSendReplying & "with PooledTransactions (0x0a)", peer, @@ -215,24 +232,27 @@ proc getPooledTransactionsUserHandler(response: Responder; requested = txHashes.len await response.pooledTransactions(txs) -proc getPooledTransactionsThunk(peer: Peer; data: Rlp) {. +proc getPooledTransactionsThunk[PROTO](peer: Peer; data: Rlp) {. async: (raises: [CancelledError, EthP2PError]).} = - eth68.rlpxWithPacketResponder(seq[Hash32], peer, data): - await getPooledTransactionsUserHandler(response, packet) + PROTO.rlpxWithPacketResponder(seq[Hash32], peer, data): + await getPooledTransactionsUserHandler[PROTO](response, packet) -proc pooledTransactionsThunk(peer: Peer; data: Rlp) {. +proc pooledTransactionsThunk[PROTO](peer: Peer; data: Rlp) {. async: (raises: [CancelledError, EthP2PError]).} = - eth68.rlpxWithFutureHandler(PooledTransactionsPacket, + PROTO.rlpxWithFutureHandler(PooledTransactionsPacket, PooledTransactionsMsg, peer, data, [transactions]) -proc getReceiptsUserHandler(response: Responder; hashes: seq[Hash32]) {. +proc getReceiptsUserHandler[PROTO](response: Responder; hashes: seq[Hash32]) {. async: (raises: [CancelledError, EthP2PError]).} = let peer = response.peer trace trEthRecvReceived & "GetReceipts (0x0f)", peer, hashes = hashes.len - let ctx = peer.networkState(eth68) - let rec = ctx.getReceipts(hashes) + let ctx = peer.networkState(PROTO) + let rec = when PROTO is eth69: + ctx.getStoredReceipts(hashes) + else: + ctx.getReceipts(hashes) if rec.len > 0: trace trEthSendReplying & "with Receipts (0x10)", peer, sent = rec.len, requested = hashes.len @@ -241,16 +261,37 @@ proc getReceiptsUserHandler(response: Responder; hashes: seq[Hash32]) {. 
requested = hashes.len await response.receipts(rec) -proc getReceiptsThunk(peer: Peer; data: Rlp) {. +proc getReceiptsThunk[PROTO](peer: Peer; data: Rlp) {. async: (raises: [CancelledError, EthP2PError]).} = - eth68.rlpxWithPacketResponder(seq[Hash32], peer, data): - await getReceiptsUserHandler(response, packet) + PROTO.rlpxWithPacketResponder(seq[Hash32], peer, data): + await getReceiptsUserHandler[PROTO](response, packet) +proc receiptsThunk[PROTO](peer: Peer; data: Rlp) {. + async: (raises: [CancelledError, EthP2PError]).} = + when PROTO is eth69: + PROTO.rlpxWithFutureHandler(StoredReceiptsPacket, ReceiptsPacket, + ReceiptsMsg, peer, data, [receipts]) + else: + PROTO.rlpxWithFutureHandler(ReceiptsPacket, + ReceiptsMsg, peer, data, [receipts]) + +proc blockRangeUpdateUserHandler(peer: Peer; packet: BlockRangeUpdatePacket) {. + async: (raises: [CancelledError, EthP2PError]).} = -proc receiptsThunk(peer: Peer; data: Rlp) {. + when trEthTraceGossipOk: + trace trEthRecvReceived & "BlockRangeUpdate (0x11)", peer, + earliest = packet.earliest, latest = packet.latest, + latestHash = packet.latestHash.short + + peer.state(eth69).earliest = packet.earliest + peer.state(eth69).latest = packet.latest + peer.state(eth69).latestHash = packet.latestHash + +proc blockRangeUpdateThunk(peer: Peer; data: Rlp) {. 
async: (raises: [CancelledError, EthP2PError]).} = - eth68.rlpxWithFutureHandler(ReceiptsPacket, - ReceiptsMsg, peer, data, [receipts]) + eth68.rlpxWithPacketHandler(BlockRangeUpdatePacket, + peer, data, [earliest, latest, latestHash]): + await blockRangeUpdateUserHandler(peer, packet) proc eth68PeerConnected(peer: Peer) {.async: ( @@ -258,9 +299,9 @@ proc eth68PeerConnected(peer: Peer) {.async: ( let network = peer.network ctx = peer.networkState(eth68) - status = ctx.getStatus() - packet = StatusPacket( - ethVersion: protocolVersion, + status = ctx.getStatus68() + packet = Status68Packet( + version: eth68.protocolVersion, networkId : network.networkId, totalDifficulty: status.totalDifficulty, bestHash: status.bestBlockHash, @@ -276,7 +317,7 @@ proc eth68PeerConnected(peer: Peer) {.async: ( forkHash = status.forkId.forkHash.toHex, forkNext = status.forkId.forkNext - let m = await peer.status(packet, timeout = chronos.seconds(10)) + let m = await peer.status68(packet, timeout = chronos.seconds(10)) when trEthTraceHandshakesOk: trace "Handshake: Local and remote networkId", local = network.networkId, remote = m.networkId @@ -297,42 +338,104 @@ proc eth68PeerConnected(peer: Peer) {.async: ( trace "Peer matches our network", peer peer.state(eth68).initialized = true - peer.state(eth68).bestDifficulty = m.totalDifficulty - peer.state(eth68).bestBlockHash = m.bestHash - -proc eth68Registration() = +proc eth69PeerConnected(peer: Peer) {.async: ( + raises: [CancelledError, EthP2PError]).} = let - protocol = eth68.initProtocol() + network = peer.network + ctx = peer.networkState(eth69) + status = ctx.getStatus69() + packet = Status69Packet( + version: eth69.protocolVersion, + networkId : network.networkId, + genesisHash: status.genesisHash, + forkId: status.forkId, + earliest: status.earliest, + latest: status.latest, + latestHash: status.latestHash, + ) - setEventHandlers(protocol, eth68PeerConnected, nil) - registerMsg(protocol, StatusMsg, "status", - statusThunk, 
StatusPacket) + trace trEthSendSending & "Status (0x00)", peer, + earliest = status.earliest, + latest = status.latest, + latestHash = status.latestHash.short, + networkId = network.networkId, + genesis = short(status.genesisHash), + forkHash = status.forkId.forkHash.toHex, + forkNext = status.forkId.forkNext + + let m = await peer.status69(packet, timeout = chronos.seconds(10)) + when trEthTraceHandshakesOk: + trace "Handshake: Local and remote networkId", local = network.networkId, + remote = m.networkId + trace "Handshake: Local and remote genesisHash", + local = short(status.genesisHash), remote = short(m.genesisHash) + trace "Handshake: Local and remote forkId", local = ( + status.forkId.forkHash.toHex & "/" & $status.forkId.forkNext), + remote = (m.forkId.forkHash.toHex & "/" & $m.forkId.forkNext) + if m.networkId != network.networkId: + trace "Peer for a different network (networkId)", peer, + expectNetworkId = network.networkId, gotNetworkId = m.networkId + raise newException(UselessPeerError, "Eth handshake for different network") + if m.genesisHash != status.genesisHash: + trace "Peer for a different network (genesisHash)", peer, + expectGenesis = short(status.genesisHash), + gotGenesis = short(m.genesisHash) + raise newException(UselessPeerError, "Eth handshake for different network") + trace "Peer matches our network", peer + + peer.state(eth69).initialized = true + peer.state(eth69).earliest = m.earliest + peer.state(eth69).latest = m.latest + peer.state(eth69).latestHash = m.latestHash + +template registerCommonThunk(protocol: ProtocolInfo, PROTO: type) = registerMsg(protocol, NewBlockHashesMsg, "newBlockHashes", - newBlockHashesThunk, NewBlockHashesPacket) + newBlockHashesThunk[PROTO], NewBlockHashesPacket) registerMsg(protocol, TransactionMsg, "transactions", - transactionsThunk, TransactionsPacket) + transactionsThunk[PROTO], TransactionsPacket) registerMsg(protocol, BlockHeadersMsg, "blockHeaders", - blockHeadersThunk, BlockHeadersPacket) + 
blockHeadersThunk[PROTO], BlockHeadersPacket) registerMsg(protocol, GetBlockHeadersMsg, "getBlockHeaders", - getBlockHeadersThunk, BlockHeadersRequest) + getBlockHeadersThunk[PROTO], BlockHeadersRequest) registerMsg(protocol, BlockBodiesMsg, "blockBodies", - blockBodiesThunk, BlockBodiesPacket) + blockBodiesThunk[PROTO], BlockBodiesPacket) registerMsg(protocol, GetBlockBodiesMsg, "getBlockBodies", - getBlockBodiesThunk, BlockBodiesRequest) + getBlockBodiesThunk[PROTO], BlockBodiesRequest) registerMsg(protocol, NewBlockMsg, "newBlock", - newBlockThunk, NewBlockPacket) + newBlockThunk[PROTO], NewBlockPacket) registerMsg(protocol, NewPooledTransactionHashesMsg, "newPooledTransactionHashes", - newPooledTransactionHashesThunk, NewPooledTransactionHashesPacket) + newPooledTransactionHashesThunk[PROTO], NewPooledTransactionHashesPacket) registerMsg(protocol, PooledTransactionsMsg, "pooledTransactions", - pooledTransactionsThunk, PooledTransactionsPacket) + pooledTransactionsThunk[PROTO], PooledTransactionsPacket) registerMsg(protocol, GetPooledTransactionsMsg, "getPooledTransactions", - getPooledTransactionsThunk, PooledTransactionsRequest) + getPooledTransactionsThunk[PROTO], PooledTransactionsRequest) registerMsg(protocol, ReceiptsMsg, "receipts", - receiptsThunk, ReceiptsPacket) + receiptsThunk[PROTO], ReceiptsPacket) registerMsg(protocol, GetReceiptsMsg, "getReceipts", - getReceiptsThunk, ReceiptsRequest) + getReceiptsThunk[PROTO], ReceiptsRequest) + +proc eth68Registration() = + let + protocol = eth68.initProtocol() + setEventHandlers(protocol, eth68PeerConnected, nil) + registerMsg(protocol, StatusMsg, "status", + status68Thunk, Status68Packet) + registerCommonThunk(protocol, eth68) + registerProtocol(protocol) + +proc eth69Registration() = + let + protocol = eth69.initProtocol() + + setEventHandlers(protocol, eth69PeerConnected, nil) + registerMsg(protocol, StatusMsg, "status", + status69Thunk, Status69Packet) + registerCommonThunk(protocol, eth69) + 
registerMsg(protocol, BlockRangeUpdateMsg, "blockRangeUpdate", + blockRangeUpdateThunk, BlockRangeUpdatePacket) registerProtocol(protocol) eth68Registration() +eth69Registration() diff --git a/execution_chain/sync/wire_protocol/setup.nim b/execution_chain/sync/wire_protocol/setup.nim index 348d9685f8..c5e3847e38 100644 --- a/execution_chain/sync/wire_protocol/setup.nim +++ b/execution_chain/sync/wire_protocol/setup.nim @@ -23,10 +23,10 @@ proc addEthHandlerCapability*( node: EthereumNode; txPool: TxPoolRef; ) = - ## Install `eth` handlers. - node.addCapability( - requester.eth68, - EthWireRef.new(txPool)) + ## Install wire prototcol handlers for each cap. + let wire = EthWireRef.new(txPool) + node.addCapability(eth68, wire) + node.addCapability(eth69, wire) # ------------------------------------------------------------------------------ # End diff --git a/execution_chain/sync/wire_protocol/trace_config.nim b/execution_chain/sync/wire_protocol/trace_config.nim index db38ce1bf7..9738d01452 100644 --- a/execution_chain/sync/wire_protocol/trace_config.nim +++ b/execution_chain/sync/wire_protocol/trace_config.nim @@ -1,6 +1,6 @@ -# Nimbus - Ethereum Wire Protocol, version eth/65 +# nimbus-execution-client # -# Copyright (c) 2018-2021 Status Research & Development GmbH +# Copyright (c) 2018-2025 Status Research & Development GmbH # Licensed under either of # * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or # http://www.apache.org/licenses/LICENSE-2.0) @@ -17,15 +17,5 @@ const ## `trace` log each sync network message. trEthTraceHandshakesOk* = true ## `trace` log each network handshake message. - trEthTraceIndividualNodesOk* = true - ## `trace` log each trie node, account, storage, receipt, etc. - - # Some static noisy settings for `snap` debugging - trSnapTracePacketsOk* = true - ## `trace` log each sync network message. 
- - # Shut up particular eth context handler gossip - trMissingOrDisabledGossipOk* = true and false - ## Control `handleAnnouncedTxsHashes`, `handleAnnouncedTxsHashes`, etc. # End diff --git a/execution_chain/sync/wire_protocol/types.nim b/execution_chain/sync/wire_protocol/types.nim index 8886739f73..b179475eb0 100644 --- a/execution_chain/sync/wire_protocol/types.nim +++ b/execution_chain/sync/wire_protocol/types.nim @@ -13,7 +13,7 @@ import eth/common, ../../core/[chain, tx_pool] - + type NewBlockHashesAnnounce* = object hash*: Hash32 @@ -23,16 +23,26 @@ type forkHash*: array[4, byte] # The RLP encoding must be exactly 4 bytes. forkNext*: uint64 # The RLP encoding must be variable-length - EthState* = object + Eth68State* = object totalDifficulty*: DifficultyInt genesisHash*: Hash32 bestBlockHash*: Hash32 forkId*: ChainForkId + Eth69State* = object + genesisHash*: Hash32 + forkId*: ChainForkId + earliest*: uint64 + latest*: uint64 + latestHash*: Hash32 + EthPeerState* = ref object of RootRef initialized*: bool - bestBlockHash*: Hash32 - bestDifficulty*: DifficultyInt + + Eth69PeerState* = ref object of EthPeerState + earliest*: uint64 + latest*: uint64 + latestHash*: Hash32 BlockHeadersRequest* = object startBlock*: BlockHashOrNumber @@ -51,4 +61,3 @@ type EthWireRef* = ref object of RootRef chain* : ForkedChainRef txPool*: TxPoolRef - \ No newline at end of file diff --git a/hive_integration/nodocker/rpc/test_env.nim b/hive_integration/nodocker/rpc/test_env.nim index 58ae2e9796..d468037341 100644 --- a/hive_integration/nodocker/rpc/test_env.nim +++ b/hive_integration/nodocker/rpc/test_env.nim @@ -78,7 +78,7 @@ proc setupEnv*(taskPool: Taskpool): TestEnv = let ethCtx = newEthContext() - ethNode = setupEthNode(conf, ethCtx, eth) + ethNode = setupEthNode(conf, ethCtx, eth68, eth69) com = CommonRef.new(newCoreDbRef DefaultDbMemory, taskPool, conf.networkId, diff --git a/tests/test_rpc.nim b/tests/test_rpc.nim index 28074715f5..3210a98469 100644 --- 
a/tests/test_rpc.nim +++ b/tests/test_rpc.nim @@ -191,7 +191,7 @@ proc setupEnv(envFork: HardFork = MergeFork): TestEnv = serverApi = newServerAPI(txPool) client = setupClient(server.localAddress[0].port) ctx = newEthContext() - node = setupEthNode(conf, ctx, eth) + node = setupEthNode(conf, ctx, eth68, eth69) ctx.am.loadKeystores(keyStore).isOkOr: debugEcho error diff --git a/vendor/nim-eth b/vendor/nim-eth index 5957dce55a..a1b38c2591 160000 --- a/vendor/nim-eth +++ b/vendor/nim-eth @@ -1 +1 @@ -Subproject commit 5957dce55a4bfe00899ecc14006f72c6608b43df +Subproject commit a1b38c25919fa602292a4071631c8f07ee1b2114 From fb38003b90e0ddddeb0dd892ac2a5bdddfdbbb30 Mon Sep 17 00:00:00 2001 From: Advaita Saha Date: Thu, 15 May 2025 21:28:45 +0530 Subject: [PATCH 012/138] add a flag for disableMarchNative (#3286) --- Dockerfile | 2 +- Makefile | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index db59ac29b8..4a82251ba0 100644 --- a/Dockerfile +++ b/Dockerfile @@ -21,7 +21,7 @@ ADD . 
/root/nimbus-eth1 RUN cd /root/nimbus-eth1 \ && make -j$(nproc) update-from-ci \ - && make -j$(nproc) V=1 nimbus + && make -j$(nproc) DISABLE_MARCH_NATIVE=1 V=1 nimbus_execution_client # --------------------------------- # # Starting new image to reduce size # diff --git a/Makefile b/Makefile index 80a42e82ae..150e6a4c43 100644 --- a/Makefile +++ b/Makefile @@ -168,6 +168,11 @@ NIM_PARAMS += -d:release ifneq ($(if $(ENABLE_LINE_NUMBERS),$(ENABLE_LINE_NUMBERS),0),0) NIM_PARAMS += -d:chronicles_line_numbers:1 endif + +ifeq ($(DISABLE_MARCH_NATIVE),1) +NIM_PARAMS += -d:disableMarchNative +endif + ifeq ($(BOEHM_GC),1) NIM_PARAMS += --mm:boehm endif From 28c7b737263f4537183088cbd99e8df00760be6e Mon Sep 17 00:00:00 2001 From: tersec Date: Fri, 16 May 2025 22:50:34 +0000 Subject: [PATCH 013/138] rm vestigial EIP-7873 support (#3290) * rm vestigial EIP-7873 support * bump nim-eth and nim-web3 --- .../nodocker/engine/engine_client.nim | 4 +- tools/t8n/helpers.nim | 56 +++++++++---------- tools/t8n/types.nim | 3 +- vendor/nim-eth | 2 +- vendor/nim-web3 | 2 +- 5 files changed, 31 insertions(+), 36 deletions(-) diff --git a/hive_integration/nodocker/engine/engine_client.nim b/hive_integration/nodocker/engine/engine_client.nim index 88e317b3e8..fbaafb60b7 100644 --- a/hive_integration/nodocker/engine/engine_client.nim +++ b/hive_integration/nodocker/engine/engine_client.nim @@ -80,7 +80,7 @@ proc forkchoiceUpdated*(client: RpcClient, of Version.V1: return client.forkchoiceUpdatedV1(update, attr.V1) of Version.V2: return client.forkchoiceUpdatedV2(update, attr) of Version.V3: return client.forkchoiceUpdatedV3(update, attr) - of Version.V4: discard + of Version.V4, Version.V5: discard proc getPayloadV1*(client: RpcClient, payloadId: Bytes8): Result[ExecutionPayloadV1, string] = wrapTrySimpleRes: @@ -212,7 +212,7 @@ proc newPayload*(client: RpcClient, return client.newPayloadV3(payload.basePayload, payload.versionedHashes, payload.beaconRoot) - of Version.V4: + of Version.V4, 
Version.V5: # Osaka doesn't define any new newPayloadV5 return client.newPayloadV4(payload.basePayload, payload.versionedHashes, payload.beaconRoot, diff --git a/tools/t8n/helpers.nim b/tools/t8n/helpers.nim index c05818b025..a5043b92f2 100644 --- a/tools/t8n/helpers.nim +++ b/tools/t8n/helpers.nim @@ -46,7 +46,7 @@ template wrapValueError(body: untyped) = except ValueError as exc: r.raiseUnexpectedValue(exc.msg) -proc parseHexOrInt[T](x: string): T {.raises: [ValueError].} = +func parseHexOrInt[T](x: string): T {.raises: [ValueError].} = when T is UInt256: if x.startsWith("0x"): UInt256.fromHex(x) @@ -258,10 +258,6 @@ proc parseTxJson(txo: TxObject, chainId: ChainId): Result[Transaction, string] = required(maxFeePerGas) optional(accessList) required(authorizationList) - of TxEip7873: - required(chainId) - optional(accessList) - required(initCodes) # Ignore chainId if txType == TxLegacy if tx.txType > TxLegacy and tx.chainId != chainId: @@ -278,7 +274,7 @@ proc parseTxJson(txo: TxObject, chainId: ChainId): Result[Transaction, string] = required(s, S) ok(tx) -proc readNestedTx(rlp: var Rlp, chainId: ChainId): Result[Transaction, string] = +func readNestedTx(rlp: var Rlp, chainId: ChainId): Result[Transaction, string] = try: let tx = if rlp.isList: rlp.read(Transaction) @@ -292,7 +288,7 @@ proc readNestedTx(rlp: var Rlp, chainId: ChainId): Result[Transaction, string] = except RlpError as exc: err(exc.msg) -proc parseTxs*(ctx: var TransContext, chainId: ChainId) +func parseTxs*(ctx: var TransContext, chainId: ChainId) {.raises: [T8NError, RlpError].} = var numTxs = ctx.txsJson.len var rlp: Rlp @@ -311,7 +307,7 @@ proc parseTxs*(ctx: var TransContext, chainId: ChainId) for item in rlp: ctx.txList.add rlp.readNestedTx(chainId) -proc filterGoodTransactions*(ctx: TransContext): seq[Transaction] = +func filterGoodTransactions*(ctx: TransContext): seq[Transaction] = for txRes in ctx.txList: if txRes.isOk: result.add txRes.get @@ -336,7 +332,7 @@ proc parseEnv*(ctx: var 
TransContext, envFile: string) {.raises: [T8NError].} = wrapException: ctx.env = T8Conv.loadFile(envFile, EnvStruct) -proc parseTxsRlp*(ctx: var TransContext, hexData: string) {.raises: [ValueError].} = +func parseTxsRlp*(ctx: var TransContext, hexData: string) {.raises: [ValueError].} = ctx.txsRlp = hexToSeqByte(hexData) proc parseInputFromStdin*(ctx: var TransContext) {.raises: [T8NError].} = @@ -354,42 +350,42 @@ template stripLeadingZeros(value: string): string = cidx.inc value[cidx .. ^1] -proc `@@`*[K, V](x: Table[K, V]): JsonNode -proc `@@`*[T](x: seq[T]): JsonNode +func `@@`*[K, V](x: Table[K, V]): JsonNode +func `@@`*[T](x: seq[T]): JsonNode -proc to0xHex(x: UInt256): string = +func to0xHex(x: UInt256): string = "0x" & x.toHex -proc `@@`(x: uint64 | int64 | int): JsonNode = +func `@@`(x: uint64 | int64 | int): JsonNode = let hex = x.toHex.stripLeadingZeros %("0x" & hex.toLowerAscii) -proc `@@`(x: UInt256): JsonNode = +func `@@`(x: UInt256): JsonNode = %("0x" & x.toHex) -proc `@@`(x: Hash32): JsonNode = +func `@@`(x: Hash32): JsonNode = %("0x" & x.data.toHex) -proc `@@`*(x: seq[byte]): JsonNode = +func `@@`*(x: seq[byte]): JsonNode = %("0x" & x.toHex) -proc `@@`(x: bool): JsonNode = +func `@@`(x: bool): JsonNode = %(if x: "0x1" else: "0x0") -proc `@@`(x: openArray[byte]): JsonNode = +func `@@`(x: openArray[byte]): JsonNode = %("0x" & x.toHex) -proc `@@`(x: FixedBytes|Hash32|Address): JsonNode = +func `@@`(x: FixedBytes|Hash32|Address): JsonNode = @@(x.data) -proc toJson(x: Table[UInt256, UInt256]): JsonNode = +func toJson(x: Table[UInt256, UInt256]): JsonNode = # special case, we need to convert UInt256 into full 32 bytes # and not shorter result = newJObject() for k, v in x: result["0x" & k.dumpHex] = %("0x" & v.dumpHex) -proc `@@`(acc: GenesisAccount): JsonNode = +func `@@`(acc: GenesisAccount): JsonNode = result = newJObject() if acc.code.len > 0: result["code"] = @@(acc.code) @@ -399,22 +395,22 @@ proc `@@`(acc: GenesisAccount): JsonNode = if 
acc.storage.len > 0: result["storage"] = toJson(acc.storage) -proc `@@`[K, V](x: Table[K, V]): JsonNode = +func `@@`[K, V](x: Table[K, V]): JsonNode = result = newJObject() for k, v in x: result[k.to0xHex] = @@(v) -proc `@@`(x: Bloom): JsonNode = +func `@@`(x: Bloom): JsonNode = %("0x" & toHex(x)) -proc `@@`(x: Log): JsonNode = +func `@@`(x: Log): JsonNode = %{ "address": @@(x.address), "topics" : @@(x.topics), "data" : @@(x.data) } -proc `@@`(x: TxReceipt): JsonNode = +func `@@`(x: TxReceipt): JsonNode = result = %{ "root" : if x.root == default(Hash32): %("0x") else: @@(x.root), "status" : @@(x.status), @@ -430,29 +426,29 @@ proc `@@`(x: TxReceipt): JsonNode = if x.txType > TxLegacy: result["type"] = %("0x" & toHex(x.txType.int, 1)) -proc `@@`(x: RejectedTx): JsonNode = +func `@@`(x: RejectedTx): JsonNode = %{ "index": %(x.index), "error": %(x.error) } -proc `@@`[T](x: seq[T]): JsonNode = +func `@@`[T](x: seq[T]): JsonNode = result = newJArray() for c in x: result.add @@(c) -proc `@@`[N, T](x: array[N, T]): JsonNode = +func `@@`[N, T](x: array[N, T]): JsonNode = result = newJArray() for c in x: result.add @@(c) -proc `@@`[T](x: Opt[T]): JsonNode = +func `@@`[T](x: Opt[T]): JsonNode = if x.isNone: newJNull() else: @@(x.get()) -proc `@@`*(x: ExecutionResult): JsonNode = +func `@@`*(x: ExecutionResult): JsonNode = result = %{ "stateRoot" : @@(x.stateRoot), "txRoot" : @@(x.txRoot), diff --git a/tools/t8n/types.nim b/tools/t8n/types.nim index 3f95c15368..616149b659 100644 --- a/tools/t8n/types.nim +++ b/tools/t8n/types.nim @@ -74,7 +74,6 @@ type maxFeePerBlobGas* : Opt[UInt256] blobVersionedHashes* : Opt[seq[Hash32]] authorizationList* : Opt[seq[Authorization]] - initCodes* : Opt[seq[seq[byte]]] TxList* = seq[Result[Transaction, string]] @@ -130,5 +129,5 @@ const ErrorIO* = 11.T8NExitCode ErrorRlp* = 12.T8NExitCode -proc newError*(code: T8NExitCode, msg: string): ref T8NError = +func newError*(code: T8NExitCode, msg: string): ref T8NError = (ref T8NError)(exitCode: 
code, msg: msg) diff --git a/vendor/nim-eth b/vendor/nim-eth index a1b38c2591..5c3969a5c1 160000 --- a/vendor/nim-eth +++ b/vendor/nim-eth @@ -1 +1 @@ -Subproject commit a1b38c25919fa602292a4071631c8f07ee1b2114 +Subproject commit 5c3969a5c12c7c5acc3d223723a8c005467deea6 diff --git a/vendor/nim-web3 b/vendor/nim-web3 index 3ef986c9d9..7de20af8e4 160000 --- a/vendor/nim-web3 +++ b/vendor/nim-web3 @@ -1 +1 @@ -Subproject commit 3ef986c9d93604775595f116a35c6ac0bf5257fc +Subproject commit 7de20af8e4d3ae61fb67028ff0295f790268f706 From 7c74bddbd7b3442ac773961a86e3babd2c31b3cc Mon Sep 17 00:00:00 2001 From: Advaita Saha Date: Sat, 17 May 2025 15:54:50 +0530 Subject: [PATCH 014/138] bump nim-eth to addcbfa4394727dabacd26856beb2a1931b483f6 (#3295) --- execution_chain/sync/wire_protocol/requester.nim | 8 -------- vendor/nim-eth | 2 +- 2 files changed, 1 insertion(+), 9 deletions(-) diff --git a/execution_chain/sync/wire_protocol/requester.nim b/execution_chain/sync/wire_protocol/requester.nim index 970572571a..ad269fd78e 100644 --- a/execution_chain/sync/wire_protocol/requester.nim +++ b/execution_chain/sync/wire_protocol/requester.nim @@ -104,14 +104,6 @@ const # https://github.com/ethereum/devp2p/blob/b0c213de97978053a0f62c3ea4d23c0a3d8784bc/caps/eth.md#blockrangeupdate-0x11 BlockRangeUpdateMsg* = 0x11'u64 -func to*(list: openArray[Receipt], _: type seq[StoredReceipt]): seq[StoredReceipt] = - for x in list: - result.add x.to(StoredReceipt) - -func to*(list: openArray[StoredReceipt], _: type seq[Receipt]): seq[Receipt] = - for x in list: - result.add x.to(Receipt) - func to*(rec: StoredReceiptsPacket, _: type ReceiptsPacket): ReceiptsPacket = for x in rec.receipts: result.receipts.add x.to(seq[Receipt]) diff --git a/vendor/nim-eth b/vendor/nim-eth index 5c3969a5c1..addcbfa439 160000 --- a/vendor/nim-eth +++ b/vendor/nim-eth @@ -1 +1 @@ -Subproject commit 5c3969a5c12c7c5acc3d223723a8c005467deea6 +Subproject commit addcbfa4394727dabacd26856beb2a1931b483f6 
From 54e6193748e90d2674f6f64c40ccd8cd0e674903 Mon Sep 17 00:00:00 2001 From: andri lim Date: Sun, 18 May 2025 23:20:11 +0700 Subject: [PATCH 015/138] Bump nim-chronos to b55e2816eb45f698ddaca8d8473e401502562db2 (#3296) --- vendor/nim-chronos | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vendor/nim-chronos b/vendor/nim-chronos index c04576d829..b55e2816eb 160000 --- a/vendor/nim-chronos +++ b/vendor/nim-chronos @@ -1 +1 @@ -Subproject commit c04576d829b8a0a1b12baaa8bc92037501b3a4a0 +Subproject commit b55e2816eb45f698ddaca8d8473e401502562db2 From 3bd6188f50bfa3f999826031a5f867b59489fc9d Mon Sep 17 00:00:00 2001 From: andri lim Date: Sun, 18 May 2025 23:21:03 +0700 Subject: [PATCH 016/138] Bump nim-libbacktrace to 99cd1a3f1568e7cfbbb6d886c93e4452dc65e4ef (#3297) --- vendor/nim-libbacktrace | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vendor/nim-libbacktrace b/vendor/nim-libbacktrace index b6e26f03c0..99cd1a3f15 160000 --- a/vendor/nim-libbacktrace +++ b/vendor/nim-libbacktrace @@ -1 +1 @@ -Subproject commit b6e26f03c091a8e3bba6adc06198fc3055bacc66 +Subproject commit 99cd1a3f1568e7cfbbb6d886c93e4452dc65e4ef From 57d3748c6142ffd013ea83c80946669e1242bf00 Mon Sep 17 00:00:00 2001 From: kdeme <7857583+kdeme@users.noreply.github.com> Date: Sun, 18 May 2025 21:45:45 +0200 Subject: [PATCH 017/138] Rename fluffy to portal/nimbus_portal_client (#3289) * Rename fluffy to portal/nimbus_portal_client A bunch of renames to remove the fluffy naming and related changes - fluffy dir -> portal dir - fluffy binary -> nimbus_portal_client - portal_bridge binary -> nimbus_portal_bridge - + renamed related make targets - Restructure of portal directory for the applications (client + bridge) - Rename of default data dir for nimbus_portal_client and nimbus_portal_bridge - Remove most of fluffy naming in code / docs - Move docker folder one level up - Move grafana folder into metrics folder Items that are of importance regarding backwards compatiblity: 
- Kept make target for fluffy and portal_bridge - Data-dir is first check to see if the legacy dir exists, if not the new dir is created - ENR file is currently still name fluffy_node.enr * Move legacy files to new naming + create deprecation file Also fix nimble tasks * More fixes/changes - Change lock file name to portal_node.lock - Fix debug docker files - Fix portal testnet script * Mass replace for fluffy/Fluffy and portal_bridge in docs/comments * Address feedback regarding usage of binary name --- .github/workflows/ci.yml | 4 +- .github/workflows/kurtosis.yml | 10 +- .github/workflows/nimbus_docker_build.yml | 8 +- .github/workflows/nimbus_verified_proxy.yml | 2 +- .github/workflows/{fluffy.yml => portal.yml} | 72 +++++------ .../{fluffy_docs.yml => portal_docs.yml} | 10 +- ...y_docker.yml => portal_nightly_docker.yml} | 7 +- Makefile | 88 +++++++------ README.md | 4 +- execution_chain/db/era1_db/db_desc.nim | 6 +- execution_chain/rpc/rpc_types.nim | 4 +- .../docs/the_fluffy_book/docs/test-suite.md | 15 --- nimbus.nimble | 20 +-- {fluffy => portal}/README.md | 10 +- .../bridge/nimbus_portal_bridge.nim | 11 +- .../bridge/nimbus_portal_bridge.nim.cfg | 0 .../bridge/nimbus_portal_bridge_conf.nim | 26 ++-- .../bridge}/portal_bridge_beacon.nim | 12 +- .../bridge}/portal_bridge_common.nim | 8 +- .../bridge}/portal_bridge_history.nim | 23 ++-- .../bridge}/portal_bridge_state.nim | 12 +- .../bridge}/state_bridge/database.nim | 4 +- .../bridge}/state_bridge/offers_builder.nim | 6 +- .../bridge}/state_bridge/state_diff.nim | 6 +- .../bridge}/state_bridge/world_state.nim | 8 +- .../state_bridge/world_state_helper.nim | 6 +- .../client/nimbus_portal_client.nim | 116 +++++++++++------- .../client/nimbus_portal_client.nim.cfg | 0 .../client/nimbus_portal_client_conf.nim | 30 +++-- portal/common/common_deprecation.nim | 41 +++++++ {fluffy => portal}/common/common_types.nim | 0 {fluffy => portal}/common/common_utils.nim | 0 {fluffy => portal}/database/content_db.nim | 6 +- 
.../content_db_custom_sql_functions.nim | 2 +- {fluffy => portal}/database/era1_db.nim | 4 +- {fluffy/tools => portal}/docker/Dockerfile | 15 ++- .../tools => portal}/docker/Dockerfile.debug | 14 ++- .../docker/Dockerfile.debug.dockerignore | 0 .../docker/Dockerfile.debug.linux | 11 +- .../docs/the_fluffy_book/docs/CNAME | 0 .../the_fluffy_book/docs/access-content.md | 2 +- .../docs/adding-documentation.md | 4 +- .../docs/the_fluffy_book/docs/architecture.md | 28 ++--- .../docs/basics-for-developers.md | 6 +- .../docs/beacon-content-bridging.md | 12 +- .../the_fluffy_book/docs/build-from-source.md | 22 ++-- .../docs/calling-a-contract.md | 8 +- .../the_fluffy_book/docs/connect-to-portal.md | 8 +- .../docs/the_fluffy_book/docs/db_pruning.md | 10 +- .../the_fluffy_book/docs/eth-data-exporter.md | 0 .../docs/history-content-bridging.md | 30 ++--- .../docs/the_fluffy_book/docs/index.md | 12 +- .../docs/the_fluffy_book/docs/metrics.md | 12 +- .../docs/nimbus-portal-with-hive.md | 18 +-- .../the_fluffy_book/docs/prerequisites.md | 8 +- .../docs/protocol-interop-testing.md | 6 +- .../docs/quick-start-docker.md | 6 +- .../docs/quick-start-windows.md | 18 +-- .../docs/the_fluffy_book/docs/quick-start.md | 16 +-- .../the_fluffy_book/docs/run-local-testnet.md | 10 +- .../docs/state-content-bridging.md | 18 +-- .../docs/stylesheets/extra.css | 0 .../docs/the_fluffy_book/docs/test-suite.md | 15 +++ .../docs/testnet-beacon-network.md | 18 +-- .../docs/testnet-history-network.md | 2 +- .../docs/the_fluffy_book/docs/upgrade.md | 12 +- .../docs/the_fluffy_book/mkdocs.yml | 8 +- {fluffy => portal}/eth_data/era1.nim | 2 +- .../eth_data/history_data_json_store.nim | 0 .../eth_data/history_data_seeding.nim | 0 .../eth_data/history_data_ssz_e2s.nim | 0 .../eth_data/yaml_eth_types.nim | 2 +- {fluffy => portal}/eth_data/yaml_utils.nim | 4 +- {fluffy => portal}/evm/async_evm.nim | 2 +- {fluffy => portal}/evm/async_evm_backend.nim | 2 +- .../evm/async_evm_portal_backend.nim | 2 +- {fluffy 
=> portal}/logging.nim | 4 +- .../grafana/fluffy_grafana_dashboard.json | 0 .../beacon/beacon_chain_historical_roots.nim | 0 .../beacon_chain_historical_summaries.nim | 2 +- .../network/beacon/beacon_content.nim | 4 +- .../network/beacon/beacon_db.nim | 2 +- .../network/beacon/beacon_init_loader.nim | 4 +- .../network/beacon/beacon_light_client.nim | 0 .../beacon/beacon_light_client_manager.nim | 4 +- .../network/beacon/beacon_network.nim | 2 +- .../network/beacon/beacon_validation.nim | 4 +- .../network/beacon/content/content_keys.nim | 4 +- .../network/beacon/content/content_values.nim | 4 +- .../network/history/content/content_keys.nim | 2 +- .../history/content/content_values.nim | 2 +- .../content/content_values_deprecated.nim | 2 +- .../network/history/history_content.nim | 0 .../network/history/history_network.nim | 4 +- .../history/history_type_conversions.nim | 4 +- .../network/history/history_validation.nim | 4 +- .../history/validation/block_proof_common.nim | 2 +- ...ck_proof_historical_hashes_accumulator.nim | 0 .../block_proof_historical_roots.nim | 2 +- .../block_proof_historical_summaries.nim | 2 +- .../historical_hashes_accumulator.nim | 2 +- .../network}/network_metadata.nim | 4 +- {fluffy => portal/network}/portal_node.nim | 14 +-- .../network/state/content/content_keys.nim | 4 +- .../network/state/content/content_values.nim | 4 +- .../network/state/content/nibbles.nim | 4 +- .../network/state/state_content.nim | 4 +- .../network/state/state_endpoints.nim | 2 +- .../network/state/state_gossip.nim | 4 +- .../network/state/state_network.nim | 0 .../network/state/state_utils.nim | 4 +- .../network/state/state_validation.nim | 2 +- {fluffy => portal}/network/wire/README.md | 22 +--- {fluffy => portal}/network/wire/messages.nim | 0 .../network/wire/ping_extensions.nim | 0 .../network/wire/portal_protocol.nim | 2 +- .../network/wire/portal_protocol_config.nim | 2 +- .../network/wire/portal_protocol_version.nim | 0 .../network/wire/portal_stream.nim | 2 
+- {fluffy => portal}/nim.cfg | 2 +- {fluffy => portal}/rpc/eth_rpc_client.nim | 4 +- {fluffy => portal}/rpc/portal_rpc_client.nim | 4 +- .../rpc/rpc_calls/rpc_debug_calls.nim | 2 +- .../rpc/rpc_calls/rpc_discovery_calls.nim | 4 +- .../rpc/rpc_calls/rpc_eth_calls.nim | 2 +- .../rpc/rpc_calls/rpc_portal_calls.nim | 2 +- .../rpc/rpc_calls/rpc_portal_debug_calls.nim | 4 +- .../rpc/rpc_calls/rpc_trace_calls.nim | 4 +- {fluffy => portal}/rpc/rpc_debug_api.nim | 2 +- {fluffy => portal}/rpc/rpc_discovery_api.nim | 0 {fluffy => portal}/rpc/rpc_eth_api.nim | 6 +- .../rpc/rpc_portal_beacon_api.nim | 2 +- .../rpc/rpc_portal_common_api.nim | 2 +- .../rpc/rpc_portal_debug_history_api.nim | 6 +- .../rpc/rpc_portal_history_api.nim | 2 +- .../rpc/rpc_portal_nimbus_beacon_api.nim | 2 +- .../rpc/rpc_portal_state_api.nim | 2 +- {fluffy => portal}/rpc/rpc_types.nim | 2 +- .../scripts/launch_local_testnet.sh | 22 ++-- {fluffy => portal}/scripts/makedir.sh | 0 {fluffy => portal}/scripts/nim.cfg | 0 .../scripts/test_portal_testnet.nim | 4 +- .../tests/all_portal_tests.nim | 0 .../all_beacon_network_tests.nim | 2 +- .../beacon_test_helpers.nim | 0 .../light_client_test_data.nim | 0 .../test_beacon_content.nim | 4 +- .../test_beacon_historical_roots.nim | 0 .../test_beacon_historical_summaries.nim | 2 +- ...st_beacon_historical_summaries_vectors.nim | 2 +- .../test_beacon_light_client.nim | 0 .../test_beacon_network.nim | 2 +- .../tests/blocks/mainnet_blocks_1-2.json | 0 .../mainnet_blocks_1000001_1000010.json | 0 .../mainnet_blocks_1000011_1000030.json | 0 .../mainnet_blocks_1000040_1000050.json | 0 .../tests/blocks/mainnet_blocks_selected.json | 0 .../tests/custom_genesis/berlin2000.json | 0 .../tests/custom_genesis/calaveras.json | 0 .../tests/custom_genesis/chainid1.json | 0 .../tests/custom_genesis/chainid7.json | 0 .../tests/custom_genesis/devnet4.json | 0 .../tests/custom_genesis/devnet5.json | 0 .../tests/custom_genesis/holesky.json | 0 .../tests/custom_genesis/mainshadow1.json | 0 
.../tests/custom_genesis/merge.json | 0 .../tests/evm/all_evm_tests.nim | 0 .../tests/evm/async_evm_test_backend.nim | 2 +- .../tests/evm/test_async_evm.nim | 2 +- ...all_history_network_custom_chain_tests.nim | 0 .../all_history_network_tests.nim | 0 .../test_block_proof_historical_roots.nim | 0 ...t_block_proof_historical_roots_vectors.nim | 4 +- .../test_block_proof_historical_summaries.nim | 0 ...block_proof_historical_summaries_deneb.nim | 0 ...ock_proof_historical_summaries_vectors.nim | 2 +- .../test_historical_hashes_accumulator.nim | 0 ...est_historical_hashes_accumulator_root.nim | 2 +- .../test_history_content.nim | 2 +- .../test_history_content_keys.nim | 0 .../test_history_content_validation.nim | 2 +- .../test_history_network.nim | 2 +- .../test_history_util.nim | 0 .../tests/rpc_tests/all_rpc_tests.nim | 0 .../tests/rpc_tests/test_discovery_rpc.nim | 0 .../rpc_tests/test_portal_rpc_client.nim | 0 .../all_state_network_tests.nim | 0 .../state_test_helpers.nim | 2 +- .../test_state_content_keys_vectors.nim | 4 +- .../test_state_content_nibbles.nim | 4 +- .../test_state_content_values_vectors.nim | 4 +- .../test_state_endpoints_genesis.nim | 4 +- .../test_state_endpoints_vectors.nim | 0 .../test_state_gossip_getparent_genesis.nim | 4 +- .../test_state_gossip_getparent_vectors.nim | 4 +- .../test_state_gossip_gossipoffer_vectors.nim | 4 +- .../test_state_network_getcontent_vectors.nim | 4 +- ...est_state_network_offercontent_vectors.nim | 4 +- .../test_state_validation_genesis.nim | 4 +- .../test_state_validation_trieproof.nim | 4 +- .../test_state_validation_vectors.nim | 4 +- {fluffy => portal}/tests/test_content_db.nim | 4 +- {fluffy => portal}/tests/test_helpers.nim | 0 .../all_wire_protocol_tests.nim | 0 .../test_ping_extensions_encoding.nim | 0 .../test_portal_wire_encoding.nim | 0 .../test_portal_wire_protocol.nim | 0 .../test_portal_wire_version.nim | 0 {fluffy => portal}/tools/benchmark.nim | 4 +- {fluffy => portal}/tools/blockwalk.nim | 0 
.../tools/eth_data_exporter.nim | 2 +- .../eth_data_exporter/cl_data_exporter.nim | 4 +- .../tools/eth_data_exporter/downloader.nim | 0 .../eth_data_exporter/exporter_common.nim | 0 .../tools/eth_data_exporter/exporter_conf.nim | 0 .../tools/eth_data_exporter/parser.nim | 0 {fluffy => portal}/tools/fcli_db.nim | 2 +- {fluffy => portal}/tools/portalcli.nim | 2 +- .../tools/utp_testing/README.md | 6 +- .../tools/utp_testing/docker/Dockerfile | 0 .../utp_testing/docker/docker-compose.yml | 0 .../tools/utp_testing/docker/run_endpoint.sh | 0 .../tools/utp_testing/docker/setup.sh | 0 .../tools/utp_testing/utp_rpc_types.nim | 0 .../tools/utp_testing/utp_test.nim | 4 +- .../tools/utp_testing/utp_test_app.nim | 0 .../tools/utp_testing/utp_test_rpc_calls.nim | 0 .../tools/utp_testing/utp_test_rpc_client.nim | 0 {fluffy => portal}/version.nim | 14 ++- 229 files changed, 688 insertions(+), 592 deletions(-) rename .github/workflows/{fluffy.yml => portal.yml} (87%) rename .github/workflows/{fluffy_docs.yml => portal_docs.yml} (86%) rename .github/workflows/{fluffy_nightly_docker.yml => portal_nightly_docker.yml} (78%) delete mode 100644 fluffy/docs/the_fluffy_book/docs/test-suite.md rename {fluffy => portal}/README.md (77%) rename fluffy/tools/portal_bridge/portal_bridge.nim => portal/bridge/nimbus_portal_bridge.nim (88%) rename fluffy/tools/portal_bridge/nim.cfg => portal/bridge/nimbus_portal_bridge.nim.cfg (100%) rename fluffy/tools/portal_bridge/portal_bridge_conf.nim => portal/bridge/nimbus_portal_bridge_conf.nim (94%) rename {fluffy/tools/portal_bridge => portal/bridge}/portal_bridge_beacon.nim (98%) rename {fluffy/tools/portal_bridge => portal/bridge}/portal_bridge_common.nim (95%) rename {fluffy/tools/portal_bridge => portal/bridge}/portal_bridge_history.nim (96%) rename {fluffy/tools/portal_bridge => portal/bridge}/portal_bridge_state.nim (99%) rename {fluffy/tools/portal_bridge => portal/bridge}/state_bridge/database.nim (98%) rename {fluffy/tools/portal_bridge => 
portal/bridge}/state_bridge/offers_builder.nim (96%) rename {fluffy/tools/portal_bridge => portal/bridge}/state_bridge/state_diff.nim (97%) rename {fluffy/tools/portal_bridge => portal/bridge}/state_bridge/world_state.nim (98%) rename {fluffy/tools/portal_bridge => portal/bridge}/state_bridge/world_state_helper.nim (96%) rename fluffy/fluffy.nim => portal/client/nimbus_portal_client.nim (80%) rename fluffy/fluffy.nim.cfg => portal/client/nimbus_portal_client.nim.cfg (100%) rename fluffy/conf.nim => portal/client/nimbus_portal_client_conf.nim (95%) create mode 100644 portal/common/common_deprecation.nim rename {fluffy => portal}/common/common_types.nim (100%) rename {fluffy => portal}/common/common_utils.nim (100%) rename {fluffy => portal}/database/content_db.nim (99%) rename {fluffy => portal}/database/content_db_custom_sql_functions.nim (99%) rename {fluffy => portal}/database/era1_db.nim (97%) rename {fluffy/tools => portal}/docker/Dockerfile (75%) rename {fluffy/tools => portal}/docker/Dockerfile.debug (78%) rename {fluffy/tools => portal}/docker/Dockerfile.debug.dockerignore (100%) rename {fluffy/tools => portal}/docker/Dockerfile.debug.linux (58%) rename {fluffy => portal}/docs/the_fluffy_book/docs/CNAME (100%) rename {fluffy => portal}/docs/the_fluffy_book/docs/access-content.md (94%) rename {fluffy => portal}/docs/the_fluffy_book/docs/adding-documentation.md (80%) rename {fluffy => portal}/docs/the_fluffy_book/docs/architecture.md (61%) rename {fluffy => portal}/docs/the_fluffy_book/docs/basics-for-developers.md (79%) rename {fluffy => portal}/docs/the_fluffy_book/docs/beacon-content-bridging.md (61%) rename {fluffy => portal}/docs/the_fluffy_book/docs/build-from-source.md (52%) rename {fluffy => portal}/docs/the_fluffy_book/docs/calling-a-contract.md (83%) rename {fluffy => portal}/docs/the_fluffy_book/docs/connect-to-portal.md (60%) rename {fluffy => portal}/docs/the_fluffy_book/docs/db_pruning.md (57%) rename {fluffy => 
portal}/docs/the_fluffy_book/docs/eth-data-exporter.md (100%) rename {fluffy => portal}/docs/the_fluffy_book/docs/history-content-bridging.md (65%) rename {fluffy => portal}/docs/the_fluffy_book/docs/index.md (85%) rename {fluffy => portal}/docs/the_fluffy_book/docs/metrics.md (74%) rename fluffy/docs/the_fluffy_book/docs/fluffy-with-hive.md => portal/docs/the_fluffy_book/docs/nimbus-portal-with-hive.md (65%) rename {fluffy => portal}/docs/the_fluffy_book/docs/prerequisites.md (82%) rename {fluffy => portal}/docs/the_fluffy_book/docs/protocol-interop-testing.md (94%) rename {fluffy => portal}/docs/the_fluffy_book/docs/quick-start-docker.md (82%) rename {fluffy => portal}/docs/the_fluffy_book/docs/quick-start-windows.md (79%) rename {fluffy => portal}/docs/the_fluffy_book/docs/quick-start.md (82%) rename {fluffy => portal}/docs/the_fluffy_book/docs/run-local-testnet.md (64%) rename {fluffy => portal}/docs/the_fluffy_book/docs/state-content-bridging.md (82%) rename {fluffy => portal}/docs/the_fluffy_book/docs/stylesheets/extra.css (100%) create mode 100644 portal/docs/the_fluffy_book/docs/test-suite.md rename {fluffy => portal}/docs/the_fluffy_book/docs/testnet-beacon-network.md (65%) rename {fluffy => portal}/docs/the_fluffy_book/docs/testnet-history-network.md (98%) rename {fluffy => portal}/docs/the_fluffy_book/docs/upgrade.md (52%) rename {fluffy => portal}/docs/the_fluffy_book/mkdocs.yml (94%) rename {fluffy => portal}/eth_data/era1.nim (99%) rename {fluffy => portal}/eth_data/history_data_json_store.nim (100%) rename {fluffy => portal}/eth_data/history_data_seeding.nim (100%) rename {fluffy => portal}/eth_data/history_data_ssz_e2s.nim (100%) rename {fluffy => portal}/eth_data/yaml_eth_types.nim (99%) rename {fluffy => portal}/eth_data/yaml_utils.nim (96%) rename {fluffy => portal}/evm/async_evm.nim (99%) rename {fluffy => portal}/evm/async_evm_backend.nim (99%) rename {fluffy => portal}/evm/async_evm_portal_backend.nim (99%) rename {fluffy => 
portal}/logging.nim (90%) rename {fluffy => portal/metrics}/grafana/fluffy_grafana_dashboard.json (100%) rename {fluffy => portal}/network/beacon/beacon_chain_historical_roots.nim (100%) rename {fluffy => portal}/network/beacon/beacon_chain_historical_summaries.nim (99%) rename {fluffy => portal}/network/beacon/beacon_content.nim (88%) rename {fluffy => portal}/network/beacon/beacon_db.nim (99%) rename {fluffy => portal}/network/beacon/beacon_init_loader.nim (96%) rename {fluffy => portal}/network/beacon/beacon_light_client.nim (100%) rename {fluffy => portal}/network/beacon/beacon_light_client_manager.nim (99%) rename {fluffy => portal}/network/beacon/beacon_network.nim (99%) rename {fluffy => portal}/network/beacon/beacon_validation.nim (93%) rename {fluffy => portal}/network/beacon/content/content_keys.nim (98%) rename {fluffy => portal}/network/beacon/content/content_values.nim (98%) rename {fluffy => portal}/network/history/content/content_keys.nim (99%) rename {fluffy => portal}/network/history/content/content_values.nim (99%) rename {fluffy => portal}/network/history/content/content_values_deprecated.nim (99%) rename {fluffy => portal}/network/history/history_content.nim (100%) rename {fluffy => portal}/network/history/history_network.nim (99%) rename {fluffy => portal}/network/history/history_type_conversions.nim (98%) rename {fluffy => portal}/network/history/history_validation.nim (99%) rename {fluffy => portal}/network/history/validation/block_proof_common.nim (99%) rename {fluffy => portal}/network/history/validation/block_proof_historical_hashes_accumulator.nim (100%) rename {fluffy => portal}/network/history/validation/block_proof_historical_roots.nim (99%) rename {fluffy => portal}/network/history/validation/block_proof_historical_summaries.nim (99%) rename {fluffy => portal}/network/history/validation/historical_hashes_accumulator.nim (98%) rename {fluffy => portal/network}/network_metadata.nim (96%) rename {fluffy => portal/network}/portal_node.nim 
(96%) rename {fluffy => portal}/network/state/content/content_keys.nim (98%) rename {fluffy => portal}/network/state/content/content_values.nim (98%) rename {fluffy => portal}/network/state/content/nibbles.nim (97%) rename {fluffy => portal}/network/state/state_content.nim (87%) rename {fluffy => portal}/network/state/state_endpoints.nim (99%) rename {fluffy => portal}/network/state/state_gossip.nim (98%) rename {fluffy => portal}/network/state/state_network.nim (100%) rename {fluffy => portal}/network/state/state_utils.nim (98%) rename {fluffy => portal}/network/state/state_validation.nim (99%) rename {fluffy => portal}/network/wire/README.md (72%) rename {fluffy => portal}/network/wire/messages.nim (100%) rename {fluffy => portal}/network/wire/ping_extensions.nim (100%) rename {fluffy => portal}/network/wire/portal_protocol.nim (99%) rename {fluffy => portal}/network/wire/portal_protocol_config.nim (99%) rename {fluffy => portal}/network/wire/portal_protocol_version.nim (100%) rename {fluffy => portal}/network/wire/portal_stream.nim (99%) rename {fluffy => portal}/nim.cfg (98%) rename {fluffy => portal}/rpc/eth_rpc_client.nim (86%) rename {fluffy => portal}/rpc/portal_rpc_client.nim (98%) rename {fluffy => portal}/rpc/rpc_calls/rpc_debug_calls.nim (99%) rename {fluffy => portal}/rpc/rpc_calls/rpc_discovery_calls.nim (94%) rename {fluffy => portal}/rpc/rpc_calls/rpc_eth_calls.nim (99%) rename {fluffy => portal}/rpc/rpc_calls/rpc_portal_calls.nim (99%) rename {fluffy => portal}/rpc/rpc_calls/rpc_portal_debug_calls.nim (92%) rename {fluffy => portal}/rpc/rpc_calls/rpc_trace_calls.nim (90%) rename {fluffy => portal}/rpc/rpc_debug_api.nim (99%) rename {fluffy => portal}/rpc/rpc_discovery_api.nim (100%) rename {fluffy => portal}/rpc/rpc_eth_api.nim (99%) rename {fluffy => portal}/rpc/rpc_portal_beacon_api.nim (99%) rename {fluffy => portal}/rpc/rpc_portal_common_api.nim (99%) rename {fluffy => portal}/rpc/rpc_portal_debug_history_api.nim (89%) rename {fluffy => 
portal}/rpc/rpc_portal_history_api.nim (99%) rename {fluffy => portal}/rpc/rpc_portal_nimbus_beacon_api.nim (91%) rename {fluffy => portal}/rpc/rpc_portal_state_api.nim (99%) rename {fluffy => portal}/rpc/rpc_types.nim (99%) rename {fluffy => portal}/scripts/launch_local_testnet.sh (95%) rename {fluffy => portal}/scripts/makedir.sh (100%) rename {fluffy => portal}/scripts/nim.cfg (100%) rename {fluffy => portal}/scripts/test_portal_testnet.nim (99%) rename fluffy/tests/all_fluffy_tests.nim => portal/tests/all_portal_tests.nim (100%) rename {fluffy => portal}/tests/beacon_network_tests/all_beacon_network_tests.nim (98%) rename {fluffy => portal}/tests/beacon_network_tests/beacon_test_helpers.nim (100%) rename {fluffy => portal}/tests/beacon_network_tests/light_client_test_data.nim (100%) rename {fluffy => portal}/tests/beacon_network_tests/test_beacon_content.nim (99%) rename {fluffy => portal}/tests/beacon_network_tests/test_beacon_historical_roots.nim (100%) rename {fluffy => portal}/tests/beacon_network_tests/test_beacon_historical_summaries.nim (99%) rename {fluffy => portal}/tests/beacon_network_tests/test_beacon_historical_summaries_vectors.nim (99%) rename {fluffy => portal}/tests/beacon_network_tests/test_beacon_light_client.nim (100%) rename {fluffy => portal}/tests/beacon_network_tests/test_beacon_network.nim (99%) rename {fluffy => portal}/tests/blocks/mainnet_blocks_1-2.json (100%) rename {fluffy => portal}/tests/blocks/mainnet_blocks_1000001_1000010.json (100%) rename {fluffy => portal}/tests/blocks/mainnet_blocks_1000011_1000030.json (100%) rename {fluffy => portal}/tests/blocks/mainnet_blocks_1000040_1000050.json (100%) rename {fluffy => portal}/tests/blocks/mainnet_blocks_selected.json (100%) rename {fluffy => portal}/tests/custom_genesis/berlin2000.json (100%) rename {fluffy => portal}/tests/custom_genesis/calaveras.json (100%) rename {fluffy => portal}/tests/custom_genesis/chainid1.json (100%) rename {fluffy => 
portal}/tests/custom_genesis/chainid7.json (100%) rename {fluffy => portal}/tests/custom_genesis/devnet4.json (100%) rename {fluffy => portal}/tests/custom_genesis/devnet5.json (100%) rename {fluffy => portal}/tests/custom_genesis/holesky.json (100%) rename {fluffy => portal}/tests/custom_genesis/mainshadow1.json (100%) rename {fluffy => portal}/tests/custom_genesis/merge.json (100%) rename {fluffy => portal}/tests/evm/all_evm_tests.nim (100%) rename {fluffy => portal}/tests/evm/async_evm_test_backend.nim (99%) rename {fluffy => portal}/tests/evm/test_async_evm.nim (99%) rename {fluffy => portal}/tests/history_network_tests/all_history_network_custom_chain_tests.nim (100%) rename {fluffy => portal}/tests/history_network_tests/all_history_network_tests.nim (100%) rename {fluffy => portal}/tests/history_network_tests/test_block_proof_historical_roots.nim (100%) rename {fluffy => portal}/tests/history_network_tests/test_block_proof_historical_roots_vectors.nim (97%) rename {fluffy => portal}/tests/history_network_tests/test_block_proof_historical_summaries.nim (100%) rename {fluffy => portal}/tests/history_network_tests/test_block_proof_historical_summaries_deneb.nim (100%) rename {fluffy => portal}/tests/history_network_tests/test_block_proof_historical_summaries_vectors.nim (99%) rename {fluffy => portal}/tests/history_network_tests/test_historical_hashes_accumulator.nim (100%) rename {fluffy => portal}/tests/history_network_tests/test_historical_hashes_accumulator_root.nim (96%) rename {fluffy => portal}/tests/history_network_tests/test_history_content.nim (99%) rename {fluffy => portal}/tests/history_network_tests/test_history_content_keys.nim (100%) rename {fluffy => portal}/tests/history_network_tests/test_history_content_validation.nim (98%) rename {fluffy => portal}/tests/history_network_tests/test_history_network.nim (99%) rename {fluffy => portal}/tests/history_network_tests/test_history_util.nim (100%) rename {fluffy => 
portal}/tests/rpc_tests/all_rpc_tests.nim (100%) rename {fluffy => portal}/tests/rpc_tests/test_discovery_rpc.nim (100%) rename {fluffy => portal}/tests/rpc_tests/test_portal_rpc_client.nim (100%) rename {fluffy => portal}/tests/state_network_tests/all_state_network_tests.nim (100%) rename {fluffy => portal}/tests/state_network_tests/state_test_helpers.nim (99%) rename {fluffy => portal}/tests/state_network_tests/test_state_content_keys_vectors.nim (98%) rename {fluffy => portal}/tests/state_network_tests/test_state_content_nibbles.nim (97%) rename {fluffy => portal}/tests/state_network_tests/test_state_content_values_vectors.nim (98%) rename {fluffy => portal}/tests/state_network_tests/test_state_endpoints_genesis.nim (98%) rename {fluffy => portal}/tests/state_network_tests/test_state_endpoints_vectors.nim (100%) rename {fluffy => portal}/tests/state_network_tests/test_state_gossip_getparent_genesis.nim (97%) rename {fluffy => portal}/tests/state_network_tests/test_state_gossip_getparent_vectors.nim (97%) rename {fluffy => portal}/tests/state_network_tests/test_state_gossip_gossipoffer_vectors.nim (99%) rename {fluffy => portal}/tests/state_network_tests/test_state_network_getcontent_vectors.nim (99%) rename {fluffy => portal}/tests/state_network_tests/test_state_network_offercontent_vectors.nim (99%) rename {fluffy => portal}/tests/state_network_tests/test_state_validation_genesis.nim (97%) rename {fluffy => portal}/tests/state_network_tests/test_state_validation_trieproof.nim (99%) rename {fluffy => portal}/tests/state_network_tests/test_state_validation_vectors.nim (99%) rename {fluffy => portal}/tests/test_content_db.nim (98%) rename {fluffy => portal}/tests/test_helpers.nim (100%) rename {fluffy => portal}/tests/wire_protocol_tests/all_wire_protocol_tests.nim (100%) rename {fluffy => portal}/tests/wire_protocol_tests/test_ping_extensions_encoding.nim (100%) rename {fluffy => portal}/tests/wire_protocol_tests/test_portal_wire_encoding.nim (100%) rename 
{fluffy => portal}/tests/wire_protocol_tests/test_portal_wire_protocol.nim (100%) rename {fluffy => portal}/tests/wire_protocol_tests/test_portal_wire_version.nim (100%) rename {fluffy => portal}/tools/benchmark.nim (94%) rename {fluffy => portal}/tools/blockwalk.nim (100%) rename {fluffy => portal}/tools/eth_data_exporter.nim (99%) rename {fluffy => portal}/tools/eth_data_exporter/cl_data_exporter.nim (99%) rename {fluffy => portal}/tools/eth_data_exporter/downloader.nim (100%) rename {fluffy => portal}/tools/eth_data_exporter/exporter_common.nim (100%) rename {fluffy => portal}/tools/eth_data_exporter/exporter_conf.nim (100%) rename {fluffy => portal}/tools/eth_data_exporter/parser.nim (100%) rename {fluffy => portal}/tools/fcli_db.nim (99%) rename {fluffy => portal}/tools/portalcli.nim (99%) rename {fluffy => portal}/tools/utp_testing/README.md (92%) rename {fluffy => portal}/tools/utp_testing/docker/Dockerfile (100%) rename {fluffy => portal}/tools/utp_testing/docker/docker-compose.yml (100%) rename {fluffy => portal}/tools/utp_testing/docker/run_endpoint.sh (100%) rename {fluffy => portal}/tools/utp_testing/docker/setup.sh (100%) rename {fluffy => portal}/tools/utp_testing/utp_rpc_types.nim (100%) rename {fluffy => portal}/tools/utp_testing/utp_test.nim (99%) rename {fluffy => portal}/tools/utp_testing/utp_test_app.nim (100%) rename {fluffy => portal}/tools/utp_testing/utp_test_rpc_calls.nim (100%) rename {fluffy => portal}/tools/utp_testing/utp_test_rpc_client.nim (100%) rename {fluffy => portal}/version.nim (86%) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8fb4ea25a7..6f6e456579 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -17,8 +17,8 @@ on: - '**/*.md' - '**/*.yml' - 'hive_integration/**' - - 'fluffy/**' - - '.github/workflows/fluffy*.yml' + - 'portal/**' + - '.github/workflows/portal*.yml' - 'nimbus_verified_proxy/**' - '.github/workflows/nimbus_verified_proxy.yml' diff --git 
a/.github/workflows/kurtosis.yml b/.github/workflows/kurtosis.yml index c902873638..6098d46d4f 100644 --- a/.github/workflows/kurtosis.yml +++ b/.github/workflows/kurtosis.yml @@ -19,8 +19,8 @@ on: - 'docs/**' - '**/*.md' - 'hive_integration/**' - - 'fluffy/**' - - '.github/workflows/fluffy*.yml' + - 'portal/**' + - '.github/workflows/portal*.yml' - 'nimbus_verified_proxy/**' - '.github/workflows/nimbus_verified_proxy.yml' pull_request: @@ -31,8 +31,8 @@ on: - 'docs/**' - '**/*.md' - 'hive_integration/**' - - 'fluffy/**' - - '.github/workflows/fluffy*.yml' + - 'portal/**' + - '.github/workflows/portal*.yml' - 'nimbus_verified_proxy/**' - '.github/workflows/nimbus_verified_proxy.yml' @@ -282,4 +282,4 @@ jobs: echo "" exit 1 # fail action - fi \ No newline at end of file + fi diff --git a/.github/workflows/nimbus_docker_build.yml b/.github/workflows/nimbus_docker_build.yml index 374e1e3476..c67b8be2dd 100644 --- a/.github/workflows/nimbus_docker_build.yml +++ b/.github/workflows/nimbus_docker_build.yml @@ -15,12 +15,12 @@ on: branches: - 'master' paths-ignore: - - 'fluffy/**' + - 'portal/**' - '**/*.md' - - '.github/workflows/fluffy*.yml' + - '.github/workflows/portal*.yml' - 'nimbus_verified_proxy/**' - '.github/workflows/nimbus_verified_proxy.yml' - + workflow_dispatch: env: @@ -130,7 +130,7 @@ jobs: jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON" docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) 
| join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \ $(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *) - + - name: Inspect image run: | docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.meta.outputs.version }} diff --git a/.github/workflows/nimbus_verified_proxy.yml b/.github/workflows/nimbus_verified_proxy.yml index 50eaa2b2d8..3ffc68fa4d 100644 --- a/.github/workflows/nimbus_verified_proxy.yml +++ b/.github/workflows/nimbus_verified_proxy.yml @@ -236,7 +236,7 @@ jobs: - name: Check nph formatting # Pin nph to a specific version to avoid sudden style differences. # Updating nph version should be accompanied with running the new - # version on the fluffy directory. + # version on the nimbus_verified_proxy directory. run: | VERSION="v0.6.1" ARCHIVE="nph-linux_x64.tar.gz" diff --git a/.github/workflows/fluffy.yml b/.github/workflows/portal.yml similarity index 87% rename from .github/workflows/fluffy.yml rename to .github/workflows/portal.yml index 2b39604cd4..7a3de17fdd 100644 --- a/.github/workflows/fluffy.yml +++ b/.github/workflows/portal.yml @@ -1,18 +1,18 @@ -# Fluffy +# Nimbus # Copyright (c) 2021-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). # at your option. This file may not be copied, modified, or distributed except according to those terms. 
-name: Fluffy CI +name: Nimbus Portal CI on: push: paths: - - '.github/workflows/fluffy.yml' - - 'fluffy/**' - - '!fluffy/**.md' - - '!fluffy/docs/**' + - '.github/workflows/portal.yml' + - 'portal/**' + - '!portal/**.md' + - '!portal/docs/**' - 'execution_chain/rpc/hexstrings.nim' - 'execution_chain/rpc/rpc_*.nim' - 'execution_chain/db/**' @@ -22,10 +22,10 @@ on: pull_request: paths: - - '.github/workflows/fluffy.yml' - - 'fluffy/**' - - '!fluffy/**.md' - - '!fluffy/docs/**' + - '.github/workflows/portal.yml' + - 'portal/**' + - '!portal/**.md' + - '!portal/docs/**' - 'execution_chain/rpc/hexstrings.nim' - 'execution_chain/rpc/rpc_*.nim' - 'execution_chain/db/**' @@ -68,7 +68,7 @@ jobs: uses: actions/cache@v4 with: path: NimBinaries - key: 'nim-linux-amd64-${{ steps.versions.outputs.nimbus_build_system }}-fluffy' + key: 'nim-linux-amd64-${{ steps.versions.outputs.nimbus_build_system }}-portal' - name: Build Nim and Nimbus-eth1 dependencies run: | @@ -76,13 +76,13 @@ jobs: - name: build uTP test app container run: | - docker build -t test-utp --no-cache --build-arg BRANCH_NAME=${{ github.ref_name }} fluffy/tools/utp_testing/docker + docker build -t test-utp --no-cache --build-arg BRANCH_NAME=${{ github.ref_name }} portal/tools/utp_testing/docker - name: run test app with simulator run: | : find / -name docker-compose -printf "%h\n%f\n%m\n\n" 2>/dev/null PATH=$PATH$(find /usr/libexec/docker -name docker-compose -printf ":%h") - SCENARIO="drop-rate --delay=15ms --bandwidth=10Mbps --queue=25 --rate_to_client=10 --rate_to_server=10" docker-compose -f fluffy/tools/utp_testing/docker/docker-compose.yml up -d + SCENARIO="drop-rate --delay=15ms --bandwidth=10Mbps --queue=25 --rate_to_client=10 --rate_to_server=10" docker-compose -f portal/tools/utp_testing/docker/docker-compose.yml up -d - name: wait 5 seconds for containers to start run: | @@ -102,7 +102,7 @@ jobs: if: always() run: | PATH=$PATH$(find /usr/libexec/docker -name docker-compose -printf ":%h") - 
docker-compose -f fluffy/tools/utp_testing/docker/docker-compose.yml down + docker-compose -f portal/tools/utp_testing/docker/docker-compose.yml down build: strategy: @@ -207,7 +207,7 @@ jobs: uses: actions/cache@v4 with: path: external/dlls-${{ matrix.target.cpu }} - key: 'dlls-${{ matrix.target.cpu }}-fluffy' + key: 'dlls-${{ matrix.target.cpu }}-portal' - name: Install llvm-mingw dependency (Windows) if: > @@ -258,7 +258,7 @@ jobs: uses: actions/cache@v4 with: path: NimBinaries - key: 'nim-${{ matrix.target.os }}-${{ matrix.target.cpu }}-${{ steps.versions.outputs.nimbus_build_system }}-fluffy' + key: 'nim-${{ matrix.target.os }}-${{ matrix.target.cpu }}-${{ steps.versions.outputs.nimbus_build_system }}-portal' - name: Build Nim and Nimbus-eth1 dependencies run: | @@ -266,21 +266,21 @@ jobs: # using the same glibc version. env CC=gcc make -j${ncpu} ARCH_OVERRIDE=${PLATFORM} CI_CACHE=NimBinaries update-from-ci - - name: Run fluffy tests (Windows) + - name: Run Nimbus Portal tests (Windows) if: runner.os == 'Windows' run: | gcc --version DEFAULT_MAKE_FLAGS="-j1" - mingw32-make ${DEFAULT_MAKE_FLAGS} fluffy - build/fluffy.exe --help + mingw32-make ${DEFAULT_MAKE_FLAGS} nimbus_portal_client + build/nimbus_portal_client.exe --help find . 
-type d -name ".git" -exec rm -rf {} + rm -rf nimcache - mingw32-make ${DEFAULT_MAKE_FLAGS} fluffy-tools + mingw32-make ${DEFAULT_MAKE_FLAGS} portal-tools rm -rf nimcache - mingw32-make fluffy-test + mingw32-make portal-test rm -rf nimcache - - name: Run fluffy tests (Linux) + - name: Run Nimbus Portal tests (Linux) if: runner.os == 'Linux' run: | gcc --version @@ -288,28 +288,28 @@ jobs: ldd --version export LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:/usr/local/lib" DEFAULT_MAKE_FLAGS="-j${ncpu}" - env CC=gcc make ${DEFAULT_MAKE_FLAGS} fluffy - build/fluffy --help - env CC=gcc make ${DEFAULT_MAKE_FLAGS} fluffy-tools + env CC=gcc make ${DEFAULT_MAKE_FLAGS} nimbus_portal_client + build/nimbus_portal_client --help + env CC=gcc make ${DEFAULT_MAKE_FLAGS} portal-tools # CC is needed to select correct compiler 32/64 bit - env CC=gcc CXX=g++ make fluffy-test + env CC=gcc CXX=g++ make portal-test - - name: Run fluffy tests (Macos) + - name: Run Nimbus Portal tests (Macos) if: runner.os == 'Macos' run: | DEFAULT_MAKE_FLAGS="-j${ncpu}" - make ${DEFAULT_MAKE_FLAGS} fluffy - build/fluffy --help - make ${DEFAULT_MAKE_FLAGS} fluffy-tools + make ${DEFAULT_MAKE_FLAGS} nimbus_portal_client + build/nimbus_portal_client --help + make ${DEFAULT_MAKE_FLAGS} portal-tools # "-static" option will not work for osx unless static system libraries are provided - make fluffy-test + make portal-test - - name: Run fluffy testnet + - name: Run Portal testnet run: | - ./fluffy/scripts/launch_local_testnet.sh --nodes=64 --run-tests + ./portal/scripts/launch_local_testnet.sh --nodes=64 --run-tests lint: - name: "Lint Fluffy" + name: "Lint Nimbus Portal" runs-on: ubuntu-latest steps: - name: Checkout @@ -320,13 +320,13 @@ jobs: - name: Check nph formatting # Pin nph to a specific version to avoid sudden style differences. # Updating nph version should be accompanied with running the new - # version on the fluffy directory. + # version on the portal directory. 
run: | VERSION="v0.6.1" ARCHIVE="nph-linux_x64.tar.gz" curl -L "https://github.com/arnetheduck/nph/releases/download/${VERSION}/${ARCHIVE}" -o ${ARCHIVE} tar -xzf ${ARCHIVE} - ./nph fluffy/ + ./nph portal/ git diff --exit-code - name: Check copyright year diff --git a/.github/workflows/fluffy_docs.yml b/.github/workflows/portal_docs.yml similarity index 86% rename from .github/workflows/fluffy_docs.yml rename to .github/workflows/portal_docs.yml index ed683e90d8..397fd4d671 100644 --- a/.github/workflows/fluffy_docs.yml +++ b/.github/workflows/portal_docs.yml @@ -1,19 +1,19 @@ -# Fluffy +# Nimbus # Copyright (c) 2023-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). # at your option. This file may not be copied, modified, or distributed except according to those terms. 
-name: Fluffy docs CI +name: Nimbus Portal docs CI on: push: branches: - master paths: - - '.github/workflows/fluffy_docs.yml' - - 'fluffy/docs/**' + - '.github/workflows/portal_docs.yml' + - 'portal/docs/**' permissions: contents: write @@ -34,6 +34,6 @@ jobs: mkdocs-material- - run: pip install mkdocs-material mkdocs-mermaid2-plugin - name: Run mkdocs github deploy - working-directory: ./fluffy/docs/the_fluffy_book/ + working-directory: ./portal/docs/the_fluffy_book/ run: | mkdocs gh-deploy --force diff --git a/.github/workflows/fluffy_nightly_docker.yml b/.github/workflows/portal_nightly_docker.yml similarity index 78% rename from .github/workflows/fluffy_nightly_docker.yml rename to .github/workflows/portal_nightly_docker.yml index 1ecde2d85b..3cf06b887a 100644 --- a/.github/workflows/fluffy_nightly_docker.yml +++ b/.github/workflows/portal_nightly_docker.yml @@ -1,11 +1,11 @@ -# Fluffy +# Nimbus # Copyright (c) 2023-2024 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). # at your option. This file may not be copied, modified, or distributed except according to those terms. -name: Fluffy nightly Docker build +name: Nimbus Portal nightly Docker build on: schedule: - cron: "30 0 * * *" @@ -28,5 +28,6 @@ jobs: - name: Build and push a nightly Docker image run: | REFNAME="${{ github.ref_name }}" - DOCKER_BUILDKIT=1 docker build -f ./fluffy/tools/docker/Dockerfile -t statusim/nimbus-fluffy:amd64-${REFNAME}-latest . + DOCKER_BUILDKIT=1 docker build -f ./portal/docker/Dockerfile -t statusim/nimbus-fluffy:amd64-${REFNAME}-latest -t statusim/nimbus-portal-client:amd64-${REFNAME}-latest . 
docker push statusim/nimbus-fluffy:amd64-${REFNAME}-latest + docker push statusim/nimbus-portal-client:amd64-${REFNAME}-latest diff --git a/Makefile b/Makefile index 150e6a4c43..b1bfbacde6 100644 --- a/Makefile +++ b/Makefile @@ -68,19 +68,19 @@ TOOLS_DIRS := \ # comma-separated values for the "clean" target TOOLS_CSV := $(subst $(SPACE),$(COMMA),$(TOOLS)) -# Fluffy debugging tools + testing tools -FLUFFY_TOOLS := \ - portal_bridge \ +# Portal debugging tools + testing tools +PORTAL_TOOLS := \ + nimbus_portal_bridge \ eth_data_exporter \ blockwalk \ portalcli \ fcli_db -FLUFFY_TOOLS_DIRS := \ - fluffy/tools/portal_bridge \ - fluffy/tools/state_bridge \ - fluffy/tools +PORTAL_TOOLS_DIRS := \ + portal/bridge \ + portal/bridge/state_bridge \ + portal/tools # comma-separated values for the "clean" target -FLUFFY_TOOLS_CSV := $(subst $(SPACE),$(COMMA),$(FLUFFY_TOOLS)) +PORTAL_TOOLS_CSV := $(subst $(SPACE),$(COMMA),$(PORTAL_TOOLS)) # Namespaced variables to avoid conflicts with other makefiles OS_PLATFORM = $(shell $(CC) -dumpmachine) @@ -104,6 +104,7 @@ VERIF_PROXY_OUT_PATH ?= build/libverifproxy/ update \ nimbus \ nimbus_execution_client \ + nimbus_portal_client \ fluffy \ nimbus_verified_proxy \ libverifproxy \ @@ -250,7 +251,7 @@ test-evm: | build deps rocksdb build_fuzzers: $(ENV_SCRIPT) nim build_fuzzers $(NIM_PARAMS) nimbus.nims - + # Primitive reproducibility test.
# # On some platforms, with some GCC versions, it may not be possible to get a @@ -265,49 +266,50 @@ test-reproducibility: [ "$$MD5SUM1" = "$$MD5SUM2" ] && echo -e "\e[92mSuccess: identical binaries.\e[39m" || \ { echo -e "\e[91mFailure: the binary changed between builds.\e[39m"; exit 1; } -# Fluffy related targets +# Portal related targets -# builds the fluffy client -fluffy: | build deps +nimbus_portal_client: | build deps echo -e $(BUILD_MSG) "build/$@" && \ - $(ENV_SCRIPT) nim c $(NIM_PARAMS) -d:chronicles_log_level=TRACE -o:build/$@ "fluffy/$@.nim" - -# primitive reproducibility test -fluffy-test-reproducibility: - + [ -e build/fluffy ] || $(MAKE) V=0 fluffy; \ - MD5SUM1=$$($(MD5SUM) build/fluffy | cut -d ' ' -f 1) && \ - rm -rf nimcache/*/fluffy && \ - $(MAKE) V=0 fluffy && \ - MD5SUM2=$$($(MD5SUM) build/fluffy | cut -d ' ' -f 1) && \ + $(ENV_SCRIPT) nim c $(NIM_PARAMS) -d:chronicles_log_level=TRACE -o:build/$@ "portal/client/$@.nim" + +# alias for nimbus_portal_client +portal: | nimbus_portal_client + +# primitive reproducibility test for nimbus_portal_client +portal-test-reproducibility: + + [ -e build/nimbus_portal_client ] || $(MAKE) V=0 nimbus_portal_client; \ + MD5SUM1=$$($(MD5SUM) build/nimbus_portal_client | cut -d ' ' -f 1) && \ + rm -rf nimcache/*/nimbus_portal_client && \ + $(MAKE) V=0 nimbus_portal_client && \ + MD5SUM2=$$($(MD5SUM) build/nimbus_portal_client | cut -d ' ' -f 1) && \ [ "$$MD5SUM1" = "$$MD5SUM2" ] && echo -e "\e[92mSuccess: identical binaries.\e[39m" || \ { echo -e "\e[91mFailure: the binary changed between builds.\e[39m"; exit 1; } -# fluffy tests +# Portal tests all_history_network_custom_chain_tests: | build deps echo -e $(BUILD_MSG) "build/$@" && \ - $(ENV_SCRIPT) nim c -r $(NIM_PARAMS) -d:chronicles_log_level=ERROR -d:mergeBlockNumber:38130 -d:nimbus_db_backend=sqlite -o:build/$@ "fluffy/tests/history_network_tests/$@.nim" - + $(ENV_SCRIPT) nim c -r $(NIM_PARAMS) -d:chronicles_log_level=ERROR -d:mergeBlockNumber:38130
-d:nimbus_db_backend=sqlite -o:build/$@ "portal/tests/history_network_tests/$@.nim" -all_fluffy_tests: | build deps +all_portal_tests: | build deps echo -e $(BUILD_MSG) "build/$@" && \ - $(ENV_SCRIPT) nim c -r $(NIM_PARAMS) -d:chronicles_log_level=ERROR -d:nimbus_db_backend=sqlite -o:build/$@ "fluffy/tests/$@.nim" + $(ENV_SCRIPT) nim c -r $(NIM_PARAMS) -d:chronicles_log_level=ERROR -d:nimbus_db_backend=sqlite -o:build/$@ "portal/tests/$@.nim" -# builds and runs the fluffy test suite -fluffy-test: | all_fluffy_tests all_history_network_custom_chain_tests +# builds and runs the Portal test suite +portal-test: | all_portal_tests all_history_network_custom_chain_tests -# builds the fluffy tools, wherever they are -$(FLUFFY_TOOLS): | build deps rocksdb - for D in $(FLUFFY_TOOLS_DIRS); do [ -e "$${D}/$@.nim" ] && TOOL_DIR="$${D}" && break; done && \ +# builds the Portal tools, wherever they are +$(PORTAL_TOOLS): | build deps rocksdb + for D in $(PORTAL_TOOLS_DIRS); do [ -e "$${D}/$@.nim" ] && TOOL_DIR="$${D}" && break; done && \ echo -e $(BUILD_MSG) "build/$@" && \ $(ENV_SCRIPT) nim c $(NIM_PARAMS) -d:chronicles_log_level=TRACE -o:build/$@ "$${TOOL_DIR}/$@.nim" -# builds all the fluffy tools -fluffy-tools: | $(FLUFFY_TOOLS) +# builds all the Portal tools +portal-tools: | $(PORTAL_TOOLS) -# Build fluffy test_portal_testnet +# Build test_portal_testnet test_portal_testnet: | build deps echo -e $(BUILD_MSG) "build/$@" && \ - $(ENV_SCRIPT) nim c $(NIM_PARAMS) -o:build/$@ "fluffy/scripts/$@.nim" + $(ENV_SCRIPT) nim c $(NIM_PARAMS) -o:build/$@ "portal/scripts/$@.nim" # builds the uTP test app utp-test-app: | build deps @@ -317,6 +319,20 @@ utp-test-app: | build deps utp-test: | build deps $(ENV_SCRIPT) nim utp_test $(NIM_PARAMS) nimbus.nims +# Deprecated legacy targets, to be removed sometime in the future + +# Legacy target, same as nimbus_portal_client, deprecated +fluffy: | build deps + echo -e "\033[0;31mWarning:\033[0m The fluffy target and binary is deprecated, use 'make 
nimbus_portal_client' instead" + echo -e $(BUILD_MSG) "build/$@" && \ + $(ENV_SCRIPT) nim c $(NIM_PARAMS) -d:chronicles_log_level=TRACE -o:build/$@ "portal/client/nimbus_portal_client.nim" + +# Legacy target, same as nimbus_portal_bridge, deprecated +portal_bridge: | build deps rocksdb + echo -e "\033[0;31mWarning:\033[0m The portal_bridge target and binary is deprecated, use 'make nimbus_portal_bridge' instead" + echo -e $(BUILD_MSG) "build/$@" && \ + $(ENV_SCRIPT) nim c $(NIM_PARAMS) -d:chronicles_log_level=TRACE -o:build/$@ "portal/bridge/nimbus_portal_bridge.nim" + # Nimbus Verified Proxy related targets # Builds the nimbus_verified_proxy @@ -359,7 +375,7 @@ txparse: | build deps # usual cleaning clean: | clean-common - rm -rf build/{nimbus,nimbus_execution_client,fluffy,libverifproxy,nimbus_verified_proxy,$(TOOLS_CSV),$(FLUFFY_TOOLS_CSV),all_tests,test_kvstore_rocksdb,test_rpc,all_fluffy_tests,all_history_network_custom_chain_tests,test_portal_testnet,utp_test_app,utp_test,*.dSYM} + rm -rf build/{nimbus,nimbus_execution_client,nimbus_portal_client,fluffy,portal_bridge,libverifproxy,nimbus_verified_proxy,$(TOOLS_CSV),$(PORTAL_TOOLS_CSV),all_tests,test_kvstore_rocksdb,test_rpc,all_portal_tests,all_history_network_custom_chain_tests,test_portal_testnet,utp_test_app,utp_test,*.dSYM} rm -rf tools/t8n/{t8n,t8n_test} rm -rf tools/evmstate/{evmstate,evmstate_test} ifneq ($(USE_LIBBACKTRACE), 0) diff --git a/README.md b/README.md index cd6d207fdd..879cb715ef 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ [![License: Apache](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0) [![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT) [![GH action-nimbus-eth1](https://github.com/status-im/nimbus-eth1/actions/workflows/ci.yml/badge.svg)](https://github.com/status-im/nimbus-eth1/actions/workflows/ci.yml) -[![GH 
action-fluffy](https://github.com/status-im/nimbus-eth1/actions/workflows/fluffy.yml/badge.svg)](https://github.com/status-im/nimbus-eth1/actions/workflows/fluffy.yml) +[![GH action-portal](https://github.com/status-im/nimbus-eth1/actions/workflows/portal.yml/badge.svg)](https://github.com/status-im/nimbus-eth1/actions/workflows/portal.yml) [![Discord: Nimbus](https://img.shields.io/badge/discord-nimbus-orange.svg)](https://discord.gg/XRxWahP) [![Status: #nimbus-general](https://img.shields.io/badge/status-nimbus--general-orange.svg)](https://get.status.im/chat/public/nimbus-general) @@ -12,7 +12,7 @@ This repository contains development work on an execution-layer client to pair with [our consensus-layer client](https://github.com/status-im/nimbus-eth2). This client focuses on efficiency and security and strives to be as light-weight as possible in terms of resources used. This repository is also home to: -- [Fluffy](./fluffy/README.md), a +- [Nimbus Portal client](./portal/README.md), a [Portal Network](https://github.com/ethereum/portal-network-specs/tree/master) light client. 
- [Nimbus Verified Proxy](./nimbus_verified_proxy/README.md) diff --git a/execution_chain/db/era1_db/db_desc.nim b/execution_chain/db/era1_db/db_desc.nim index 250962c107..7734f85dce 100644 --- a/execution_chain/db/era1_db/db_desc.nim +++ b/execution_chain/db/era1_db/db_desc.nim @@ -1,5 +1,5 @@ # Nimbus -# Copyright (c) 2021-2024 Status Research & Development GmbH +# Copyright (c) 2021-2025 Status Research & Development GmbH # Licensed under either of # * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or # http://www.apache.org/licenses/LICENSE-2.0) @@ -15,11 +15,11 @@ import std/[os, parseutils, strutils, tables], results, eth/common/blocks, - ../../../fluffy/eth_data/era1 + ../../../portal/eth_data/era1 export results, blocks -# TODO this is a "rough copy" of the fluffy DB, minus the accumulator (it goes +# TODO this is a "rough copy" of the portal era1 DB, minus the accumulator (it goes # by era number alone instead of rooted name) - eventually the two should # be merged, when eth1 gains accumulators in its metadata diff --git a/execution_chain/rpc/rpc_types.nim b/execution_chain/rpc/rpc_types.nim index 3b809453db..f1c8fa695c 100644 --- a/execution_chain/rpc/rpc_types.nim +++ b/execution_chain/rpc/rpc_types.nim @@ -1,5 +1,5 @@ # Nimbus -# Copyright (c) 2018-2024 Status Research & Development GmbH +# Copyright (c) 2018-2025 Status Research & Development GmbH # Licensed under either of # * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE)) # * MIT license ([LICENSE-MIT](LICENSE-MIT)) @@ -19,5 +19,5 @@ type FilterLog* = eth_api_types.LogObject # BlockTag instead of BlockId: - # prevent type clash with eth2 BlockId in fluffy/verified_proxy + # prevent type clash with eth2 BlockId in portal/verified_proxy BlockTag* = eth_api_types.RtBlockIdentifier diff --git a/fluffy/docs/the_fluffy_book/docs/test-suite.md b/fluffy/docs/the_fluffy_book/docs/test-suite.md deleted file mode 100644 index a4b90e8b72..0000000000 --- 
a/fluffy/docs/the_fluffy_book/docs/test-suite.md +++ /dev/null @@ -1,15 +0,0 @@ -# Fluffy test suite - -## Run Fluffy test suite -```bash -# From the nimbus-eth1 repository -make fluffy-test -``` - -## Run Fluffy local testnet script -```bash -./fluffy/scripts/launch_local_testnet.sh --run-tests -``` - -Find more details on the usage and workings of the local testnet script -[here](./run-local-testnet.md). diff --git a/nimbus.nimble b/nimbus.nimble index 3b6a5f0f10..667342e3e1 100644 --- a/nimbus.nimble +++ b/nimbus.nimble @@ -38,7 +38,7 @@ binDir = "build" when declared(namedBin): namedBin = { "execution_chain/nimbus_execution_client": "nimbus_execution_client", - "fluffy/fluffy": "fluffy", + "portal/client/nimbus_portal_client": "nimbus_portal_client", "nimbus_verified_proxy/nimbus_verified_proxy": "nimbus_verified_proxy", }.toTable() @@ -96,28 +96,28 @@ task test_import, "Run block import test": task test_evm, "Run EVM tests": test "tests", "evm_tests", "-d:chronicles_log_level=ERROR -d:unittest2DisableParamFiltering" -## Fluffy tasks +## Portal tasks -task fluffy, "Build fluffy": - buildBinary "fluffy", "fluffy/", "-d:chronicles_log_level=TRACE" +task nimbus_portal_client, "Build nimbus_portal_client": + buildBinary "nimbus_portal_client", "portal/client/", "-d:chronicles_log_level=TRACE" -task fluffy_test, "Run fluffy tests": +task portal_test, "Run Portal tests": # Need the nimbus_db_backend in state network tests as we need a Hexary to # start from, even though it only uses the MemoryDb. - test "fluffy/tests/portal_spec_tests/mainnet", "all_fluffy_portal_spec_tests", "-d:chronicles_log_level=ERROR -d:nimbus_db_backend=sqlite" + test "portal/tests/history_network_tests/", "all_history_network_custom_chain_tests", "-d:chronicles_log_level=ERROR -d:nimbus_db_backend=sqlite" # Seperate build for these tests as they are run with a low `mergeBlockNumber` # to make the tests faster. Using the real mainnet merge block number is not # realistic for these tests. 
- test "fluffy/tests", "all_fluffy_tests", "-d:chronicles_log_level=ERROR -d:nimbus_db_backend=sqlite -d:mergeBlockNumber:38130" + test "portal/tests", "all_portal_tests", "-d:chronicles_log_level=ERROR -d:nimbus_db_backend=sqlite -d:mergeBlockNumber:38130" task utp_test_app, "Build uTP test app": - buildBinary "utp_test_app", "fluffy/tools/utp_testing/", "-d:chronicles_log_level=TRACE" + buildBinary "utp_test_app", "portal/tools/utp_testing/", "-d:chronicles_log_level=TRACE" task utp_test, "Run uTP integration tests": - test "fluffy/tools/utp_testing", "utp_test", "-d:chronicles_log_level=ERROR" + test "portal/tools/utp_testing", "utp_test", "-d:chronicles_log_level=ERROR" task test_portal_testnet, "Build test_portal_testnet": - buildBinary "test_portal_testnet", "fluffy/scripts/", "-d:chronicles_log_level=DEBUG -d:unittest2DisableParamFiltering" + buildBinary "test_portal_testnet", "portal/scripts/", "-d:chronicles_log_level=DEBUG -d:unittest2DisableParamFiltering" ## Nimbus Verified Proxy tasks diff --git a/fluffy/README.md b/portal/README.md similarity index 77% rename from fluffy/README.md rename to portal/README.md index 00a05bdb68..13870eebc0 100644 --- a/fluffy/README.md +++ b/portal/README.md @@ -1,6 +1,6 @@ -# Fluffy: The Nimbus Portal Network Client +# The Nimbus Portal Network Client -[![Fluffy CI](https://github.com/status-im/nimbus-eth1/actions/workflows/fluffy.yml/badge.svg)](https://github.com/status-im/nimbus-eth1/actions/workflows/fluffy.yml) +[![Nimbus Portal client CI](https://github.com/status-im/nimbus-eth1/actions/workflows/portal.yml/badge.svg)](https://github.com/status-im/nimbus-eth1/actions/workflows/portal.yml) ![Stability: experimental](https://img.shields.io/badge/stability-experimental-orange.svg) [![License: Apache](https://img.shields.io/badge/license-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0) [![License: 
MIT](https://img.shields.io/badge/license-MIT-blue.svg)](https://opensource.org/licenses/MIT) @@ -10,11 +10,11 @@ ## Introduction -Fluffy is the Nimbus client implementation of the +The `nimbus_portal_client` is the Nimbus client implementation of the [Portal network specifications](https://github.com/ethereum/portal-network-specs). -You can find all the information you need to run a Fluffy node in -[The Fluffy Book](https://status-im.github.io/nimbus-eth1). +You can find all the information you need to run the `nimbus_portal_client` in +[the guide](https://status-im.github.io/nimbus-eth1). The [quickstart page](https://status-im.github.io/nimbus-eth1/quick-start.html) in particular will help you to get quickly connected to the Portal network. diff --git a/fluffy/tools/portal_bridge/portal_bridge.nim b/portal/bridge/nimbus_portal_bridge.nim similarity index 88% rename from fluffy/tools/portal_bridge/portal_bridge.nim rename to portal/bridge/nimbus_portal_bridge.nim index 4f7f69755a..9008a46936 100644 --- a/fluffy/tools/portal_bridge/portal_bridge.nim +++ b/portal/bridge/nimbus_portal_bridge.nim @@ -1,4 +1,4 @@ -# Fluffy +# Nimbus # Copyright (c) 2023-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). @@ -17,7 +17,7 @@ # making use of the Beacon Node REST-API, and a Portal node on the other side, # making use of the Portal JSON-RPC API. # -# Portal Network <-> Portal Client (e.g. fluffy) <--JSON-RPC--> bridge <--REST--> consensus client (e.g. Nimbus-eth2) +# Portal Network <-> Portal Client (e.g. nimbus_portal_client) <--JSON-RPC--> bridge <--REST--> consensus client (e.g. Nimbus-eth2) # # The Consensus client must support serving the Beacon LC data. # @@ -34,7 +34,7 @@ # of the EL JSON-RPC API, and a Portal node on the other side, making use of the # Portal JSON-RPC API. # -# Portal Network <-> Portal Client (e.g.
fluffy) <--Portal JSON-RPC--> bridge <--EL JSON-RPC--> execution client / web3 provider +# Portal Network <-> Portal Client (e.g. nimbus_portal_client) <--Portal JSON-RPC--> bridge <--EL JSON-RPC--> execution client / web3 provider # # Backfilling is not yet implemented. Backfilling will make use of Era1 files. # @@ -50,9 +50,10 @@ import chronicles, confutils, confutils/std/net, - ../../logging, + ../logging, ./[ - portal_bridge_conf, portal_bridge_beacon, portal_bridge_history, portal_bridge_state + nimbus_portal_bridge_conf, portal_bridge_beacon, portal_bridge_history, + portal_bridge_state, ] type PortalBridgeStatus = enum diff --git a/fluffy/tools/portal_bridge/nim.cfg b/portal/bridge/nimbus_portal_bridge.nim.cfg similarity index 100% rename from fluffy/tools/portal_bridge/nim.cfg rename to portal/bridge/nimbus_portal_bridge.nim.cfg diff --git a/fluffy/tools/portal_bridge/portal_bridge_conf.nim b/portal/bridge/nimbus_portal_bridge_conf.nim similarity index 94% rename from fluffy/tools/portal_bridge/portal_bridge_conf.nim rename to portal/bridge/nimbus_portal_bridge_conf.nim index 056b85c1e4..04e52af7d0 100644 --- a/fluffy/tools/portal_bridge/portal_bridge_conf.nim +++ b/portal/bridge/nimbus_portal_bridge_conf.nim @@ -1,4 +1,4 @@ -# Fluffy +# Nimbus # Copyright (c) 2023-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). 
@@ -12,9 +12,10 @@ import confutils, confutils/std/net, nimcrypto/hash, - ../../network_metadata, - ../../eth_data/era1, - ../../[conf, logging] + ../network/network_metadata, + ../eth_data/era1, + ../client/nimbus_portal_client_conf, + ../logging export net @@ -33,10 +34,15 @@ proc defaultEra1DataDir*(): string = defaultEthDataDir() / "era1" proc defaultPortalBridgeStateDir*(): string = - when defined(windows) or defined(macosx): - defaultDataDir() / "Bridge" / "State" - else: - defaultDataDir() / "bridge" / "state" + let relativeDataDir = + when defined(windows): + "AppData" / "Roaming" / "Nimbus" / "PortalBridge" + elif defined(macosx): + "Library" / "Application Support" / "Nimbus" / "PortalBridge" + else: + ".cache" / "nimbus" / "portal-bridge" + + getHomeDir() / relativeDataDir const defaultEndEra* = uint64(era(network_metadata.mergeBlockNumber - 1)) @@ -138,7 +144,7 @@ type era1Dir* {. desc: "The directory where all era1 files are stored", defaultValue: defaultEra1DataDir(), - defaultValueDesc: defaultEra1DataDir(), + defaultValueDesc: "", name: "era1-dir" .}: InputDir @@ -165,7 +171,7 @@ type stateDir* {. desc: "The directory where the state data is stored", defaultValue: defaultPortalBridgeStateDir(), - defaultValueDesc: defaultPortalBridgeStateDir(), + defaultValueDesc: "", name: "state-dir" .}: InputDir diff --git a/fluffy/tools/portal_bridge/portal_bridge_beacon.nim b/portal/bridge/portal_bridge_beacon.nim similarity index 98% rename from fluffy/tools/portal_bridge/portal_bridge_beacon.nim rename to portal/bridge/portal_bridge_beacon.nim index fd6b81b061..9957ce17ae 100644 --- a/fluffy/tools/portal_bridge/portal_bridge_beacon.nim +++ b/portal/bridge/portal_bridge_beacon.nim @@ -1,4 +1,4 @@ -# Fluffy +# Nimbus # Copyright (c) 2023-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). 
@@ -16,10 +16,10 @@ import eth/async_utils, json_rpc/clients/httpclient, beacon_chain/spec/eth2_apis/rest_beacon_client, - ../../network/beacon/beacon_content, - ../../rpc/portal_rpc_client, - ../eth_data_exporter/cl_data_exporter, - ./[portal_bridge_conf, portal_bridge_common] + ../network/beacon/beacon_content, + ../rpc/portal_rpc_client, + ../tools/eth_data_exporter/cl_data_exporter, + ./[nimbus_portal_bridge_conf, portal_bridge_common] const restRequestsTimeout = 30.seconds @@ -280,7 +280,7 @@ proc gossipHistoricalSummaries( err("No historical summaries pre-Capella") proc runBeacon*(config: PortalBridgeConf) {.raises: [CatchableError].} = - notice "Launching Fluffy beacon chain bridge", cmdParams = commandLineParams() + notice "Launching Nimbus Portal beacon chain bridge", cmdParams = commandLineParams() let (cfg, forkDigests, beaconClock) = getBeaconData() diff --git a/fluffy/tools/portal_bridge/portal_bridge_common.nim b/portal/bridge/portal_bridge_common.nim similarity index 95% rename from fluffy/tools/portal_bridge/portal_bridge_common.nim rename to portal/bridge/portal_bridge_common.nim index ae68a8c523..39bdc41f86 100644 --- a/fluffy/tools/portal_bridge/portal_bridge_common.nim +++ b/portal/bridge/portal_bridge_common.nim @@ -1,5 +1,5 @@ -# Fluffy -# Copyright (c) 2024 Status Research & Development GmbH +# Nimbus +# Copyright (c) 2024-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). 
@@ -11,8 +11,8 @@ import chronicles, json_rpc/rpcclient, web3/[eth_api, eth_api_types], - ../../rpc/rpc_calls/rpc_trace_calls, - ./portal_bridge_conf + ../rpc/rpc_calls/rpc_trace_calls, + ./nimbus_portal_bridge_conf export rpcclient diff --git a/fluffy/tools/portal_bridge/portal_bridge_history.nim b/portal/bridge/portal_bridge_history.nim similarity index 96% rename from fluffy/tools/portal_bridge/portal_bridge_history.nim rename to portal/bridge/portal_bridge_history.nim index 124b302110..89db1c5501 100644 --- a/fluffy/tools/portal_bridge/portal_bridge_history.nim +++ b/portal/bridge/portal_bridge_history.nim @@ -1,4 +1,4 @@ -# Fluffy +# Nimbus # Copyright (c) 2024-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). @@ -17,14 +17,14 @@ import eth/common/keys, eth/common/[base, headers_rlp, blocks_rlp, receipts], eth/p2p/discoveryv5/random2, - ../../../execution_chain/beacon/web3_eth_conv, - ../../../hive_integration/nodocker/engine/engine_client, - ../../rpc/portal_rpc_client, - ../../network/history/[history_content, history_type_conversions, history_validation], - ../../network_metadata, - ../../eth_data/[era1, history_data_ssz_e2s, history_data_seeding], - ../../database/era1_db, - ./[portal_bridge_conf, portal_bridge_common] + ../../execution_chain/beacon/web3_eth_conv, + ../../hive_integration/nodocker/engine/engine_client, + ../rpc/portal_rpc_client, + ../network/history/[history_content, history_type_conversions, history_validation], + ../network/network_metadata, + ../eth_data/[era1, history_data_ssz_e2s, history_data_seeding], + ../database/era1_db, + ./[nimbus_portal_bridge_conf, portal_bridge_common] from stew/objects import checkedEnumAssign @@ -309,8 +309,9 @@ proc runBackfillLoop( # Note: # There are two design options here: - # 1. 
Provide the Era1 file through the fluffy custom debug API and let - # fluffy process the Era1 file and gossip the content from there. + # 1. Provide the Era1 file through the nimbus_portal_client custom debug API + # and let nimbus_portal_client process the Era1 file and gossip the content + # from there. # 2. Process the Era1 files in the bridge and call the # standardized gossip JSON-RPC method. # diff --git a/fluffy/tools/portal_bridge/portal_bridge_state.nim b/portal/bridge/portal_bridge_state.nim similarity index 99% rename from fluffy/tools/portal_bridge/portal_bridge_state.nim rename to portal/bridge/portal_bridge_state.nim index 12a9f8aab8..d297c7ccd0 100644 --- a/fluffy/tools/portal_bridge/portal_bridge_state.nim +++ b/portal/bridge/portal_bridge_state.nim @@ -1,4 +1,4 @@ -# Fluffy +# Nimbus # Copyright (c) 2024-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). 
@@ -17,12 +17,12 @@ import web3/[eth_api, eth_api_types], results, eth/common/[addresses_rlp, hashes_rlp], - ../../../execution_chain/common/chain_config, - ../../rpc/rpc_calls/rpc_trace_calls, - ../../rpc/portal_rpc_client, - ../../network/state/[state_content, state_gossip], + ../../execution_chain/common/chain_config, + ../rpc/rpc_calls/rpc_trace_calls, + ../rpc/portal_rpc_client, + ../network/state/[state_content, state_gossip], ./state_bridge/[database, state_diff, world_state_helper, offers_builder], - ./[portal_bridge_conf, portal_bridge_common] + ./[nimbus_portal_bridge_conf, portal_bridge_common] logScope: topics = "portal_bridge" diff --git a/fluffy/tools/portal_bridge/state_bridge/database.nim b/portal/bridge/state_bridge/database.nim similarity index 98% rename from fluffy/tools/portal_bridge/state_bridge/database.nim rename to portal/bridge/state_bridge/database.nim index 517fd38340..58abd512ce 100644 --- a/fluffy/tools/portal_bridge/state_bridge/database.nim +++ b/portal/bridge/state_bridge/database.nim @@ -1,5 +1,5 @@ -# Fluffy -# Copyright (c) 2024 Status Research & Development GmbH +# Nimbus +# Copyright (c) 2024-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). 
diff --git a/fluffy/tools/portal_bridge/state_bridge/offers_builder.nim b/portal/bridge/state_bridge/offers_builder.nim similarity index 96% rename from fluffy/tools/portal_bridge/state_bridge/offers_builder.nim rename to portal/bridge/state_bridge/offers_builder.nim index 51d3375f72..eb3593b0f3 100644 --- a/fluffy/tools/portal_bridge/state_bridge/offers_builder.nim +++ b/portal/bridge/state_bridge/offers_builder.nim @@ -1,5 +1,5 @@ -# Fluffy -# Copyright (c) 2024 Status Research & Development GmbH +# Nimbus +# Copyright (c) 2024-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). @@ -10,7 +10,7 @@ import std/[sequtils, sugar], eth/common/hashes, - ../../../network/state/[state_content, state_utils, state_gossip], + ../../network/state/[state_content, state_utils, state_gossip], ./world_state type OffersBuilder* = object diff --git a/fluffy/tools/portal_bridge/state_bridge/state_diff.nim b/portal/bridge/state_bridge/state_diff.nim similarity index 97% rename from fluffy/tools/portal_bridge/state_bridge/state_diff.nim rename to portal/bridge/state_bridge/state_diff.nim index 19033f9f53..6b76a2c9d5 100644 --- a/fluffy/tools/portal_bridge/state_bridge/state_diff.nim +++ b/portal/bridge/state_bridge/state_diff.nim @@ -1,5 +1,5 @@ -# Fluffy -# Copyright (c) 2024 Status Research & Development GmbH +# Nimbus +# Copyright (c) 2024-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). 
@@ -12,7 +12,7 @@ import stew/byteutils, stint, eth/common/base, - ../../../rpc/rpc_calls/rpc_trace_calls, + ../../rpc/rpc_calls/rpc_trace_calls, ../portal_bridge_common type diff --git a/fluffy/tools/portal_bridge/state_bridge/world_state.nim b/portal/bridge/state_bridge/world_state.nim similarity index 98% rename from fluffy/tools/portal_bridge/state_bridge/world_state.nim rename to portal/bridge/state_bridge/world_state.nim index 3cde26a3cd..a6e09a67de 100644 --- a/fluffy/tools/portal_bridge/state_bridge/world_state.nim +++ b/portal/bridge/state_bridge/world_state.nim @@ -1,5 +1,5 @@ -# Fluffy -# Copyright (c) 2024 Status Research & Development GmbH +# Nimbus +# Copyright (c) 2024-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). @@ -11,8 +11,8 @@ import stint, eth/[rlp, trie, trie/db, trie/trie_defs], eth/common/[base, hashes, accounts_rlp], - ../../../common/common_types, - ../../../network/state/state_utils, + ../../common/common_types, + ../../network/state/state_utils, ./database # Account State definition diff --git a/fluffy/tools/portal_bridge/state_bridge/world_state_helper.nim b/portal/bridge/state_bridge/world_state_helper.nim similarity index 96% rename from fluffy/tools/portal_bridge/state_bridge/world_state_helper.nim rename to portal/bridge/state_bridge/world_state_helper.nim index 623a4a0606..647e9147f0 100644 --- a/fluffy/tools/portal_bridge/state_bridge/world_state_helper.nim +++ b/portal/bridge/state_bridge/world_state_helper.nim @@ -1,4 +1,4 @@ -# Fluffy +# Nimbus # Copyright (c) 2024-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). 
@@ -12,10 +12,10 @@ import stint, results, eth/common/addresses, - ../../../../execution_chain/common/chain_config, + ../../../execution_chain/common/chain_config, ./[state_diff, world_state] -from ../../../../execution_chain/core/dao import DAORefundContract, DAODrainList +from ../../../execution_chain/core/dao import DAORefundContract, DAODrainList export chain_config, state_diff, world_state diff --git a/fluffy/fluffy.nim b/portal/client/nimbus_portal_client.nim similarity index 80% rename from fluffy/fluffy.nim rename to portal/client/nimbus_portal_client.nim index bacbd9749f..9939fc344a 100644 --- a/fluffy/fluffy.nim +++ b/portal/client/nimbus_portal_client.nim @@ -1,4 +1,4 @@ -# Fluffy +# Nimbus # Copyright (c) 2021-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). @@ -22,20 +22,25 @@ import eth/common/keys, eth/net/nat, eth/p2p/discoveryv5/protocol as discv5_protocol, - ./conf, - ./network_metadata, - ./common/common_utils, - ./evm/[async_evm, async_evm_portal_backend], - ./rpc/[ + ../common/common_utils, + ../common/common_deprecation, + ../evm/[async_evm, async_evm_portal_backend], + ../rpc/[ rpc_eth_api, rpc_debug_api, rpc_discovery_api, rpc_portal_common_api, rpc_portal_history_api, rpc_portal_beacon_api, rpc_portal_state_api, rpc_portal_nimbus_beacon_api, rpc_portal_debug_history_api, ], - ./database/content_db, - ./network/wire/portal_protocol_version, - ./portal_node, - ./version, - ./logging + ../database/content_db, + ../network/wire/portal_protocol_version, + ../network/[portal_node, network_metadata], + ../version, + ../logging, + ./nimbus_portal_client_conf + +const + enrFileName = "portal_node.enr" + lockFileName = "portal_node.lock" + contentDbFileName = "contentdb" chronicles.formatIt(IoErrorCode): $it @@ -47,38 +52,39 @@ func optionToOpt[T](o: Option[T]): Opt[T] = Opt.none(T) type - FluffyStatus = enum + 
PortalClientStatus = enum Starting Running Stopping - Fluffy = ref object - status: FluffyStatus + PortalClient = ref object + status: PortalClientStatus portalNode: PortalNode metricsServer: Opt[MetricsHttpServerRef] rpcHttpServer: Opt[RpcHttpServer] rpcWsServer: Opt[RpcWebSocketServer] -proc init(T: type Fluffy): T = - Fluffy(status: FluffyStatus.Starting) +proc init(T: type PortalClient): T = + PortalClient(status: PortalClientStatus.Starting) -proc run(fluffy: Fluffy, config: PortalConf) {.raises: [CatchableError].} = +proc run(portalClient: PortalClient, config: PortalConf) {.raises: [CatchableError].} = setupLogging(config.logLevel, config.logStdout, none(OutFile)) - notice "Launching Fluffy", version = fullVersionStr, cmdParams = commandLineParams() + notice "Launching Nimbus Portal client", + version = fullVersionStr, cmdParams = commandLineParams() let rng = newRng() + let dataDir = config.dataDir.string # Make sure dataDir exists - let pathExists = createPath(config.dataDir.string) + let pathExists = createPath(dataDir) if pathExists.isErr(): - fatal "Failed to create data directory", - dataDir = config.dataDir, error = pathExists.error + fatal "Failed to create data directory", dataDir, error = pathExists.error quit QuitFailure # Make sure multiple instances to the same dataDir do not exist let - lockFilePath = config.dataDir.string / "fluffy.lock" + lockFilePath = dataDir / lockFileName lockFlags = {OpenFlags.Create, OpenFlags.Read, OpenFlags.Write} lockFileHandleResult = openFile(lockFilePath, lockFlags) @@ -88,8 +94,8 @@ proc run(fluffy: Fluffy, config: PortalConf) {.raises: [CatchableError].} = let lockFileHandle = lockFile(lockFileHandleResult.value(), LockType.Exclusive) if lockFileHandle.isErr(): - fatal "Please ensure no other fluffy instances are running with the same data directory", - dataDir = config.dataDir + fatal "Please ensure no other nimbus_portal_client instances are running with the same data directory", + dataDir quit QuitFailure let 
lockFileIoHandle = lockFileHandle.value() @@ -99,6 +105,11 @@ proc run(fluffy: Fluffy, config: PortalConf) {.raises: [CatchableError].} = discard closeFile(lockFileIoHandle.handle) ) + # Check for legacy files and move them to the new naming in case they exist + # TODO: Remove this at some point in the future + moveFileIfExists(dataDir / legacyEnrFileName, dataDir / enrFileName) + moveFileIfExists(dataDir / legacyLockFileName, dataDir / lockFileName) + ## Network configuration let bindIp = config.listenAddress @@ -106,7 +117,9 @@ proc run(fluffy: Fluffy, config: PortalConf) {.raises: [CatchableError].} = # TODO: allow for no TCP port mapping! (extIp, _, extUdpPort) = try: - setupAddress(config.nat, config.listenAddress, udpPort, udpPort, "fluffy") + setupAddress( + config.nat, config.listenAddress, udpPort, udpPort, "nimbus_portal_client" + ) except CatchableError as exc: raise exc # TODO: Ideally we don't have the Exception here except Exception as exc: @@ -117,7 +130,7 @@ proc run(fluffy: Fluffy, config: PortalConf) {.raises: [CatchableError].} = else: getPersistentNetKey(rng[], config.networkKeyFile) - enrFilePath = config.dataDir / "fluffy_node.enr" + enrFilePath = dataDir / enrFileName previousEnr = if not newNetKey: getPersistentEnr(enrFilePath) @@ -172,7 +185,7 @@ proc run(fluffy: Fluffy, config: PortalConf) {.raises: [CatchableError].} = ## Force pruning - optional if config.forcePrune: let db = ContentDB.new( - config.dataDir / config.network.getDbDirectory() / "contentdb_" & + dataDir / config.network.getDbDirectory() / "contentdb_" & d.localNode.id.toBytesBE().toOpenArray(0, 8).toHex(), storageCapacity = config.storageCapacityMB * 1_000_000, radiusConfig = config.radiusConfig, @@ -195,6 +208,24 @@ proc run(fluffy: Fluffy, config: PortalConf) {.raises: [CatchableError].} = db.forcePrune(d.localNode.id, radius) db.close() + # Check for legacy db naming and move to the new naming in case it exist + # TODO: Remove this at some point in the future + let 
dbPath = + dataDir / config.network.getDbDirectory() / "contentdb_" & + d.localNode.id.toBytesBE().toOpenArray(0, 8).toHex() + moveFileIfExists( + dbPath / legacyContentDbFileName & ".sqlite3", + dbPath / contentDbFileName & ".sqlite3", + ) + moveFileIfExists( + dbPath / legacyContentDbFileName & ".sqlite3-shm", + dbPath / contentDbFileName & ".sqlite3-shm", + ) + moveFileIfExists( + dbPath / legacyContentDbFileName & ".sqlite3-wal", + dbPath / contentDbFileName & ".sqlite3-wal", + ) + ## Portal node setup let portalProtocolConfig = PortalProtocolConfig.init( @@ -213,7 +244,7 @@ proc run(fluffy: Fluffy, config: PortalConf) {.raises: [CatchableError].} = disableStateRootValidation: config.disableStateRootValidation, trustedBlockRoot: config.trustedBlockRoot.optionToOpt(), portalConfig: portalProtocolConfig, - dataDir: string config.dataDir, + dataDir: dataDir, storageCapacity: config.storageCapacityMB * 1_000_000, contentRequestRetries: config.contentRequestRetries.int, contentQueueWorkers: config.contentQueueWorkers, @@ -228,10 +259,7 @@ proc run(fluffy: Fluffy, config: PortalConf) {.raises: [CatchableError].} = rng = rng, ) - # TODO: If no new network key is generated then we should first check if an - # enr file exists, and in the case it does read out the seqNum from it and - # reuse that. - let enrFile = config.dataDir / "fluffy_node.enr" + let enrFile = dataDir / enrFileName if io2.writeFile(enrFile, d.localNode.record.toURI()).isErr: fatal "Failed to write the enr file", file = enrFile quit 1 @@ -263,7 +291,7 @@ proc run(fluffy: Fluffy, config: PortalConf) {.raises: [CatchableError].} = ## Start the Portal node. 
node.start() - # For now the Fluffy EVM is only used by the RPC servers so we create the + # For now the async EVM is only used by the RPC servers so we create the # instance here and share it between all the RPC handlers that need it let asyncEvm = if node.stateNetwork.isSome(): @@ -345,13 +373,13 @@ proc run(fluffy: Fluffy, config: PortalConf) {.raises: [CatchableError].} = else: Opt.none(RpcWebSocketServer) - fluffy.status = FluffyStatus.Running - fluffy.portalNode = node - fluffy.metricsServer = metricsServer - fluffy.rpcHttpServer = rpcHttpServer - fluffy.rpcWsServer = rpcWsServer + portalClient.status = PortalClientStatus.Running + portalClient.portalNode = node + portalClient.metricsServer = metricsServer + portalClient.rpcHttpServer = rpcHttpServer + portalClient.rpcWsServer = rpcWsServer -proc stop(f: Fluffy) {.async: (raises: []).} = +proc stop(f: PortalClient) {.async: (raises: []).} = if f.rpcWsServer.isSome(): let server = f.rpcWsServer.get() try: @@ -386,10 +414,10 @@ when isMainModule: ) {.push raises: [].} - let fluffy = Fluffy.init() + let portalClient = PortalClient.init() case config.cmd of PortalCmd.noCommand: - fluffy.run(config) + portalClient.run(config) # Ctrl+C handling proc controlCHandler() {.noconv.} = @@ -401,17 +429,17 @@ when isMainModule: raiseAssert exc.msg # shouldn't happen notice "Shutting down after having received SIGINT" - fluffy.status = FluffyStatus.Stopping + portalClient.status = PortalClientStatus.Stopping try: setControlCHook(controlCHandler) except Exception as exc: # TODO Exception warn "Cannot set ctrl-c handler", msg = exc.msg - while fluffy.status == FluffyStatus.Running: + while portalClient.status == PortalClientStatus.Running: try: poll() except CatchableError as e: warn "Exception in poll()", exc = e.name, err = e.msg - waitFor fluffy.stop() + waitFor portalClient.stop() diff --git a/fluffy/fluffy.nim.cfg b/portal/client/nimbus_portal_client.nim.cfg similarity index 100% rename from fluffy/fluffy.nim.cfg rename 
to portal/client/nimbus_portal_client.nim.cfg diff --git a/fluffy/conf.nim b/portal/client/nimbus_portal_client_conf.nim similarity index 95% rename from fluffy/conf.nim rename to portal/client/nimbus_portal_client_conf.nim index af3fd9d3f2..d229c5ebca 100644 --- a/fluffy/conf.nim +++ b/portal/client/nimbus_portal_client_conf.nim @@ -1,4 +1,4 @@ -# Fluffy +# Nimbus # Copyright (c) 2021-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). @@ -17,20 +17,27 @@ import eth/p2p/discoveryv5/[enr, node, routing_table], nimcrypto/hash, stew/byteutils, + stew/io2, eth/net/nat, # must be late (compilation annoyance) - ./logging, - ./network/wire/portal_protocol_config + ../logging, + ../network/wire/portal_protocol_config, + ../common/common_deprecation proc defaultDataDir*(): string = - let dataDir = + # Backwards compatibility for the old default data directory + let legacyDir = legacyDataDir() + if fileAccessible(legacyDir, {AccessFlags.Find}): + return legacyDir + + let relativeDataDir = when defined(windows): - "AppData" / "Roaming" / "Fluffy" + "AppData" / "Roaming" / "Nimbus" / "PortalClient" elif defined(macosx): - "Library" / "Application Support" / "Fluffy" + "Library" / "Application Support" / "Nimbus" / "PortalClient" else: - ".cache" / "fluffy" + ".cache" / "nimbus" / "portal-client" - getHomeDir() / dataDir + getHomeDir() / relativeDataDir const defaultListenAddress* = (static parseIpAddress("0.0.0.0")) @@ -38,7 +45,6 @@ const defaultListenAddressDesc = $defaultListenAddress defaultAdminListenAddressDesc = $defaultAdminListenAddress - defaultDataDirDesc = defaultDataDir() defaultStorageCapacity* = 2000'u32 # 2 GB default defaultStorageCapacityDesc* = $defaultStorageCapacity @@ -140,9 +146,9 @@ type .}: bool dataDir* {. 
- desc: "The directory where fluffy will store the content data", + desc: "The directory where application data will be stored", defaultValue: defaultDataDir(), - defaultValueDesc: $defaultDataDirDesc, + defaultValueDesc: "", name: "data-dir" .}: OutDir @@ -295,7 +301,7 @@ type radiusConfig* {. desc: - "Radius configuration for a fluffy node. Radius can be either `dynamic` " & + "Radius configuration for the portal node. Radius can be either `dynamic` " & "where the node adjusts the radius based on `storage-size` option, " & "or `static:` where the node has a hardcoded logarithmic radius value. " & "Warning: `static:` disables `storage-size` limits and " & diff --git a/portal/common/common_deprecation.nim b/portal/common/common_deprecation.nim new file mode 100644 index 0000000000..9432ba56d0 --- /dev/null +++ b/portal/common/common_deprecation.nim @@ -0,0 +1,41 @@ +# Nimbus +# Copyright (c) 2025 Status Research & Development GmbH +# Licensed and distributed under either of +# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). +# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). +# at your option. This file may not be copied, modified, or distributed except according to those terms. 
+ +{.push raises: [].} + +import std/os, stew/io2, chronicles + +# Utils/consts/types only used to maintain backwards compatibility +# Functions or this file in its entirety should be removed when no longer needed +# They are put in this file to have a single place to maintain them making it +# more easy to search and remove them compared to when scattered across the codebase + +const + legacyEnrFileName* = "fluffy_node.enr" + legacyLockFileName* = "fluffy.lock" + legacyContentDbFileName* = "fluffy" + +proc moveFileIfExists*(src, dst: string) = + if fileAccessible(src, {AccessFlags.Find}): + try: + moveFile(src, dst) + except CatchableError as e: + fatal "Failed to move legacy file", src = src, dst = dst, error = e.msg + quit QuitFailure + except Exception as exc: + raiseAssert exc.msg + +proc legacyDataDir*(): string = + let relativeDataDir = + when defined(windows): + "AppData" / "Roaming" / "Fluffy" + elif defined(macosx): + "Library" / "Application Support" / "Fluffy" + else: + ".cache" / "fluffy" + + getHomeDir() / relativeDataDir diff --git a/fluffy/common/common_types.nim b/portal/common/common_types.nim similarity index 100% rename from fluffy/common/common_types.nim rename to portal/common/common_types.nim diff --git a/fluffy/common/common_utils.nim b/portal/common/common_utils.nim similarity index 100% rename from fluffy/common/common_utils.nim rename to portal/common/common_utils.nim diff --git a/fluffy/database/content_db.nim b/portal/database/content_db.nim similarity index 99% rename from fluffy/database/content_db.nim rename to portal/database/content_db.nim index da3a7823f7..f6d9d1e6ea 100644 --- a/fluffy/database/content_db.nim +++ b/portal/database/content_db.nim @@ -1,4 +1,4 @@ -# Fluffy +# Nimbus # Copyright (c) 2021-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). 
@@ -200,11 +200,11 @@ proc new*( let db = if inMemory: - SqStoreRef.init("", "fluffy-test", inMemory = true).expect( + SqStoreRef.init("", "contentdb-test", inMemory = true).expect( "working database (out of memory?)" ) else: - SqStoreRef.init(path, "fluffy", manualCheckpoint = false).expectDb() + SqStoreRef.init(path, "contentdb", manualCheckpoint = false).expectDb() db.createCustomFunction("xorDistance", 2, xorDistance).expect( "Custom function xorDistance creation OK" diff --git a/fluffy/database/content_db_custom_sql_functions.nim b/portal/database/content_db_custom_sql_functions.nim similarity index 99% rename from fluffy/database/content_db_custom_sql_functions.nim rename to portal/database/content_db_custom_sql_functions.nim index f3b7a5408b..1d8140d534 100644 --- a/fluffy/database/content_db_custom_sql_functions.nim +++ b/portal/database/content_db_custom_sql_functions.nim @@ -1,4 +1,4 @@ -# Fluffy +# Nimbus # Copyright (c) 2023-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). diff --git a/fluffy/database/era1_db.nim b/portal/database/era1_db.nim similarity index 97% rename from fluffy/database/era1_db.nim rename to portal/database/era1_db.nim index 24063671ce..4515dc6c80 100644 --- a/fluffy/database/era1_db.nim +++ b/portal/database/era1_db.nim @@ -1,5 +1,5 @@ -# fluffy -# Copyright (c) 2024 Status Research & Development GmbH +# Nimbus +# Copyright (c) 2024-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). 
diff --git a/fluffy/tools/docker/Dockerfile b/portal/docker/Dockerfile similarity index 75% rename from fluffy/tools/docker/Dockerfile rename to portal/docker/Dockerfile index 11121ce234..d92c9009cd 100644 --- a/fluffy/tools/docker/Dockerfile +++ b/portal/docker/Dockerfile @@ -1,5 +1,5 @@ -# Fluffy -# Copyright (c) 2022-2024 Status Research & Development GmbH +# Nimbus +# Copyright (c) 2022-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). @@ -24,14 +24,17 @@ RUN git clone https://github.com/status-im/nimbus-eth1.git \ && make -j${NPROC} NIMFLAGS="${NIMFLAGS_COMMON} --parallelBuild:${NPROC}" V=1 update RUN cd nimbus-eth1 && \ - make -j${NPROC} NIMFLAGS="${NIMFLAGS_COMMON} --parallelBuild:${NPROC}" fluffy && \ - mv build/fluffy /usr/bin/ + make -j${NPROC} NIMFLAGS="${NIMFLAGS_COMMON} --parallelBuild:${NPROC}" nimbus_portal_client && \ + mv build/nimbus_portal_client /usr/bin/ # --------------------------------- # # Starting new image to reduce size # # --------------------------------- # FROM debian:buster-slim AS deploy -COPY --from=build /usr/bin/fluffy /usr/bin/fluffy +COPY --from=build /usr/bin/nimbus_portal_client /usr/bin/nimbus_portal_client -ENTRYPOINT ["/usr/bin/fluffy"] +# Legacy name for backwards compatibility +RUN ln -s /usr/bin/nimbus_portal_client /usr/bin/fluffy + +ENTRYPOINT ["/usr/bin/nimbus_portal_client"] diff --git a/fluffy/tools/docker/Dockerfile.debug b/portal/docker/Dockerfile.debug similarity index 78% rename from fluffy/tools/docker/Dockerfile.debug rename to portal/docker/Dockerfile.debug index 674a3d95ff..8dff67e83b 100644 --- a/fluffy/tools/docker/Dockerfile.debug +++ b/portal/docker/Dockerfile.debug @@ -1,5 +1,5 @@ -# Fluffy -# Copyright (c) 2023-2024 Status Research & Development 
GmbH
+# Nimbus
+# Copyright (c) 2023-2025 Status Research & Development GmbH
 # Licensed and distributed under either of
 #  * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
 #  * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
@@ -27,13 +27,15 @@ RUN --mount=type=cache,target=/nimbus-eth1/build --mount=type=cache,target=/nim
     make -j${NPROC} NIMFLAGS="${NIMFLAGS_COMMON} --parallelBuild:${NPROC}" V=1 update
 
 RUN --mount=type=cache,target=/nimbus-eth1/build --mount=type=cache,target=/nimbus-eth1/vendor \
-    make -j${NPROC} NIMFLAGS="${NIMFLAGS_COMMON} --parallelBuild:${NPROC}" fluffy && \
-    mv build/fluffy /usr/local/bin/fluffy
+    make -j${NPROC} NIMFLAGS="${NIMFLAGS_COMMON} --parallelBuild:${NPROC}" nimbus_portal_client && \
+    mv build/nimbus_portal_client /usr/local/bin/nimbus_portal_client
 
 # --------------------------------- #
 # Starting new image to reduce size #
 # --------------------------------- #
 FROM debian:stable-slim AS deploy
 
-COPY --from=build /usr/local/bin/fluffy /usr/local/bin/fluffy
+COPY --from=build /usr/local/bin/nimbus_portal_client /usr/bin/nimbus_portal_client
 
-ENTRYPOINT ["/usr/local/bin/fluffy"]
+RUN ln -s /usr/bin/nimbus_portal_client /usr/bin/fluffy
+
+ENTRYPOINT ["/usr/bin/nimbus_portal_client"]
diff --git a/fluffy/tools/docker/Dockerfile.debug.dockerignore b/portal/docker/Dockerfile.debug.dockerignore
similarity index 100%
rename from fluffy/tools/docker/Dockerfile.debug.dockerignore
rename to portal/docker/Dockerfile.debug.dockerignore
diff --git a/fluffy/tools/docker/Dockerfile.debug.linux b/portal/docker/Dockerfile.debug.linux
similarity index 58%
rename from fluffy/tools/docker/Dockerfile.debug.linux
rename to portal/docker/Dockerfile.debug.linux
index b35169d3fe..5afb2e0966 100644
--- a/fluffy/tools/docker/Dockerfile.debug.linux
+++ b/portal/docker/Dockerfile.debug.linux
@@ -1,5 +1,5 @@
-# Fluffy
-# Copyright (c) 2024 Status Research & 
Development GmbH +# Nimbus +# Copyright (c) 2024-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). @@ -7,4 +7,9 @@ FROM debian:stable-slim -COPY ./build/fluffy /usr/local/bin/fluffy +COPY ./build/nimbus_portal_client /usr/bin/nimbus_portal_client + +# Legacy name for backwards compatibility +RUN ln -s /usr/bin/nimbus_portal_client /usr/bin/fluffy + +ENTRYPOINT ["/usr/bin/nimbus_portal_client"] diff --git a/fluffy/docs/the_fluffy_book/docs/CNAME b/portal/docs/the_fluffy_book/docs/CNAME similarity index 100% rename from fluffy/docs/the_fluffy_book/docs/CNAME rename to portal/docs/the_fluffy_book/docs/CNAME diff --git a/fluffy/docs/the_fluffy_book/docs/access-content.md b/portal/docs/the_fluffy_book/docs/access-content.md similarity index 94% rename from fluffy/docs/the_fluffy_book/docs/access-content.md rename to portal/docs/the_fluffy_book/docs/access-content.md index 853a68cdd0..f1cb7d27de 100644 --- a/fluffy/docs/the_fluffy_book/docs/access-content.md +++ b/portal/docs/the_fluffy_book/docs/access-content.md @@ -1,6 +1,6 @@ # Access content on the Portal network -Once you have a Fluffy node [connected to network](./connect-to-portal.md) with +Once you have a Portal node [connected to network](./connect-to-portal.md) with the JSON-RPC interface enabled, then you can access the content available on the Portal network. 
diff --git a/fluffy/docs/the_fluffy_book/docs/adding-documentation.md b/portal/docs/the_fluffy_book/docs/adding-documentation.md similarity index 80% rename from fluffy/docs/the_fluffy_book/docs/adding-documentation.md rename to portal/docs/the_fluffy_book/docs/adding-documentation.md index 3313afa7bd..05f858473f 100644 --- a/fluffy/docs/the_fluffy_book/docs/adding-documentation.md +++ b/portal/docs/the_fluffy_book/docs/adding-documentation.md @@ -4,14 +4,14 @@ The documentation visible on [https://fluffy.guide](https://fluffy.guide) is gen If you want to be able to dry run any changes you make, you best install mkdocs locally. -All the documentation related files can be found under the `./fluffy/docs/the_fluffy_book` directory. +All the documentation related files can be found under the `./portal/docs/the_fluffy_book` directory. ## How to test and add documentation changes - Install `mkdocs` - Install Material for MkDocs by running `pip install mkdocs-material mkdocs-mermaid2-plugin`. - Make your changes to the documentation -- Run `mkdocs serve` from the `./fluffy/docs/the_fluffy_book` directory and test your changes. Alter as required. +- Run `mkdocs serve` from the `./portal/docs/the_fluffy_book` directory and test your changes. Alter as required. - Push your changes to a PR on nimbus-eth1 When the PR gets merged, a CI job will run that deploys automatically the changes to [https://fluffy.guide](https://fluffy.guide). 
diff --git a/fluffy/docs/the_fluffy_book/docs/architecture.md b/portal/docs/the_fluffy_book/docs/architecture.md similarity index 61% rename from fluffy/docs/the_fluffy_book/docs/architecture.md rename to portal/docs/the_fluffy_book/docs/architecture.md index d49c7c79d0..b9806f80b9 100644 --- a/fluffy/docs/the_fluffy_book/docs/architecture.md +++ b/portal/docs/the_fluffy_book/docs/architecture.md @@ -1,16 +1,16 @@ -# Fluffy Architecture +# Nimbus Portal Client Architecture -This section outlines Fluffy's architecture and shows the main components in the codebase. The arrows indicate a dependancy relationship between each component. +This section outlines the Nimbus Portal client's architecture and shows the main components in the codebase. The arrows indicate a dependancy relationship between each component. -## Fluffy high level architecture +## Nimbus Portal Client high level architecture -This diagram outlines the Fluffy high-level architecture. +This diagram outlines the Nimbus Portal client high-level architecture. ```mermaid graph TD; - Fluffy ---> id2(PortalNode) & id5(MetricsHttpServer) - Fluffy ---> id3(RpcHttpServer) & id4(RpcWebSocketServer) + nimbus_portal_client ---> id2(PortalNode) & id5(MetricsHttpServer) + nimbus_portal_client ---> id3(RpcHttpServer) & id4(RpcWebSocketServer) id3(RpcHttpServer) & id4(RpcWebSocketServer) & id2(PortalNode) ---> id7(BeaconNetwork) id3(RpcHttpServer) & id4(RpcWebSocketServer) & id2(PortalNode) ----> id8(HistoryNetwork) id3(RpcHttpServer) & id4(RpcWebSocketServer) & id2(PortalNode) -----> id9(StateNetwork) @@ -18,9 +18,9 @@ graph TD; id2(PortalNode) --> id10(Discv5Protocol) ``` -When Fluffy starts it runs an instance of `PortalNode` which manages the `Discv5Protocol`, `BeaconNetwork`, `HistoryNetwork` and `StateNetwork` instances. There is a single instance of each of these components and each of the subnetwork instances can be enabled/disabled depending on the startup configuration selected. 
The `PortalNode` instance includes everything needed to participate in the Portal network to enable storage of offered content and serving content requests from other Portal nodes. It may become part of a library in the future which would allow other projects to easily embed an instance of Fluffy in their codebase. +When `nimbus_portal_client` starts it runs an instance of `PortalNode` which manages the `Discv5Protocol`, `BeaconNetwork`, `HistoryNetwork` and `StateNetwork` instances. There is a single instance of each of these components and each of the subnetwork instances can be enabled/disabled depending on the startup configuration selected. The `PortalNode` instance includes everything needed to participate in the Portal network to enable storage of offered content and serving content requests from other Portal nodes. It may become part of a library in the future which would allow other projects to easily embed an instance of `nimbus_portal_client` in their codebase. -The `RpcHttpServer` and `RpcWebSocketServer` enable serving JSON-RPC requests from Portal network over HTTP and WebSocket respectively. These RPC servers depend on the Fluffy EVM (`AsyncEvm`) in order to implement the various endpoints which require asyncronous transaction execution while fetching state from the Portal network. +The `RpcHttpServer` and `RpcWebSocketServer` enable serving JSON-RPC requests from Portal network over HTTP and WebSocket respectively. These RPC servers depend on the Nimbus Portal EVM (`AsyncEvm`) in order to implement the various endpoints which require asyncronous transaction execution while fetching state from the Portal network. ## Portal subnetworks @@ -56,19 +56,19 @@ content from the `PortalStream` when the node receives content from peers. When initiated which is bigger than the max Discv5 message size, then the `PortalStream` transfers the content using the `UtpDiscv5Protocol` type which implements uTP on top of Discv5. 
-The `RoutingTable` implements a Kademlia based DHT which holds the peer ENRs which Fluffy discovers while participating +The `RoutingTable` implements a Kademlia based DHT which holds the peer ENRs which the Portal node discovers while participating in each of the Portal Wire subprotocols. The `RadiusCache` holds the last known radius for each peer which is collected -when pinging each node in the routing table periodically. The `OfferCache` caches the content ids of the most recent content successfully offered and stored so that Fluffy can reject content that it already has without doing a database lookup. The `ContentCache` improves the performance of content lookups (used by the JSON-RPC API's) by caching the most recently fetched +when pinging each node in the routing table periodically. The `OfferCache` caches the content ids of the most recent content successfully offered and stored so that the Portal node can reject content that it already has without doing a database lookup. The `ContentCache` improves the performance of content lookups (used by the JSON-RPC API's) by caching the most recently fetched content in a LRU cache. -The `ContentDb` is the main database in Fluffy which internally uses sqlite to store the content data on disk. The `PortalProtocol` +The `ContentDb` is the main database in `nimbus_portal_client` which internally uses sqlite to store the content data on disk. The `PortalProtocol` uses the `OfferQueue` to hold pending offer requests which are passed to the `PortalStream` by the concurrent offer workers which run as a part of `PortalProtocol`. -## Fluffy EVM +## Nimbus Portal EVM -This diagram outlines the architecture of the Fluffy EVM. +This diagram outlines the architecture of the Nimbus Portal EVM. ```mermaid @@ -79,6 +79,6 @@ graph TD; ``` -The Fluffy EVM is used by the `eth_call` and `eth_estimateGas` RPC endpoints which both need to execute bytecode in the EVM. 
+The Nimbus Portal EVM is used by the `eth_call` and `eth_estimateGas` RPC endpoints which both need to execute bytecode in the EVM. It uses an instance of the `AsyncEvm` which is built on top of the Nimbus EVM in order to provide asyncronous transaction execution that can fetch state concurrently from a configured backend. In this case we use the `AsyncEvmPortalBackend` which wires in the `StateNetwork` which provides the account, storage and bytecode state on demand from the portal state network when executing a transaction. diff --git a/fluffy/docs/the_fluffy_book/docs/basics-for-developers.md b/portal/docs/the_fluffy_book/docs/basics-for-developers.md similarity index 79% rename from fluffy/docs/the_fluffy_book/docs/basics-for-developers.md rename to portal/docs/the_fluffy_book/docs/basics-for-developers.md index 16b505ef75..7d407e38b9 100644 --- a/fluffy/docs/the_fluffy_book/docs/basics-for-developers.md +++ b/portal/docs/the_fluffy_book/docs/basics-for-developers.md @@ -1,6 +1,6 @@ # The basics for developers -When working on Fluffy in the nimbus-eth1 repository, you can run the `env.sh` +When working on Nimbus Portal client in the nimbus-eth1 repository, you can run the `env.sh` script to run a command with the right environment variables set. This means the vendored Nim and Nim modules will be used, just as when you use `make`. @@ -22,11 +22,11 @@ The code follows the ## Nim code formatting -The Fluffy codebase is formatted with [nph](https://github.com/arnetheduck/nph). +The `portal` codebase is formatted with [nph](https://github.com/arnetheduck/nph). Check out the [this page](https://arnetheduck.github.io/nph/installation.html) on how to install nph. -The Fluffy CI tests check the code formatting according to the style rules of nph. +The Nimbus Portal CI tests check the code formatting according to the style rules of nph. Developers will need to make sure the code changes in PRs are formatted as such. !!! 
note diff --git a/fluffy/docs/the_fluffy_book/docs/beacon-content-bridging.md b/portal/docs/the_fluffy_book/docs/beacon-content-bridging.md similarity index 61% rename from fluffy/docs/the_fluffy_book/docs/beacon-content-bridging.md rename to portal/docs/the_fluffy_book/docs/beacon-content-bridging.md index 04b0a504c6..01a784ad72 100644 --- a/fluffy/docs/the_fluffy_book/docs/beacon-content-bridging.md +++ b/portal/docs/the_fluffy_book/docs/beacon-content-bridging.md @@ -2,23 +2,23 @@ ## Seeding from content bridges -Run a Fluffy node with the JSON-RPC API enabled. +Run a Nimbus Portal client with the JSON-RPC API enabled. ```bash -./build/fluffy --rpc +./build/nimbus_portal_client --rpc ``` -Build & run the `portal_bridge` for the beacon network: +Build & run the `nimbus_portal_bridge` for the beacon network: ```bash -make portal_bridge +make nimbus_portal_bridge TRUSTED_BLOCK_ROOT=0x1234567890123456789012345678901234567890123456789012345678901234 # Replace with trusted block root. # --rest-url = access to beacon node API, default http://127.0.0.1:5052 # --portal-rpc=url = access to the Portal node API, default http://127.0.0.1:8545 -./build/portal_bridge beacon --trusted-block-root:${TRUSTED_BLOCK_ROOT} --rest-url:http://127.0.0.1:5052 --portal-rpc-url:http://127.0.0.1:8545 +./build/nimbus_portal_bridge beacon --trusted-block-root:${TRUSTED_BLOCK_ROOT} --rest-url:http://127.0.0.1:5052 --portal-rpc-url:http://127.0.0.1:8545 ``` -The `portal_bridge` will connect to Fluffy node over the JSON-RPC +The `nimbus_portal_bridge` will connect to Nimbus Portal client over the JSON-RPC interface and start gossiping an `LightClientBootstrap` for given trusted block root and gossip backfill `LightClientUpdate`s. 
diff --git a/fluffy/docs/the_fluffy_book/docs/build-from-source.md b/portal/docs/the_fluffy_book/docs/build-from-source.md similarity index 52% rename from fluffy/docs/the_fluffy_book/docs/build-from-source.md rename to portal/docs/the_fluffy_book/docs/build-from-source.md index 508d118fdf..e7a63e806b 100644 --- a/fluffy/docs/the_fluffy_book/docs/build-from-source.md +++ b/portal/docs/the_fluffy_book/docs/build-from-source.md @@ -1,18 +1,18 @@ # Build from source -Building Fluffy from source ensures that all hardware-specific optimizations are +Building the Nimbus Portal client from source ensures that all hardware-specific optimizations are turned on. The build process itself is simple and fully automated, but may take a few minutes. !!! note "Nim" - Fluffy is written in the [Nim](https://nim-lang.org) programming language. + `nimbus_portal_client` is written in the [Nim](https://nim-lang.org) programming language. The correct version will automatically be downloaded as part of the build process! ## Prerequisites Make sure you have all needed [prerequisites](./prerequisites.md). -## Building the Fluffy client +## Building the Nimbus Portal client ### 1. Clone the `nimbus-eth1` repository @@ -21,12 +21,12 @@ git clone https://github.com/status-im/nimbus-eth1.git cd nimbus-eth1 ``` -### 2. Run the Fluffy build process +### 2. Run the Nimbus Portal client build process -To build Fluffy and its dependencies, run: +To build the Nimbus Portal client and its dependencies, run: ```sh -make fluffy +make nimbus_portal_client ``` This step can take several minutes. @@ -34,15 +34,15 @@ After it has finished, you can check if the build was successful by running: ```sh # See available command line options -./build/fluffy --help +./build/nimbus_portal_client --help ``` If you see the command-line options, your installation was successful! 
-Otherwise, don't hesitate to reach out to us in the `#nimbus-fluffy` channel of +Otherwise, don't hesitate to reach out to us in the `#nimbus-portal` channel of [our discord](https://discord.gg/j3nYBUeEad). -## Keeping Fluffy updated +## Keeping the Nimbus Portal client updated -When you decide to upgrade Fluffy to a newer version, make sure to follow the -[how to upgrade page](./upgrade.md). \ No newline at end of file +When you decide to upgrade the Nimbus Portal client to a newer version, make sure to follow the +[how to upgrade page](./upgrade.md). diff --git a/fluffy/docs/the_fluffy_book/docs/calling-a-contract.md b/portal/docs/the_fluffy_book/docs/calling-a-contract.md similarity index 83% rename from fluffy/docs/the_fluffy_book/docs/calling-a-contract.md rename to portal/docs/the_fluffy_book/docs/calling-a-contract.md index 4d249e6ef1..5e95e1f062 100644 --- a/fluffy/docs/the_fluffy_book/docs/calling-a-contract.md +++ b/portal/docs/the_fluffy_book/docs/calling-a-contract.md @@ -1,15 +1,15 @@ # Calling a contract on the Portal network -Once you have a Fluffy node running and [connected to the network](./connect-to-portal.md) with +Once you have a Portal node running and [connected to the network](./connect-to-portal.md) with the JSON-RPC interface enabled, then you can call contracts using the `eth_call` JSON-RPC method which should be enabled by default. -Note that `eth_call` in Fluffy requires both the history network, state network and the `eth` +Note that `eth_call` in `nimbus_portal_client` requires both the history network, state network and the `eth` rpc api to be enabled. 
These should be enabled by default already but you can also manually enable these by running: ```bash -./build/fluffy --rpc --portal-subnetworks=history,state --rpc-api=eth +./build/nimbus_portal_client --rpc --portal-subnetworks=history,state --rpc-api=eth ``` Here is an example which calls one of the earliest contracts deployed on Ethereum, which allows @@ -30,7 +30,7 @@ curl -s -X POST -H 'Content-Type: application/json' -d '{ }' http://localhost:8545 ``` -Fluffy is able to call mainnet smart contracts without having to run a full execution client and +The Nimbus Portal client is able to call mainnet smart contracts without having to run a full execution client and without even having the state data stored locally. It does this by first fetching the bytecode of the contract being called from the portal state network diff --git a/fluffy/docs/the_fluffy_book/docs/connect-to-portal.md b/portal/docs/the_fluffy_book/docs/connect-to-portal.md similarity index 60% rename from fluffy/docs/the_fluffy_book/docs/connect-to-portal.md rename to portal/docs/the_fluffy_book/docs/connect-to-portal.md index cdb10d9bba..b3acfc57c7 100644 --- a/fluffy/docs/the_fluffy_book/docs/connect-to-portal.md +++ b/portal/docs/the_fluffy_book/docs/connect-to-portal.md @@ -3,13 +3,13 @@ Connecting to the current Portal network is as easy as running following command: ```sh -./build/fluffy --rpc +./build/nimbus_portal_client --rpc ``` This will connect to the public Portal mainnet which contains nodes of the different clients. !!! note - By default the Fluffy node will connect to the + By default the Portal node will connect to the [bootstrap nodes](https://github.com/ethereum/portal-network-specs/blob/master/bootnodes.md#bootnodes-mainnet) of the public mainnet. 
When testing locally the `--network:none` option can be provided to avoid @@ -18,9 +18,9 @@ This will connect to the public Portal mainnet which contains nodes of the diffe The `--rpc` option will also enable the different JSON-RPC interfaces through which you can access the Portal Network. -Fluffy fully supports the [Portal Network JSON-RPC Specification](https://playground.open-rpc.org/?schemaUrl=https://raw.githubusercontent.com/ethereum/portal-network-specs/assembled-spec/jsonrpc/openrpc.json&uiSchema%5BappBar%5D%5Bui:splitView%5D=false&uiSchema%5BappBar%5D%5Bui:input%5D=false&uiSchema%5BappBar%5D%5Bui:examplesDropdown%5D=false). +The Nimbus Portal client fully supports the [Portal Network JSON-RPC Specification](https://playground.open-rpc.org/?schemaUrl=https://raw.githubusercontent.com/ethereum/portal-network-specs/assembled-spec/jsonrpc/openrpc.json&uiSchema%5BappBar%5D%5Bui:splitView%5D=false&uiSchema%5BappBar%5D%5Bui:input%5D=false&uiSchema%5BappBar%5D%5Bui:examplesDropdown%5D=false). -Fluffy also supports a small subset of the [Execution JSON-RPC API](https://ethereum.github.io/execution-apis/api-documentation/). +The Nimbus Portal client also supports a small subset of the [Execution JSON-RPC API](https://ethereum.github.io/execution-apis/api-documentation/). !!! note diff --git a/fluffy/docs/the_fluffy_book/docs/db_pruning.md b/portal/docs/the_fluffy_book/docs/db_pruning.md similarity index 57% rename from fluffy/docs/the_fluffy_book/docs/db_pruning.md rename to portal/docs/the_fluffy_book/docs/db_pruning.md index 25e57fb007..9730af0bb2 100644 --- a/fluffy/docs/the_fluffy_book/docs/db_pruning.md +++ b/portal/docs/the_fluffy_book/docs/db_pruning.md @@ -1,22 +1,22 @@ # Database pruning -Default Fluffy runs with a specific storage capacity (`--storage-capacity=x`, default set to 2GB). This means that the node's radius is dynamically adjusted to not exceed the configured capacity. 
As soon as the storage capacity is to be exceeded the pruning of content takes place and a new smaller radius is set. +Default the Nimbus Portal client runs with a specific storage capacity (`--storage-capacity=x`, default set to 2GB). This means that the node's radius is dynamically adjusted to not exceed the configured capacity. As soon as the storage capacity is to be exceeded the pruning of content takes place and a new smaller radius is set. As long as the configured storage capacity remains the same, pruning is done automatically. -In case the storage capacity of a Fluffy node is changed, a manual step might +In case the storage capacity of a Nimbus Portal client is changed, a manual step might be required. There are two scenarios possible: - Adjusting to a higher storage capacity - Adjusting to a lower storage capacity ## Adjusting to a higher storage capacity -This requires no manual steps as no pruning will be required. On the restart of the Fluffy node with a higher configured storage capacity, the initial radius will be increased to the maximum radius until the new storage capacity is reached. Then the automatic pruning will take place and the radius will be decreased. +This requires no manual steps as no pruning will be required. On the restart of the Nimbus Portal client with a higher configured storage capacity, the initial radius will be increased to the maximum radius until the new storage capacity is reached. Then the automatic pruning will take place and the radius will be decreased. ## Adjusting to a lower storage capacity -When a Fluffy node is restarted with a lower storage capacity, pruning will take +When a Nimbus Portal client is restarted with a lower storage capacity, pruning will take place automatically. The database will be pruned in intervals until the storage drops under the newly configured storage capacity. The radius will also be adjusted with each pruning cycle. 
@@ -27,4 +27,4 @@ Because of this, the vacuum is not executed automatically but requires you to ma You can also use the `fcli_db` tool its `prune` command on the database directly to force this vacuuming. -Another simple but more drastic solution is to delete the `db` subdirectory in the `--data-dir` provided to your Fluffy node. This will start your Fluffy node with a fresh database. +Another simple but more drastic solution is to delete the `db` subdirectory in the `--data-dir` provided to your Nimbus Portal client. This will start your Nimbus Portal client with a fresh database. diff --git a/fluffy/docs/the_fluffy_book/docs/eth-data-exporter.md b/portal/docs/the_fluffy_book/docs/eth-data-exporter.md similarity index 100% rename from fluffy/docs/the_fluffy_book/docs/eth-data-exporter.md rename to portal/docs/the_fluffy_book/docs/eth-data-exporter.md diff --git a/fluffy/docs/the_fluffy_book/docs/history-content-bridging.md b/portal/docs/the_fluffy_book/docs/history-content-bridging.md similarity index 65% rename from fluffy/docs/the_fluffy_book/docs/history-content-bridging.md rename to portal/docs/the_fluffy_book/docs/history-content-bridging.md index b25622fcf1..59b2bb4671 100644 --- a/fluffy/docs/the_fluffy_book/docs/history-content-bridging.md +++ b/portal/docs/the_fluffy_book/docs/history-content-bridging.md @@ -1,42 +1,42 @@ # Bridging content into the Portal history network -## Seeding history content with the `portal_bridge` +## Seeding history content with the `nimbus_portal_bridge` -The `portal_bridge` requires `era1` files as source for the block content from before the merge. +The `nimbus_portal_bridge` requires `era1` files as source for the block content from before the merge. It requires access to a full node with EL JSON-RPC API for seeding the latest (head of the chain) block content. Any block content between the merge and the latest is currently not implemented, but will be implemented in the future by usage of `era` files as source. 
### Step 1: Run a Portal client -Run a Portal client with the Portal JSON-RPC API enabled, e.g. Fluffy: +Run a Portal client with the Portal JSON-RPC API enabled, e.g. Nimbus Portal client: ```bash -./build/fluffy --rpc --storage-capacity:0 +./build/nimbus_portal_client --rpc --storage-capacity:0 ``` > Note: The `--storage-capacity:0` option is not required, but it is added here for the use case where the node's only focus is on gossiping content from the -`portal_bridge`. +`nimbus_portal_bridge`. ### Step 2: Run an EL client -The `portal_bridge` needs access to the EL JSON-RPC API, either through a local +The `nimbus_portal_bridge` needs access to the EL JSON-RPC API, either through a local Ethereum client or via a web3 provider. ### Step 3: Run the Portal bridge in history mode -Build & run the `portal_bridge`: +Build & run the `nimbus_portal_bridge`: ```bash -make portal_bridge +make nimbus_portal_bridge WEB3_URL="http://127.0.0.1:8548" # Replace with your provider. -./build/portal_bridge history --web3-url:${WEB3_URL} +./build/nimbus_portal_bridge history --web3-url:${WEB3_URL} ``` -Default the portal_bridge will run in `--latest` mode, which means that only the +Default the `nimbus_portal_bridge` will run in `--latest` mode, which means that only the latest block content will be gossiped into the network. -The portal_bridge also has a `--backfill` mode which will gossip pre-merge blocks +The `nimbus_portal_bridge` also has a `--backfill` mode which will gossip pre-merge blocks from `era1` files into the network. Default the bridge will audit first whether the content is available on the network and if not it will gossip it into the network. @@ -44,17 +44,17 @@ network. E.g. run latest + backfill with audit mode: ```bash WEB3_URL="http://127.0.0.1:8548" # Replace with your provider. 
-./build/portal_bridge history --latest:true --backfill:true --audit:true --era1-dir:/somedir/era1/ --web3-url:${WEB3_URL} +./build/nimbus_portal_bridge history --latest:true --backfill:true --audit:true --era1-dir:/somedir/era1/ --web3-url:${WEB3_URL} ``` -## Seeding directly from the fluffy client +## Seeding directly from the nimbus_portal_client This method currently only supports seeding block content from before the merge. It uses `era1` files as source for the content. -1. Run Fluffy and enable `portal_debug` JSON-RPC API: +1. Run the Nimbus Portal client and enable `portal_debug` JSON-RPC API: ```bash -./build/fluffy --rpc --rpc-api:portal,portal_debug +./build/nimbus_portal_client --rpc --rpc-api:portal,portal_debug ``` 2. Trigger the seeding of the content with the `portal_debug_historyGossipHeaders` and `portal_debug_historyGossipBlockContent` JSON-RPC methods. diff --git a/fluffy/docs/the_fluffy_book/docs/index.md b/portal/docs/the_fluffy_book/docs/index.md similarity index 85% rename from fluffy/docs/the_fluffy_book/docs/index.md rename to portal/docs/the_fluffy_book/docs/index.md index 1fb896cee9..dd2a390996 100644 --- a/fluffy/docs/the_fluffy_book/docs/index.md +++ b/portal/docs/the_fluffy_book/docs/index.md @@ -1,6 +1,6 @@ -# The Nimbus Fluffy Guide +# The Nimbus Portal client Guide -Fluffy is the Nimbus client implementation of the +The Nimbus Portal client is the Nimbus client implementation of the [Portal network specifications](https://github.com/ethereum/portal-network-specs). The Portal Network aims to deliver a reliable, sync-free, and decentralized @@ -9,10 +9,10 @@ get access to Ethereum data and as such become a drop-in replacement for full nodes by providing that data through the existing [Ethereum JSON RPC Execution API](https://github.com/ethereum/execution-apis). 
-This book describes how to build, run and monitor the Fluffy client, and how to +This book describes how to build, run and monitor the Nimbus Portal client, and how to use and test its currently implemented functionality. -To quickly get your Fluffy node up and running, follow the quickstart page: +To quickly get your Nimbus Portal client up and running, follow the quickstart page: - [Quickstart for Linux / macOS users](./quick-start.md) - [Quickstart for Windows users](./quick-start-windows.md) @@ -25,7 +25,7 @@ The development of this client is on par with the latest Portal specifications a The Portal history, beacon and state sub-networks are already operational on the public Portal mainnet. -Fluffy is default ran on the [Portal mainnet](https://github.com/ethereum/portal-network-specs/blob/master/bootnodes.md#bootnodes-mainnet) but can also be run on a (local) testnet. +The Nimbus Portal client runs by default on the [Portal mainnet](https://github.com/ethereum/portal-network-specs/blob/master/bootnodes.md#bootnodes-mainnet) but can also be run on a (local) testnet. ### Supported sub-networks and content @@ -70,6 +70,6 @@ If you'd like to contribute to Nimbus development: ## Disclaimer -This documentation assumes Nimbus Fluffy is in its ideal state. +This documentation assumes the Nimbus Portal client is in its ideal state. The project is still under heavy development. Please submit a [Github issue](https://github.com/status-im/nimbus-eth1/issues) if you come across a problem. 
diff --git a/fluffy/docs/the_fluffy_book/docs/metrics.md b/portal/docs/the_fluffy_book/docs/metrics.md similarity index 74% rename from fluffy/docs/the_fluffy_book/docs/metrics.md rename to portal/docs/the_fluffy_book/docs/metrics.md index 6f2165a75e..e098962ba7 100644 --- a/fluffy/docs/the_fluffy_book/docs/metrics.md +++ b/portal/docs/the_fluffy_book/docs/metrics.md @@ -1,14 +1,14 @@ # Metrics and their visualisation In this page we'll cover how to enable metrics and how to use Grafana and -Prometheus to help you visualize these real-time metrics concerning the Fluffy +Prometheus to help you visualize these real-time metrics concerning the Portal node. -## Enable metrics in Fluffy +## Enable metrics in the Nimbus Portal client -To enable metrics run Fluffy with the `--metrics` flag: +To enable metrics run the Nimbus Portal client with the `--metrics` flag: ```bash -./build/fluffy --metrics +./build/nimbus_portal_client --metrics ``` Default the metrics are available at [http://127.0.0.1:8008/metrics](http://127.0.0.1:8008/metrics). @@ -24,9 +24,9 @@ over time and to also visualise them one can use for example Prometheus and Graf The steps on how to set up metrics visualisation with Prometheus and Grafana is explained in [this guide](https://nimbus.guide/metrics-pretty-pictures.html#prometheus-and-grafana). -A Fluffy specific dashboard can be found [here](https://github.com/status-im/nimbus-eth1/blob/master/fluffy/grafana/fluffy_grafana_dashboard.json). +A Nimbus Portal specific dashboard can be found [here](https://github.com/status-im/nimbus-eth1/blob/master/portal/metrics/grafana/fluffy_grafana_dashboard.json). -This is the dashboard used for our Fluffy Portal network fleet. +This is the dashboard used for our Nimbus Portal network fleet. In order to use it locally, you will have to remove the `{job="nimbus-fluffy-metrics"}` part from the `instance` and `container` variables queries in the dashboard settings. 
Or they can also be changed to a diff --git a/fluffy/docs/the_fluffy_book/docs/fluffy-with-hive.md b/portal/docs/the_fluffy_book/docs/nimbus-portal-with-hive.md similarity index 65% rename from fluffy/docs/the_fluffy_book/docs/fluffy-with-hive.md rename to portal/docs/the_fluffy_book/docs/nimbus-portal-with-hive.md index 4651156041..5d1abc0951 100644 --- a/fluffy/docs/the_fluffy_book/docs/fluffy-with-hive.md +++ b/portal/docs/the_fluffy_book/docs/nimbus-portal-with-hive.md @@ -1,6 +1,6 @@ -# Testing Fluffy with hive +# Testing Nimbus Portal client with hive -Fluffy is one of the Portal clients that is being tested with [hive](https://github.com/ethereum/hive). +The `nimbus_portal_client` is one of the Portal clients that is being tested with [hive](https://github.com/ethereum/hive). To see the status of the tests for the current version you can access [https://portal-hive.ethdevops.io/](https://portal-hive.ethdevops.io/). @@ -17,7 +17,7 @@ go build . Example commands for running test suites: ```sh -# Run the portal hive tests with only the fluffy client +# Run the portal hive tests with only the Nimbus Portal client ./hive --sim portal --client fluffy # Run the portal hive tests with different clients @@ -39,8 +39,8 @@ go build ./cmd/hiveview ## Build a local development Docker image for hive -To debug & develop Fluffy code against hive tests you might want to -create a local development Docker image for Fluffy. +To debug & develop the Nimbus Portal client code against hive tests you might want to +create a local development Docker image. To do that follow next steps: @@ -48,18 +48,18 @@ To do that follow next steps: 2) Build the local development Docker image using the following command: ``` -docker build --tag fluffy-dev --file ./fluffy/tools/docker/Dockerfile.debug . +docker build --tag fluffy-dev --file ./portal/docker/Dockerfile.debug . 
``` 3) Modify the `FROM` tag in the portal-hive `Dockerfile` of fluffy at `./hive/clients/fluffy/Dockerfile` to use the image that was build in step 2. -4) Run the tests as [usual](fluffy-with-portal-hive.md/#run-the-hive-tests-locally). +4) Run the tests as [usual](nimbus-portal-with-portal-hive.md/#run-the-hive-tests-locally). !!! warning The `./vendors` dir is dockerignored and cached. If you have to make local changes to one of the dependencies in that directory you will have to remove - `vendors/` from `./fluffy/tools/docker/Dockerfile.debug.dockerignore`. + `vendors/` from `./portal/docker/Dockerfile.debug.dockerignore`. !!! note - When developing on Linux the `./fluffy/tools/docker/Dockerfile.debug.linux` Dockerfile can also be used instead. It does require to manually build fluffy first as it copies over this binary. + When developing on Linux the `./portal/docker/Dockerfile.debug.linux` Dockerfile can also be used instead. It does require to manually build fluffy first as it copies over this binary. diff --git a/fluffy/docs/the_fluffy_book/docs/prerequisites.md b/portal/docs/the_fluffy_book/docs/prerequisites.md similarity index 82% rename from fluffy/docs/the_fluffy_book/docs/prerequisites.md rename to portal/docs/the_fluffy_book/docs/prerequisites.md index b434dcc743..651eb4dc62 100644 --- a/fluffy/docs/the_fluffy_book/docs/prerequisites.md +++ b/portal/docs/the_fluffy_book/docs/prerequisites.md @@ -1,6 +1,6 @@ # Prerequisites -The Fluffy client runs on Linux, macOS, Windows, and Android. +The Nimbus Portal client runs on Linux, macOS, Windows, and Android. ## Build prerequisites @@ -34,7 +34,7 @@ installed: ``` === "Windows" - To build Fluffy on Windows, the MinGW-w64 build environment is recommended. + To build the Nimbus Portal client on Windows, the MinGW-w64 build environment is recommended. 
- Install Mingw-w64 for your architecture using the "[MinGW-W64 Online Installer](https://sourceforge.net/projects/mingw-w64/files/)": @@ -49,14 +49,14 @@ installed: - Install [CMake](https://cmake.org/). - Install [Git for Windows](https://gitforwindows.org/) and use a "Git Bash" - shell to clone nimbus-eth1 and build Fluffy. + shell to clone nimbus-eth1 and build `nimbus_portal_client`. === "Android" - Install the [Termux](https://termux.com) app from FDroid or the Google Play store - Install a [PRoot](https://wiki.termux.com/wiki/PRoot) of your choice following the instructions for your preferred distribution. - The Ubuntu PRoot is known to contain all Fluffy prerequisites compiled on Arm64 architecture (the most common architecture for Android devices). + The Ubuntu PRoot is known to contain all `nimbus_portal_client` prerequisites compiled on Arm64 architecture (the most common architecture for Android devices). Assuming you use Ubuntu PRoot: diff --git a/fluffy/docs/the_fluffy_book/docs/protocol-interop-testing.md b/portal/docs/the_fluffy_book/docs/protocol-interop-testing.md similarity index 94% rename from fluffy/docs/the_fluffy_book/docs/protocol-interop-testing.md rename to portal/docs/the_fluffy_book/docs/protocol-interop-testing.md index 27d9c375f6..dc85398d19 100644 --- a/fluffy/docs/the_fluffy_book/docs/protocol-interop-testing.md +++ b/portal/docs/the_fluffy_book/docs/protocol-interop-testing.md @@ -10,13 +10,13 @@ that attempt to send 1 specific message and then shutdown. The first is more powerful and complete, the second one might be easier to do some quick testing. -## Run Fluffy and test protocol messages via JSON-RPC API +## Run the Nimbus Portal client and test protocol messages via JSON-RPC API -First build Fluffy as explained [here](./quick-start.md#build-the-fluffy-client). +First build the Nimbus Portal client as explained [here](./quick-start.md#build-the-nimbus-portal-client). 
Next run it with the JSON-RPC server enabled: ```bash -./build/fluffy --rpc --rpc-api:portal,discovery --bootstrap-node:enr: +./build/nimbus_portal_client --rpc --rpc-api:portal,discovery --bootstrap-node:enr: ``` ### Testing Discovery v5 layer diff --git a/fluffy/docs/the_fluffy_book/docs/quick-start-docker.md b/portal/docs/the_fluffy_book/docs/quick-start-docker.md similarity index 82% rename from fluffy/docs/the_fluffy_book/docs/quick-start-docker.md rename to portal/docs/the_fluffy_book/docs/quick-start-docker.md index a6919a470e..51c1cfcd71 100644 --- a/fluffy/docs/the_fluffy_book/docs/quick-start-docker.md +++ b/portal/docs/the_fluffy_book/docs/quick-start-docker.md @@ -1,6 +1,6 @@ # Quick start - Docker -This page takes you through the steps of getting the Fluffy Portal node running +This page takes you through the steps of getting the Nimbus Portal client running on the public network by use of the [public Docker image](https://hub.docker.com/r/statusim/nimbus-fluffy/tags). The Docker image gets rebuild from latest master every night and only `amd64` is supported currently. @@ -10,7 +10,7 @@ The Docker image gets rebuild from latest master every night and only `amd64` is ### Prerequisites - [Docker](https://www.docker.com/) -### Use the Docker image to run a Fluffy client on the Portal network +### Use the Docker image to run the Nimbus Portal client on the Portal network ```bash # Connect to the Portal bootstrap nodes and enable the JSON-RPC APIs. @@ -18,7 +18,7 @@ docker container run -p 8545:8545 statusim/nimbus-fluffy:amd64-master-latest --r ``` !!! note Port 8545 is published and `rpc-address` is set to the `ANY` address in this command to allow access to the JSON-RPC API from outside the Docker image. You might want to adjust that depending on the use case & security model. - It is also recommended to use a mounted volume for Fluffy's `--data-dir` in case of a long-running container. 
+ It is also recommended to use a mounted volume for `nimbus_portal_client`'s `--data-dir` in case of a long-running container. ### Try requesting an execution layer block from the network diff --git a/fluffy/docs/the_fluffy_book/docs/quick-start-windows.md b/portal/docs/the_fluffy_book/docs/quick-start-windows.md similarity index 79% rename from fluffy/docs/the_fluffy_book/docs/quick-start-windows.md rename to portal/docs/the_fluffy_book/docs/quick-start-windows.md index c2dfa8fb16..4b1015496c 100644 --- a/fluffy/docs/the_fluffy_book/docs/quick-start-windows.md +++ b/portal/docs/the_fluffy_book/docs/quick-start-windows.md @@ -1,6 +1,6 @@ # Quick start - Windows -This page takes you through the steps of getting the Fluffy Portal node running +This page takes you through the steps of getting the Nimbus Portal node running on the public network. The guide assumes Windows is being used. For Linux/macOS users follow this @@ -15,25 +15,25 @@ If you need help installing these tools, you can consult our [prerequisites page](./prerequisites.md). !!! note - To build Fluffy on Windows, the MinGW-w64 build environment is recommended. + To build the Nimbus Portal client on Windows, the MinGW-w64 build environment is recommended. The build commands in the rest of this page assume the MinGW build environment is used. -### Build the Fluffy client +### Build the Nimbus Portal client ```bash git clone https://github.com/status-im/nimbus-eth1.git cd nimbus-eth1 -mingw32-make fluffy +mingw32-make nimbus_portal_client # Test if binary was successfully build by running the help command. 
-./build/fluffy --help +./build/nimbus_portal_client --help ``` -### Run a Fluffy client on the Portal network +### Run a Nimbus Portal client on the Portal network ```bash # Connect to the Portal bootstrap nodes and enable the JSON-RPC APIs -./build/fluffy --rpc +./build/nimbus_portal_client --rpc ``` ### Try requesting an execution layer block from the network @@ -47,7 +47,7 @@ BLOCKHASH=0x55b11b918355b1ef9c5db810302ebad0bf2544255b530cdce90674d5887bb286 # R curl -s -X POST -H 'Content-Type: application/json' -d '{"jsonrpc":"2.0","id":"1","method":"eth_getBlockByHash","params":["'${BLOCKHASH}'", true]}' http://localhost:8545 ``` -### Update and rebuild the Fluffy client +### Update and rebuild the Nimbus Portal client In order to stay up to date you can pull the latest version from our master branch. There are currently released versions tagged. @@ -57,5 +57,5 @@ git pull # To bring the git submodules up to date mingw32-make update -mingw32-make fluffy +mingw32-make nimbus_portal_client ``` diff --git a/fluffy/docs/the_fluffy_book/docs/quick-start.md b/portal/docs/the_fluffy_book/docs/quick-start.md similarity index 82% rename from fluffy/docs/the_fluffy_book/docs/quick-start.md rename to portal/docs/the_fluffy_book/docs/quick-start.md index e1c9b01703..12ff9cdb2c 100644 --- a/fluffy/docs/the_fluffy_book/docs/quick-start.md +++ b/portal/docs/the_fluffy_book/docs/quick-start.md @@ -1,6 +1,6 @@ # Quick start - Linux/macOS -This page takes you through the steps of getting the Fluffy Portal node running +This page takes you through the steps of getting the Nimbus Portal node running on the public network. The guide assumes Linux or macOS is being used. For Windows users follow this @@ -14,21 +14,21 @@ The guide assumes Linux or macOS is being used. For Windows users follow this If you need help installing these tools, you can consult our [prerequisites page](./prerequisites.md). 
-### Build the Fluffy client +### Build the Nimbus Portal client ```bash git clone https://github.com/status-im/nimbus-eth1.git cd nimbus-eth1 -make fluffy +make nimbus_portal_client # Test if binary was successfully build by running the help command. -./build/fluffy --help +./build/nimbus_portal_client --help ``` -### Run a Fluffy client on the Portal network +### Run a Nimbus Portal client on the Portal network ```bash # Connect to the Portal bootstrap nodes and enable the JSON-RPC APIs -./build/fluffy --rpc +./build/nimbus_portal_client --rpc ``` ### Try requesting an execution layer block from the network @@ -42,7 +42,7 @@ BLOCKHASH=0x55b11b918355b1ef9c5db810302ebad0bf2544255b530cdce90674d5887bb286 # R curl -s -X POST -H 'Content-Type: application/json' -d '{"jsonrpc":"2.0","id":"1","method":"eth_getBlockByHash","params":["'${BLOCKHASH}'", true]}' http://localhost:8545 ``` -### Update and rebuild the Fluffy client +### Update and rebuild the Nimbus Portal client In order to stay up to date you can pull the latest version from our master branch. There are currently released versions tagged. @@ -52,5 +52,5 @@ git pull # To bring the git submodules up to date make update -make fluffy +make nimbus_portal_client ``` diff --git a/fluffy/docs/the_fluffy_book/docs/run-local-testnet.md b/portal/docs/the_fluffy_book/docs/run-local-testnet.md similarity index 64% rename from fluffy/docs/the_fluffy_book/docs/run-local-testnet.md rename to portal/docs/the_fluffy_book/docs/run-local-testnet.md index 144ab25791..f8d35106c5 100644 --- a/fluffy/docs/the_fluffy_book/docs/run-local-testnet.md +++ b/portal/docs/the_fluffy_book/docs/run-local-testnet.md @@ -9,12 +9,12 @@ on them through the JSON-RPC API. 
```bash # Run the script, default start 3 nodes -./fluffy/scripts/launch_local_testnet.sh +./portal/scripts/launch_local_testnet.sh # Run the script with 16 nodes -./fluffy/scripts/launch_local_testnet.sh -n 16 +./portal/scripts/launch_local_testnet.sh -n 16 # See the script help -./fluffy/scripts/launch_local_testnet.sh --help +./portal/scripts/launch_local_testnet.sh --help ``` The nodes will be started and all nodes will use `node0` as bootstrap node. @@ -24,8 +24,8 @@ The `data-dir`s and logs of each node can be found in `./local_testnet_data/`. You can manually start extra nodes that connect to the network by providing any of the running nodes their ENR. -E.g. to manually add a Fluffy node to the local testnet run: +E.g. to manually add a Portal node to the local testnet run: ```bash -./build/fluffy --rpc --network:none --udp-port:9010 --nat:extip:127.0.0.1 --bootstrap-node:`cat ./local_testnet_data/node0/fluffy_node.enr` +./build/nimbus_portal_client --rpc --network:none --udp-port:9010 --nat:extip:127.0.0.1 --bootstrap-node:`cat ./local_testnet_data/node0/portal_node.enr` ``` diff --git a/fluffy/docs/the_fluffy_book/docs/state-content-bridging.md b/portal/docs/the_fluffy_book/docs/state-content-bridging.md similarity index 82% rename from fluffy/docs/the_fluffy_book/docs/state-content-bridging.md rename to portal/docs/the_fluffy_book/docs/state-content-bridging.md index 2247edf1cc..30c0091cfa 100644 --- a/fluffy/docs/the_fluffy_book/docs/state-content-bridging.md +++ b/portal/docs/the_fluffy_book/docs/state-content-bridging.md @@ -2,25 +2,25 @@ ## Seeding from content bridges -### Seeding state data with the `portal_bridge` +### Seeding state data with the `nimbus_portal_bridge` #### Step 1: Run a Portal client -Run a Portal client with the Portal JSON-RPC API enabled (e.g. Fluffy) and enable the `state` subnetwork: +Run a Portal client with the Portal JSON-RPC API enabled (e.g. 
Nimbus Portal client) and enable the `state` subnetwork: ```bash -./build/fluffy --rpc --storage-capacity:0 --portal-subnetworks:state +./build/nimbus_portal_client --rpc --storage-capacity:0 --portal-subnetworks:state ``` > Note: The `--storage-capacity:0` option is not required, but it is added here for the use case where the node's only focus is on gossiping content from the -`portal_bridge`. +`nimbus_portal_bridge`. #### Step 2: Run an EL client (archive node) that supports `trace_replayBlockTransactions` -The `portal_bridge` needs access to the EL JSON-RPC API, either through a local +The `nimbus_portal_bridge` needs access to the EL JSON-RPC API, either through a local Ethereum client or via a web3 provider. Currently the portal state bridge requires access to the following EL JSON-RPC APIs: @@ -39,12 +39,12 @@ to ensure that the state is available for all the historical blocks being synced #### Step 3: Run the Portal bridge in state mode -Build & run the `portal_bridge`: +Build & run the `nimbus_portal_bridge`: ```bash -make portal_bridge +make nimbus_portal_bridge WEB3_URL="ws://127.0.0.1:8548" # Replace with your provider. -./build/portal_bridge state --web3-url:${WEB3_URL} --start-block=1 --gossip-workers=2 +./build/nimbus_portal_bridge state --web3-url:${WEB3_URL} --start-block=1 --gossip-workers=2 ``` > Note: A WebSocket connection to the web3 provider is recommended to improve the @@ -60,7 +60,7 @@ The `--gossip-workers` parameter can be used to set the number of workers that w gossip the portal state data into the portal state subnetwork. Each worker handles gossipping the state for a single block and the workers gossip the data concurrently. It is recommended to increase the number of workers in order to increase the speed -and throughput of the gossiping process up until the point where Fluffy is unable +and throughput of the gossiping process up until the point where `nimbus_portal_bridge` is unable to
The optional `--verify-gossip` parameter can be used to verify that the state data has diff --git a/fluffy/docs/the_fluffy_book/docs/stylesheets/extra.css b/portal/docs/the_fluffy_book/docs/stylesheets/extra.css similarity index 100% rename from fluffy/docs/the_fluffy_book/docs/stylesheets/extra.css rename to portal/docs/the_fluffy_book/docs/stylesheets/extra.css diff --git a/portal/docs/the_fluffy_book/docs/test-suite.md b/portal/docs/the_fluffy_book/docs/test-suite.md new file mode 100644 index 0000000000..53050d9e09 --- /dev/null +++ b/portal/docs/the_fluffy_book/docs/test-suite.md @@ -0,0 +1,15 @@ +# Nimbus Portal test suite + +## Run the Nimbus Portal test suite +```bash +# From the nimbus-eth1 repository +make portal-test +``` + +## Run Nimbus Portal local testnet script +```bash +./portal/scripts/launch_local_testnet.sh --run-tests +``` + +Find more details on the usage and workings of the local testnet script +[here](./run-local-testnet.md). diff --git a/fluffy/docs/the_fluffy_book/docs/testnet-beacon-network.md b/portal/docs/the_fluffy_book/docs/testnet-beacon-network.md similarity index 65% rename from fluffy/docs/the_fluffy_book/docs/testnet-beacon-network.md rename to portal/docs/the_fluffy_book/docs/testnet-beacon-network.md index 5eaa2671b8..3ef914fb7d 100644 --- a/fluffy/docs/the_fluffy_book/docs/testnet-beacon-network.md +++ b/portal/docs/the_fluffy_book/docs/testnet-beacon-network.md @@ -5,7 +5,7 @@ network bridge in order to test if all nodes can do the beacon light client sync and stay up to date with the latest head of the chain. To accomodate this, the `launch_local_testnet.sh` script has the option to -launch the Fluffy `portal_bridge` automatically and connect it to `node0` +launch the `nimbus_portal_bridge` automatically and connect it to `node0` of the local tesnet. 
## Run the local testnet script with bridge @@ -15,34 +15,34 @@ The `launch_local_testnet.sh` script must be launched with the The individual nodes will be started with this `trusted-block-root` and each node will try to start sync from this block root. -Run the following command to launch the network with the `portal_bridge` +Run the following command to launch the network with the `nimbus_portal_bridge` activated for the beacon network. ```bash TRUSTED_BLOCK_ROOT=0x1234567890123456789012345678901234567890123456789012345678901234 # Replace with trusted block root. -# Run the script, start 8 nodes + portal_bridge -./fluffy/scripts/launch_local_testnet.sh -n8 --trusted-block-root ${TRUSTED_BLOCK_ROOT} --portal-bridge +# Run the script, start 8 nodes + nimbus_portal_bridge +./portal/scripts/launch_local_testnet.sh -n8 --trusted-block-root ${TRUSTED_BLOCK_ROOT} --portal-bridge ``` ## Run the local testnet script and launch the bridge manually -To have control over when to start or restart the `portal_bridge` on can +To have control over when to start or restart the `nimbus_portal_bridge` one can also control the bridge manually, e.g. start the testnet: ```bash TRUSTED_BLOCK_ROOT=0x1234567890123456789012345678901234567890123456789012345678901234 # Replace with trusted block root.
# Run the script, start 8 nodes -./fluffy/scripts/launch_local_testnet.sh -n8 --trusted-block-root ${TRUSTED_BLOCK_ROOT} +./portal/scripts/launch_local_testnet.sh -n8 --trusted-block-root ${TRUSTED_BLOCK_ROOT} ``` -Next, build and run the `portal_bridge` for the beacon network: +Next, build and run the `nimbus_portal_bridge` for the beacon network: ```bash -make portal_bridge +make nimbus_portal_bridge # --rpc-port 10000 = default node0 # --rest-url = access to beacon node API, default http://127.0.0.1:5052 -./build/portal_bridge beacon --trusted-block-root:${TRUSTED_BLOCK_ROOT} --rest-url:http://127.0.0.1:5052 --backfill-amount:128 --portal-rpc-url:http://127.0.0.1:10000 +./build/nimbus_portal_bridge beacon --trusted-block-root:${TRUSTED_BLOCK_ROOT} --rest-url:http://127.0.0.1:5052 --backfill-amount:128 --portal-rpc-url:http://127.0.0.1:10000 ``` diff --git a/fluffy/docs/the_fluffy_book/docs/testnet-history-network.md b/portal/docs/the_fluffy_book/docs/testnet-history-network.md similarity index 98% rename from fluffy/docs/the_fluffy_book/docs/testnet-history-network.md rename to portal/docs/the_fluffy_book/docs/testnet-history-network.md index b4cb16d61b..efc7cc4eb9 100644 --- a/fluffy/docs/the_fluffy_book/docs/testnet-history-network.md +++ b/portal/docs/the_fluffy_book/docs/testnet-history-network.md @@ -11,7 +11,7 @@ nodes will be killed. 
```bash # Run the script, default start 64 nodes and run history tests -./fluffy/scripts/launch_local_testnet.sh --run-tests +./portal/scripts/launch_local_testnet.sh --run-tests ``` ## Details of the `test_portal_testnet` test diff --git a/fluffy/docs/the_fluffy_book/docs/upgrade.md b/portal/docs/the_fluffy_book/docs/upgrade.md similarity index 52% rename from fluffy/docs/the_fluffy_book/docs/upgrade.md rename to portal/docs/the_fluffy_book/docs/upgrade.md index d51dc1f32d..02f0e14a95 100644 --- a/fluffy/docs/the_fluffy_book/docs/upgrade.md +++ b/portal/docs/the_fluffy_book/docs/upgrade.md @@ -1,7 +1,7 @@ # Upgrade To upgrade to the latest version you need to update the nimbus-eth1 repository -and re-compile Fluffy. +and re-compile the Nimbus Portal client. !!! note In this state of development there are no official releases yet nor git tags @@ -9,7 +9,7 @@ and re-compile Fluffy. ## Upgrade to the latest version -Upgrading Fluffy when built from source is similar to the installation process. +Upgrading the Nimbus Portal client when built from source is similar to the installation process. Run: @@ -17,12 +17,12 @@ Run: # Download the updated source code git pull && make update -# Build Fluffy from the newly updated source -make -j4 fluffy +# Build from the newly updated source +make -j4 nimbus_portal_client ``` Complete the upgrade by restarting the node. !!! 
tip - To check which version of Fluffy you're currently running, run - `./build/fluffy --version` + To check which version of `nimbus_portal_client` you're currently running, run + `./build/nimbus_portal_client --version` diff --git a/fluffy/docs/the_fluffy_book/mkdocs.yml b/portal/docs/the_fluffy_book/mkdocs.yml similarity index 94% rename from fluffy/docs/the_fluffy_book/mkdocs.yml rename to portal/docs/the_fluffy_book/mkdocs.yml index 3629ff2f91..2cb540de2a 100644 --- a/fluffy/docs/the_fluffy_book/mkdocs.yml +++ b/portal/docs/the_fluffy_book/mkdocs.yml @@ -1,11 +1,11 @@ -# Nimbus Fluffy book +# Nimbus # Copyright (c) 2023-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). # at your option. This file may not be copied, modified, or distributed except according to those terms. 
-site_name: The Nimbus Fluffy Guide +site_name: The Nimbus Portal Client Guide theme: name: material features: @@ -34,7 +34,7 @@ extra_css: # Support urls previously used by mdbook use_directory_urls: false repo_url: https://github.com/status-im/nimbus-eth1 -edit_uri: edit/master/fluffy/docs/the_fluffy_book/docs +edit_uri: edit/master/portal/docs/the_fluffy_book/docs docs_dir: docs plugins: @@ -98,6 +98,6 @@ nav: - 'run-local-testnet.md' - 'testnet-history-network.md' - 'testnet-beacon-network.md' - - 'fluffy-with-hive.md' + - 'nimbus-portal-with-hive.md' - 'protocol-interop-testing.md' - 'eth-data-exporter.md' diff --git a/fluffy/eth_data/era1.nim b/portal/eth_data/era1.nim similarity index 99% rename from fluffy/eth_data/era1.nim rename to portal/eth_data/era1.nim index 00363941cf..c6b06928be 100644 --- a/fluffy/eth_data/era1.nim +++ b/portal/eth_data/era1.nim @@ -1,4 +1,4 @@ -# fluffy +# Nimbus # Copyright (c) 2024-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). 
diff --git a/fluffy/eth_data/history_data_json_store.nim b/portal/eth_data/history_data_json_store.nim similarity index 100% rename from fluffy/eth_data/history_data_json_store.nim rename to portal/eth_data/history_data_json_store.nim diff --git a/fluffy/eth_data/history_data_seeding.nim b/portal/eth_data/history_data_seeding.nim similarity index 100% rename from fluffy/eth_data/history_data_seeding.nim rename to portal/eth_data/history_data_seeding.nim diff --git a/fluffy/eth_data/history_data_ssz_e2s.nim b/portal/eth_data/history_data_ssz_e2s.nim similarity index 100% rename from fluffy/eth_data/history_data_ssz_e2s.nim rename to portal/eth_data/history_data_ssz_e2s.nim diff --git a/fluffy/eth_data/yaml_eth_types.nim b/portal/eth_data/yaml_eth_types.nim similarity index 99% rename from fluffy/eth_data/yaml_eth_types.nim rename to portal/eth_data/yaml_eth_types.nim index a4d597a11f..f80fbb53c8 100644 --- a/fluffy/eth_data/yaml_eth_types.nim +++ b/portal/eth_data/yaml_eth_types.nim @@ -1,4 +1,4 @@ -# fluffy +# Nimbus # Copyright (c) 2024-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). diff --git a/fluffy/eth_data/yaml_utils.nim b/portal/eth_data/yaml_utils.nim similarity index 96% rename from fluffy/eth_data/yaml_utils.nim rename to portal/eth_data/yaml_utils.nim index f2f2ca9f3f..01732ba521 100644 --- a/fluffy/eth_data/yaml_utils.nim +++ b/portal/eth_data/yaml_utils.nim @@ -1,5 +1,5 @@ -# fluffy -# Copyright (c) 2024 Status Research & Development GmbH +# Nimbus +# Copyright (c) 2024-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). 
diff --git a/fluffy/evm/async_evm.nim b/portal/evm/async_evm.nim similarity index 99% rename from fluffy/evm/async_evm.nim rename to portal/evm/async_evm.nim index d63d3221d1..a286f4d5d8 100644 --- a/fluffy/evm/async_evm.nim +++ b/portal/evm/async_evm.nim @@ -1,4 +1,4 @@ -# Fluffy +# Nimbus # Copyright (c) 2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). diff --git a/fluffy/evm/async_evm_backend.nim b/portal/evm/async_evm_backend.nim similarity index 99% rename from fluffy/evm/async_evm_backend.nim rename to portal/evm/async_evm_backend.nim index 862066fbe6..c495090187 100644 --- a/fluffy/evm/async_evm_backend.nim +++ b/portal/evm/async_evm_backend.nim @@ -1,4 +1,4 @@ -# Fluffy +# Nimbus # Copyright (c) 2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). diff --git a/fluffy/evm/async_evm_portal_backend.nim b/portal/evm/async_evm_portal_backend.nim similarity index 99% rename from fluffy/evm/async_evm_portal_backend.nim rename to portal/evm/async_evm_portal_backend.nim index 616ba56259..7134b442a7 100644 --- a/fluffy/evm/async_evm_portal_backend.nim +++ b/portal/evm/async_evm_portal_backend.nim @@ -1,4 +1,4 @@ -# Fluffy +# Nimbus # Copyright (c) 2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). 
diff --git a/fluffy/logging.nim b/portal/logging.nim similarity index 90% rename from fluffy/logging.nim rename to portal/logging.nim index 3484674242..ade84016a8 100644 --- a/fluffy/logging.nim +++ b/portal/logging.nim @@ -1,5 +1,5 @@ -# Fluffy -# Copyright (c) 2023-2024 Status Research & Development GmbH +# Nimbus +# Copyright (c) 2023-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). diff --git a/fluffy/grafana/fluffy_grafana_dashboard.json b/portal/metrics/grafana/fluffy_grafana_dashboard.json similarity index 100% rename from fluffy/grafana/fluffy_grafana_dashboard.json rename to portal/metrics/grafana/fluffy_grafana_dashboard.json diff --git a/fluffy/network/beacon/beacon_chain_historical_roots.nim b/portal/network/beacon/beacon_chain_historical_roots.nim similarity index 100% rename from fluffy/network/beacon/beacon_chain_historical_roots.nim rename to portal/network/beacon/beacon_chain_historical_roots.nim diff --git a/fluffy/network/beacon/beacon_chain_historical_summaries.nim b/portal/network/beacon/beacon_chain_historical_summaries.nim similarity index 99% rename from fluffy/network/beacon/beacon_chain_historical_summaries.nim rename to portal/network/beacon/beacon_chain_historical_summaries.nim index 7f262c2d4f..18cebc23e6 100644 --- a/fluffy/network/beacon/beacon_chain_historical_summaries.nim +++ b/portal/network/beacon/beacon_chain_historical_summaries.nim @@ -1,4 +1,4 @@ -# fluffy +# Nimbus # Copyright (c) 2023-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). 
diff --git a/fluffy/network/beacon/beacon_content.nim b/portal/network/beacon/beacon_content.nim similarity index 88% rename from fluffy/network/beacon/beacon_content.nim rename to portal/network/beacon/beacon_content.nim index cb9b4f8e91..6443704675 100644 --- a/fluffy/network/beacon/beacon_content.nim +++ b/portal/network/beacon/beacon_content.nim @@ -1,5 +1,5 @@ -# fluffy -# Copyright (c) 2022-2024 Status Research & Development GmbH +# Nimbus +# Copyright (c) 2022-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). diff --git a/fluffy/network/beacon/beacon_db.nim b/portal/network/beacon/beacon_db.nim similarity index 99% rename from fluffy/network/beacon/beacon_db.nim rename to portal/network/beacon/beacon_db.nim index b4758ba81c..c788d4ea09 100644 --- a/fluffy/network/beacon/beacon_db.nim +++ b/portal/network/beacon/beacon_db.nim @@ -1,4 +1,4 @@ -# fluffy +# Nimbus # Copyright (c) 2022-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). diff --git a/fluffy/network/beacon/beacon_init_loader.nim b/portal/network/beacon/beacon_init_loader.nim similarity index 96% rename from fluffy/network/beacon/beacon_init_loader.nim rename to portal/network/beacon/beacon_init_loader.nim index e5f03ed058..b475820c3a 100644 --- a/fluffy/network/beacon/beacon_init_loader.nim +++ b/portal/network/beacon/beacon_init_loader.nim @@ -1,5 +1,5 @@ -# fluffy -# Copyright (c) 2022-2024 Status Research & Development GmbH +# Nimbus +# Copyright (c) 2022-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). 
# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). diff --git a/fluffy/network/beacon/beacon_light_client.nim b/portal/network/beacon/beacon_light_client.nim similarity index 100% rename from fluffy/network/beacon/beacon_light_client.nim rename to portal/network/beacon/beacon_light_client.nim diff --git a/fluffy/network/beacon/beacon_light_client_manager.nim b/portal/network/beacon/beacon_light_client_manager.nim similarity index 99% rename from fluffy/network/beacon/beacon_light_client_manager.nim rename to portal/network/beacon/beacon_light_client_manager.nim index 95e663d195..4d432bbaad 100644 --- a/fluffy/network/beacon/beacon_light_client_manager.nim +++ b/portal/network/beacon/beacon_light_client_manager.nim @@ -1,5 +1,5 @@ -# fluffy -# Copyright (c) 2022-2024 Status Research & Development GmbH +# Nimbus +# Copyright (c) 2022-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). diff --git a/fluffy/network/beacon/beacon_network.nim b/portal/network/beacon/beacon_network.nim similarity index 99% rename from fluffy/network/beacon/beacon_network.nim rename to portal/network/beacon/beacon_network.nim index ddcbb14949..138f5a7c45 100644 --- a/fluffy/network/beacon/beacon_network.nim +++ b/portal/network/beacon/beacon_network.nim @@ -1,4 +1,4 @@ -# fluffy +# Nimbus # Copyright (c) 2022-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). 
diff --git a/fluffy/network/beacon/beacon_validation.nim b/portal/network/beacon/beacon_validation.nim similarity index 93% rename from fluffy/network/beacon/beacon_validation.nim rename to portal/network/beacon/beacon_validation.nim index 9724221b60..b4d6a4175f 100644 --- a/fluffy/network/beacon/beacon_validation.nim +++ b/portal/network/beacon/beacon_validation.nim @@ -1,5 +1,5 @@ -# Fluffy -# Copyright (c) 2024 Status Research & Development GmbH +# Nimbus +# Copyright (c) 2024-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). diff --git a/fluffy/network/beacon/content/content_keys.nim b/portal/network/beacon/content/content_keys.nim similarity index 98% rename from fluffy/network/beacon/content/content_keys.nim rename to portal/network/beacon/content/content_keys.nim index 6998346e2b..2f2b4cc25a 100644 --- a/fluffy/network/beacon/content/content_keys.nim +++ b/portal/network/beacon/content/content_keys.nim @@ -1,5 +1,5 @@ -# fluffy -# Copyright (c) 2022-2024 Status Research & Development GmbH +# Nimbus +# Copyright (c) 2022-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). 
diff --git a/fluffy/network/beacon/content/content_values.nim b/portal/network/beacon/content/content_values.nim similarity index 98% rename from fluffy/network/beacon/content/content_values.nim rename to portal/network/beacon/content/content_values.nim index 208506c345..96f57e26f6 100644 --- a/fluffy/network/beacon/content/content_values.nim +++ b/portal/network/beacon/content/content_values.nim @@ -1,5 +1,5 @@ -# Fluffy - Portal Network -# Copyright (c) 2022-2024 Status Research & Development GmbH +# Nimbus +# Copyright (c) 2022-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). diff --git a/fluffy/network/history/content/content_keys.nim b/portal/network/history/content/content_keys.nim similarity index 99% rename from fluffy/network/history/content/content_keys.nim rename to portal/network/history/content/content_keys.nim index 1d6bc40caf..4b6c9ea0e2 100644 --- a/fluffy/network/history/content/content_keys.nim +++ b/portal/network/history/content/content_keys.nim @@ -1,4 +1,4 @@ -# fluffy +# Nimbus # Copyright (c) 2021-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). 
diff --git a/fluffy/network/history/content/content_values.nim b/portal/network/history/content/content_values.nim similarity index 99% rename from fluffy/network/history/content/content_values.nim rename to portal/network/history/content/content_values.nim index a2884d0069..c1af5821b5 100644 --- a/fluffy/network/history/content/content_values.nim +++ b/portal/network/history/content/content_values.nim @@ -1,4 +1,4 @@ -# fluffy +# Nimbus # Copyright (c) 2021-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). diff --git a/fluffy/network/history/content/content_values_deprecated.nim b/portal/network/history/content/content_values_deprecated.nim similarity index 99% rename from fluffy/network/history/content/content_values_deprecated.nim rename to portal/network/history/content/content_values_deprecated.nim index 96155135f3..2cd41377fa 100644 --- a/fluffy/network/history/content/content_values_deprecated.nim +++ b/portal/network/history/content/content_values_deprecated.nim @@ -1,4 +1,4 @@ -# fluffy +# Nimbus # Copyright (c) 2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). 
diff --git a/fluffy/network/history/history_content.nim b/portal/network/history/history_content.nim similarity index 100% rename from fluffy/network/history/history_content.nim rename to portal/network/history/history_content.nim diff --git a/fluffy/network/history/history_network.nim b/portal/network/history/history_network.nim similarity index 99% rename from fluffy/network/history/history_network.nim rename to portal/network/history/history_network.nim index 76c10c2499..b0ff0a84ac 100644 --- a/fluffy/network/history/history_network.nim +++ b/portal/network/history/history_network.nim @@ -1,4 +1,4 @@ -# Fluffy +# Nimbus # Copyright (c) 2021-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). @@ -17,7 +17,7 @@ import beacon_chain/spec/presets, ../../common/common_types, ../../database/content_db, - ../../network_metadata, + ../network_metadata, ../wire/[portal_protocol, portal_stream, portal_protocol_config, ping_extensions], "."/[history_content, history_validation, history_type_conversions], ../beacon/beacon_chain_historical_roots diff --git a/fluffy/network/history/history_type_conversions.nim b/portal/network/history/history_type_conversions.nim similarity index 98% rename from fluffy/network/history/history_type_conversions.nim rename to portal/network/history/history_type_conversions.nim index 0cd3c3276a..fb34886b31 100644 --- a/fluffy/network/history/history_type_conversions.nim +++ b/portal/network/history/history_type_conversions.nim @@ -1,5 +1,5 @@ -# Fluffy -# Copyright (c) 2021-2024 Status Research & Development GmbH +# Nimbus +# Copyright (c) 2021-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). 
# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). diff --git a/fluffy/network/history/history_validation.nim b/portal/network/history/history_validation.nim similarity index 99% rename from fluffy/network/history/history_validation.nim rename to portal/network/history/history_validation.nim index 3c1413d9b4..4823de66df 100644 --- a/fluffy/network/history/history_validation.nim +++ b/portal/network/history/history_validation.nim @@ -1,4 +1,4 @@ -# Fluffy +# Nimbus # Copyright (c) 2021-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). @@ -11,7 +11,7 @@ import chronos/timer, eth/trie/ordered_trie, beacon_chain/spec/presets, - ../../network_metadata, + ../network_metadata, ../beacon/beacon_db, ./history_type_conversions, ./validation/[ diff --git a/fluffy/network/history/validation/block_proof_common.nim b/portal/network/history/validation/block_proof_common.nim similarity index 99% rename from fluffy/network/history/validation/block_proof_common.nim rename to portal/network/history/validation/block_proof_common.nim index c660fe6ea5..8412d6c915 100644 --- a/fluffy/network/history/validation/block_proof_common.nim +++ b/portal/network/history/validation/block_proof_common.nim @@ -1,4 +1,4 @@ -# Fluffy +# Nimbus # Copyright (c) 2022-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). 
diff --git a/fluffy/network/history/validation/block_proof_historical_hashes_accumulator.nim b/portal/network/history/validation/block_proof_historical_hashes_accumulator.nim similarity index 100% rename from fluffy/network/history/validation/block_proof_historical_hashes_accumulator.nim rename to portal/network/history/validation/block_proof_historical_hashes_accumulator.nim diff --git a/fluffy/network/history/validation/block_proof_historical_roots.nim b/portal/network/history/validation/block_proof_historical_roots.nim similarity index 99% rename from fluffy/network/history/validation/block_proof_historical_roots.nim rename to portal/network/history/validation/block_proof_historical_roots.nim index 9355644c57..21bcc9317f 100644 --- a/fluffy/network/history/validation/block_proof_historical_roots.nim +++ b/portal/network/history/validation/block_proof_historical_roots.nim @@ -1,4 +1,4 @@ -# Fluffy +# Nimbus # Copyright (c) 2022-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). diff --git a/fluffy/network/history/validation/block_proof_historical_summaries.nim b/portal/network/history/validation/block_proof_historical_summaries.nim similarity index 99% rename from fluffy/network/history/validation/block_proof_historical_summaries.nim rename to portal/network/history/validation/block_proof_historical_summaries.nim index 2cdc2e7d90..f2832cd1b1 100644 --- a/fluffy/network/history/validation/block_proof_historical_summaries.nim +++ b/portal/network/history/validation/block_proof_historical_summaries.nim @@ -1,4 +1,4 @@ -# Fluffy +# Nimbus # Copyright (c) 2023-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). 
diff --git a/fluffy/network/history/validation/historical_hashes_accumulator.nim b/portal/network/history/validation/historical_hashes_accumulator.nim similarity index 98% rename from fluffy/network/history/validation/historical_hashes_accumulator.nim rename to portal/network/history/validation/historical_hashes_accumulator.nim index d9d61c24db..6c474fa86d 100644 --- a/fluffy/network/history/validation/historical_hashes_accumulator.nim +++ b/portal/network/history/validation/historical_hashes_accumulator.nim @@ -52,7 +52,7 @@ type EpochRecord* = List[HeaderRecord, EPOCH_SIZE] - # In the core code of Fluffy the `EpochRecord` type is solely used, as + # In the core code of the node the `EpochRecord` type is solely used, as # `hash_tree_root` is done either once or never on this object after # serialization. # However for the generation of the proofs for all the headers in an epoch, it diff --git a/fluffy/network_metadata.nim b/portal/network/network_metadata.nim similarity index 96% rename from fluffy/network_metadata.nim rename to portal/network/network_metadata.nim index 01712bfb13..825e031fc8 100644 --- a/fluffy/network_metadata.nim +++ b/portal/network/network_metadata.nim @@ -13,7 +13,7 @@ import stew/io2, chronos/timer, beacon_chain/spec/forks, - ./network/history/validation/historical_hashes_accumulator + ./history/validation/historical_hashes_accumulator proc loadBootstrapNodes(path: string): seq[string] {.raises: [IOError].} = # Read a list of ENR URIs from a file containing a flat list of entries. @@ -30,7 +30,7 @@ proc loadCompileTimeBootstrapNodes(path: string): seq[string] = const portalConfigDir = - currentSourcePath.parentDir.parentDir.replace('\\', '/') / "vendor" / + currentSourcePath.parentDir.parentDir.parentDir.replace('\\', '/') / "vendor" / "portal-mainnet" / "config" # Note: # These are the bootstrap nodes for the Portal mainnet. 
diff --git a/fluffy/portal_node.nim b/portal/network/portal_node.nim similarity index 96% rename from fluffy/portal_node.nim rename to portal/network/portal_node.nim index b4697f1046..b534552c83 100644 --- a/fluffy/portal_node.nim +++ b/portal/network/portal_node.nim @@ -1,4 +1,4 @@ -# Fluffy +# Nimbus # Copyright (c) 2024-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). @@ -13,13 +13,13 @@ import eth/p2p/discoveryv5/protocol, beacon_chain/spec/forks, stew/byteutils, + ../eth_data/history_data_ssz_e2s, + ../database/content_db, ./network_metadata, - ./eth_data/history_data_ssz_e2s, - ./database/content_db, - ./network/wire/[portal_stream, portal_protocol_config], - ./network/beacon/[beacon_init_loader, beacon_light_client], - ./network/history/[history_network, history_content], - ./network/state/[state_network, state_content] + ./wire/[portal_stream, portal_protocol_config], + ./beacon/[beacon_init_loader, beacon_light_client], + ./history/[history_network, history_content], + ./state/[state_network, state_content] export beacon_light_client, history_network, state_network, portal_protocol_config, forks diff --git a/fluffy/network/state/content/content_keys.nim b/portal/network/state/content/content_keys.nim similarity index 98% rename from fluffy/network/state/content/content_keys.nim rename to portal/network/state/content/content_keys.nim index c1b10fde78..d4279803cc 100644 --- a/fluffy/network/state/content/content_keys.nim +++ b/portal/network/state/content/content_keys.nim @@ -1,5 +1,5 @@ -# Fluffy -# Copyright (c) 2023-2024 Status Research & Development GmbH +# Nimbus +# Copyright (c) 2023-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). 
# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). diff --git a/fluffy/network/state/content/content_values.nim b/portal/network/state/content/content_values.nim similarity index 98% rename from fluffy/network/state/content/content_values.nim rename to portal/network/state/content/content_values.nim index 4e33162a83..b5c9bd20e7 100644 --- a/fluffy/network/state/content/content_values.nim +++ b/portal/network/state/content/content_values.nim @@ -1,5 +1,5 @@ -# Fluffy -# Copyright (c) 2023-2024 Status Research & Development GmbH +# Nimbus +# Copyright (c) 2023-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). diff --git a/fluffy/network/state/content/nibbles.nim b/portal/network/state/content/nibbles.nim similarity index 97% rename from fluffy/network/state/content/nibbles.nim rename to portal/network/state/content/nibbles.nim index 0f3c66016c..086c3a2904 100644 --- a/fluffy/network/state/content/nibbles.nim +++ b/portal/network/state/content/nibbles.nim @@ -1,5 +1,5 @@ -# Fluffy -# Copyright (c) 2023-2024 Status Research & Development GmbH +# Nimbus +# Copyright (c) 2023-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). 
diff --git a/fluffy/network/state/state_content.nim b/portal/network/state/state_content.nim similarity index 87% rename from fluffy/network/state/state_content.nim rename to portal/network/state/state_content.nim index fc744315da..0e6207ef6a 100644 --- a/fluffy/network/state/state_content.nim +++ b/portal/network/state/state_content.nim @@ -1,5 +1,5 @@ -# Fluffy -# Copyright (c) 2023-2024 Status Research & Development GmbH +# Nimbus +# Copyright (c) 2023-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). diff --git a/fluffy/network/state/state_endpoints.nim b/portal/network/state/state_endpoints.nim similarity index 99% rename from fluffy/network/state/state_endpoints.nim rename to portal/network/state/state_endpoints.nim index 6e3c5c1297..2e5e911e66 100644 --- a/fluffy/network/state/state_endpoints.nim +++ b/portal/network/state/state_endpoints.nim @@ -1,4 +1,4 @@ -# Fluffy +# Nimbus # Copyright (c) 2021-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). diff --git a/fluffy/network/state/state_gossip.nim b/portal/network/state/state_gossip.nim similarity index 98% rename from fluffy/network/state/state_gossip.nim rename to portal/network/state/state_gossip.nim index 3c78c383b5..029e4e5d74 100644 --- a/fluffy/network/state/state_gossip.nim +++ b/portal/network/state/state_gossip.nim @@ -1,5 +1,5 @@ -# Fluffy -# Copyright (c) 2021-2024 Status Research & Development GmbH +# Nimbus +# Copyright (c) 2021-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). 
# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). diff --git a/fluffy/network/state/state_network.nim b/portal/network/state/state_network.nim similarity index 100% rename from fluffy/network/state/state_network.nim rename to portal/network/state/state_network.nim diff --git a/fluffy/network/state/state_utils.nim b/portal/network/state/state_utils.nim similarity index 98% rename from fluffy/network/state/state_utils.nim rename to portal/network/state/state_utils.nim index de2b93f51a..e30c5dbec8 100644 --- a/fluffy/network/state/state_utils.nim +++ b/portal/network/state/state_utils.nim @@ -1,5 +1,5 @@ -# Fluffy -# Copyright (c) 2024 Status Research & Development GmbH +# Nimbus +# Copyright (c) 2024-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). diff --git a/fluffy/network/state/state_validation.nim b/portal/network/state/state_validation.nim similarity index 99% rename from fluffy/network/state/state_validation.nim rename to portal/network/state/state_validation.nim index e465b54b3e..5c32b0a598 100644 --- a/fluffy/network/state/state_validation.nim +++ b/portal/network/state/state_validation.nim @@ -1,4 +1,4 @@ -# Fluffy +# Nimbus # Copyright (c) 2024-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). 
diff --git a/fluffy/network/wire/README.md b/portal/network/wire/README.md similarity index 72% rename from fluffy/network/wire/README.md rename to portal/network/wire/README.md index 823c8b623e..884e432588 100644 --- a/fluffy/network/wire/README.md +++ b/portal/network/wire/README.md @@ -1,6 +1,6 @@ # Portal Network Wire Protocol ## Introduction -The `fluffy/network/wire` directory holds a Nim implementation of the +The `portal/network/wire` directory holds a Nim implementation of the [Portal Network Wire Protocol](https://github.com/ethereum/portal-network-specs/blob/31bc7e58e2e8acfba895d5a12a9ae3472894d398/state/state-network.md#wire-protocol). The wire protocol builds on top of the Node Discovery v5.1 protocol its @@ -10,22 +10,6 @@ For further information on the Nim implementation of the Node Discovery v5.1 protocol check out the [discv5](https://github.com/status-im/nim-eth/blob/master/doc/discv5.md) page. -## Test suite -To run the test suite specifically for the Portal wire protocol, run following -commands: -```sh -git clone https://github.com/status-im/nimbus-eth1.git -cd nimbus-eth1 - -# To bring the git submodules up to date -make update - -# Build & run Portal wire protocol encoding test -./env.sh nim c -r ./fluffy/tests/test_portal_wire_encoding -# Build & run Portal wire protocol network test -./env.sh nim c -r ./fluffy/tests/test_portal_wire_protocol -``` - ## portalcli This is a small command line application that allows you to run a node running Discovery v5.1 + Portal wire protocol. @@ -43,8 +27,8 @@ send one of the Portal message types, wait for the response, and then shut down. 
git clone https://github.com/status-im/nimbus-eth1.git cd nimbus-eth1 -# Build the fluffy tools -make fluffy-tools +# Build portalcli +make portalcli # See all options ./build/portalcli --help diff --git a/fluffy/network/wire/messages.nim b/portal/network/wire/messages.nim similarity index 100% rename from fluffy/network/wire/messages.nim rename to portal/network/wire/messages.nim diff --git a/fluffy/network/wire/ping_extensions.nim b/portal/network/wire/ping_extensions.nim similarity index 100% rename from fluffy/network/wire/ping_extensions.nim rename to portal/network/wire/ping_extensions.nim diff --git a/fluffy/network/wire/portal_protocol.nim b/portal/network/wire/portal_protocol.nim similarity index 99% rename from fluffy/network/wire/portal_protocol.nim rename to portal/network/wire/portal_protocol.nim index cb40901f2b..58cbfbad6b 100644 --- a/fluffy/network/wire/portal_protocol.nim +++ b/portal/network/wire/portal_protocol.nim @@ -1,4 +1,4 @@ -# Fluffy +# Nimbus # Copyright (c) 2021-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). diff --git a/fluffy/network/wire/portal_protocol_config.nim b/portal/network/wire/portal_protocol_config.nim similarity index 99% rename from fluffy/network/wire/portal_protocol_config.nim rename to portal/network/wire/portal_protocol_config.nim index 891cd39b49..50576f1aca 100644 --- a/fluffy/network/wire/portal_protocol_config.nim +++ b/portal/network/wire/portal_protocol_config.nim @@ -1,4 +1,4 @@ -# Fluffy +# Nimbus # Copyright (c) 2021-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). 
diff --git a/fluffy/network/wire/portal_protocol_version.nim b/portal/network/wire/portal_protocol_version.nim similarity index 100% rename from fluffy/network/wire/portal_protocol_version.nim rename to portal/network/wire/portal_protocol_version.nim diff --git a/fluffy/network/wire/portal_stream.nim b/portal/network/wire/portal_stream.nim similarity index 99% rename from fluffy/network/wire/portal_stream.nim rename to portal/network/wire/portal_stream.nim index d5d3e88c4d..24014d6e1f 100644 --- a/fluffy/network/wire/portal_stream.nim +++ b/portal/network/wire/portal_stream.nim @@ -463,7 +463,7 @@ proc handleIncomingConnection( # TODO: Is there a scenario where this can happen, # considering `allowRegisteredIdCallback`? If not, doAssert? - var fut = newFuture[void]("fluffy.AcceptConnectionCallback") + var fut = newFuture[void]("nimbus_portal.AcceptConnectionCallback") fut.complete() return fut diff --git a/fluffy/nim.cfg b/portal/nim.cfg similarity index 98% rename from fluffy/nim.cfg rename to portal/nim.cfg index 3c97119ca2..93e93c66f0 100644 --- a/fluffy/nim.cfg +++ b/portal/nim.cfg @@ -1,4 +1,4 @@ -# fluffy +# Nimbus # Copyright (c) 2022-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). diff --git a/fluffy/rpc/eth_rpc_client.nim b/portal/rpc/eth_rpc_client.nim similarity index 86% rename from fluffy/rpc/eth_rpc_client.nim rename to portal/rpc/eth_rpc_client.nim index dc432485a8..74edf0dced 100644 --- a/fluffy/rpc/eth_rpc_client.nim +++ b/portal/rpc/eth_rpc_client.nim @@ -1,5 +1,5 @@ -# fluffy -# Copyright (c) 2022-2024 Status Research & Development GmbH +# Nimbus +# Copyright (c) 2022-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). 
# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). diff --git a/fluffy/rpc/portal_rpc_client.nim b/portal/rpc/portal_rpc_client.nim similarity index 98% rename from fluffy/rpc/portal_rpc_client.nim rename to portal/rpc/portal_rpc_client.nim index 55fe52d6ec..42359f7bb4 100644 --- a/fluffy/rpc/portal_rpc_client.nim +++ b/portal/rpc/portal_rpc_client.nim @@ -1,5 +1,5 @@ -# fluffy -# Copyright (c) 2021-2024 Status Research & Development GmbH +# Nimbus +# Copyright (c) 2021-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). diff --git a/fluffy/rpc/rpc_calls/rpc_debug_calls.nim b/portal/rpc/rpc_calls/rpc_debug_calls.nim similarity index 99% rename from fluffy/rpc/rpc_calls/rpc_debug_calls.nim rename to portal/rpc/rpc_calls/rpc_debug_calls.nim index f216220c41..5742b40df8 100644 --- a/fluffy/rpc/rpc_calls/rpc_debug_calls.nim +++ b/portal/rpc/rpc_calls/rpc_debug_calls.nim @@ -1,4 +1,4 @@ -# fluffy +# Nimbus # Copyright (c) 2021-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). 
diff --git a/fluffy/rpc/rpc_calls/rpc_discovery_calls.nim b/portal/rpc/rpc_calls/rpc_discovery_calls.nim similarity index 94% rename from fluffy/rpc/rpc_calls/rpc_discovery_calls.nim rename to portal/rpc/rpc_calls/rpc_discovery_calls.nim index 0d00b063df..d22029f3f0 100644 --- a/fluffy/rpc/rpc_calls/rpc_discovery_calls.nim +++ b/portal/rpc/rpc_calls/rpc_discovery_calls.nim @@ -1,5 +1,5 @@ -# fluffy -# Copyright (c) 2021-2024 Status Research & Development GmbH +# Nimbus +# Copyright (c) 2021-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). diff --git a/fluffy/rpc/rpc_calls/rpc_eth_calls.nim b/portal/rpc/rpc_calls/rpc_eth_calls.nim similarity index 99% rename from fluffy/rpc/rpc_calls/rpc_eth_calls.nim rename to portal/rpc/rpc_calls/rpc_eth_calls.nim index 8e2dd154c1..7da45cc9b3 100644 --- a/fluffy/rpc/rpc_calls/rpc_eth_calls.nim +++ b/portal/rpc/rpc_calls/rpc_eth_calls.nim @@ -1,4 +1,4 @@ -# fluffy +# Nimbus # Copyright (c) 2021-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). diff --git a/fluffy/rpc/rpc_calls/rpc_portal_calls.nim b/portal/rpc/rpc_calls/rpc_portal_calls.nim similarity index 99% rename from fluffy/rpc/rpc_calls/rpc_portal_calls.nim rename to portal/rpc/rpc_calls/rpc_portal_calls.nim index 175448e5bc..90b1f03980 100644 --- a/fluffy/rpc/rpc_calls/rpc_portal_calls.nim +++ b/portal/rpc/rpc_calls/rpc_portal_calls.nim @@ -1,4 +1,4 @@ -# fluffy +# Nimbus # Copyright (c) 2021-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). 
diff --git a/fluffy/rpc/rpc_calls/rpc_portal_debug_calls.nim b/portal/rpc/rpc_calls/rpc_portal_debug_calls.nim similarity index 92% rename from fluffy/rpc/rpc_calls/rpc_portal_debug_calls.nim rename to portal/rpc/rpc_calls/rpc_portal_debug_calls.nim index f7e50f4b3a..792dc19e86 100644 --- a/fluffy/rpc/rpc_calls/rpc_portal_debug_calls.nim +++ b/portal/rpc/rpc_calls/rpc_portal_debug_calls.nim @@ -1,5 +1,5 @@ -# fluffy -# Copyright (c) 2021-2024 Status Research & Development GmbH +# Nimbus +# Copyright (c) 2021-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). diff --git a/fluffy/rpc/rpc_calls/rpc_trace_calls.nim b/portal/rpc/rpc_calls/rpc_trace_calls.nim similarity index 90% rename from fluffy/rpc/rpc_calls/rpc_trace_calls.nim rename to portal/rpc/rpc_calls/rpc_trace_calls.nim index d507b5151b..96e4eae308 100644 --- a/fluffy/rpc/rpc_calls/rpc_trace_calls.nim +++ b/portal/rpc/rpc_calls/rpc_trace_calls.nim @@ -1,5 +1,5 @@ -# fluffy -# Copyright (c) 2023-2024 Status Research & Development GmbH +# Nimbus +# Copyright (c) 2023-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). 
diff --git a/fluffy/rpc/rpc_debug_api.nim b/portal/rpc/rpc_debug_api.nim similarity index 99% rename from fluffy/rpc/rpc_debug_api.nim rename to portal/rpc/rpc_debug_api.nim index 4332270b40..a269bea9d8 100644 --- a/fluffy/rpc/rpc_debug_api.nim +++ b/portal/rpc/rpc_debug_api.nim @@ -1,4 +1,4 @@ -# Fluffy +# Nimbus # Copyright (c) 2021-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). diff --git a/fluffy/rpc/rpc_discovery_api.nim b/portal/rpc/rpc_discovery_api.nim similarity index 100% rename from fluffy/rpc/rpc_discovery_api.nim rename to portal/rpc/rpc_discovery_api.nim diff --git a/fluffy/rpc/rpc_eth_api.nim b/portal/rpc/rpc_eth_api.nim similarity index 99% rename from fluffy/rpc/rpc_eth_api.nim rename to portal/rpc/rpc_eth_api.nim index cbdac90341..71a71a1311 100644 --- a/fluffy/rpc/rpc_eth_api.nim +++ b/portal/rpc/rpc_eth_api.nim @@ -1,4 +1,4 @@ -# Fluffy +# Nimbus # Copyright (c) 2021-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). @@ -28,7 +28,7 @@ from eth/rlp import computeRlpHash export rpcserver # See the list of Ethereum execution JSON-RPC APIs which will be supported by -# Portal Network clients such as Fluffy: +# Portal Network clients: # https://github.com/ethereum/portal-network-specs?tab=readme-ov-file#the-json-rpc-api func init*( @@ -245,7 +245,7 @@ proc installEthApiHandlers*( return Quantity(txCount) - # Note: can't implement this yet as the fluffy node doesn't know the relation + # Note: can't implement this yet as the portal node doesn't know the relation # of tx hash -> block number -> block hash, in order to get the receipt # from from the block with that block hash. The Canonical Indices Network # would need to be implemented to get this information. 
diff --git a/fluffy/rpc/rpc_portal_beacon_api.nim b/portal/rpc/rpc_portal_beacon_api.nim similarity index 99% rename from fluffy/rpc/rpc_portal_beacon_api.nim rename to portal/rpc/rpc_portal_beacon_api.nim index 909e061d20..0c184ac299 100644 --- a/fluffy/rpc/rpc_portal_beacon_api.nim +++ b/portal/rpc/rpc_portal_beacon_api.nim @@ -1,4 +1,4 @@ -# fluffy +# Nimbus # Copyright (c) 2021-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). diff --git a/fluffy/rpc/rpc_portal_common_api.nim b/portal/rpc/rpc_portal_common_api.nim similarity index 99% rename from fluffy/rpc/rpc_portal_common_api.nim rename to portal/rpc/rpc_portal_common_api.nim index 44b14fbff3..ada2d77b08 100644 --- a/fluffy/rpc/rpc_portal_common_api.nim +++ b/portal/rpc/rpc_portal_common_api.nim @@ -1,4 +1,4 @@ -# fluffy +# Nimbus # Copyright (c) 2021-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). diff --git a/fluffy/rpc/rpc_portal_debug_history_api.nim b/portal/rpc/rpc_portal_debug_history_api.nim similarity index 89% rename from fluffy/rpc/rpc_portal_debug_history_api.nim rename to portal/rpc/rpc_portal_debug_history_api.nim index 2207d043b7..f29890d545 100644 --- a/fluffy/rpc/rpc_portal_debug_history_api.nim +++ b/portal/rpc/rpc_portal_debug_history_api.nim @@ -1,5 +1,5 @@ -# Fluffy -# Copyright (c) 2022-2024 Status Research & Development GmbH +# Nimbus +# Copyright (c) 2022-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). 
@@ -16,7 +16,7 @@ import export rpcserver # Non-spec-RPCs that are used for seeding history content into the network without -# usage of the standalone portal_bridge. As source Era1 files are used. +# usage of the standalone nimbus_portal_bridge. As source Era1 files are used. proc installPortalDebugHistoryApiHandlers*(rpcServer: RpcServer, p: PortalProtocol) = ## Portal debug API calls related to storage and seeding from Era1 files. rpcServer.rpc("portal_debug_historyGossipHeaders") do( diff --git a/fluffy/rpc/rpc_portal_history_api.nim b/portal/rpc/rpc_portal_history_api.nim similarity index 99% rename from fluffy/rpc/rpc_portal_history_api.nim rename to portal/rpc/rpc_portal_history_api.nim index 463fdf091e..0d1a302003 100644 --- a/fluffy/rpc/rpc_portal_history_api.nim +++ b/portal/rpc/rpc_portal_history_api.nim @@ -1,4 +1,4 @@ -# fluffy +# Nimbus # Copyright (c) 2021-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). diff --git a/fluffy/rpc/rpc_portal_nimbus_beacon_api.nim b/portal/rpc/rpc_portal_nimbus_beacon_api.nim similarity index 91% rename from fluffy/rpc/rpc_portal_nimbus_beacon_api.nim rename to portal/rpc/rpc_portal_nimbus_beacon_api.nim index 9cc81dfe46..10365eb4eb 100644 --- a/fluffy/rpc/rpc_portal_nimbus_beacon_api.nim +++ b/portal/rpc/rpc_portal_nimbus_beacon_api.nim @@ -11,7 +11,7 @@ import json_rpc/rpcserver, ../network/beacon/beacon_network export rpcserver -# execution_chain/fluffy specific RPC methods for the Portal beacon network. +# nimbus portal specific RPC methods for the Portal beacon network. 
proc installPortalNimbusBeaconApiHandlers*(rpcServer: RpcServer, n: BeaconNetwork) = rpcServer.rpc("portal_nimbus_beaconSetTrustedBlockRoot") do(blockRoot: string) -> bool: let root = Digest.fromHex(blockRoot) diff --git a/fluffy/rpc/rpc_portal_state_api.nim b/portal/rpc/rpc_portal_state_api.nim similarity index 99% rename from fluffy/rpc/rpc_portal_state_api.nim rename to portal/rpc/rpc_portal_state_api.nim index a1f20811a0..2658a47c97 100644 --- a/fluffy/rpc/rpc_portal_state_api.nim +++ b/portal/rpc/rpc_portal_state_api.nim @@ -1,4 +1,4 @@ -# fluffy +# Nimbus # Copyright (c) 2021-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). diff --git a/fluffy/rpc/rpc_types.nim b/portal/rpc/rpc_types.nim similarity index 99% rename from fluffy/rpc/rpc_types.nim rename to portal/rpc/rpc_types.nim index bb25be59fe..2b0c2b9383 100644 --- a/fluffy/rpc/rpc_types.nim +++ b/portal/rpc/rpc_types.nim @@ -31,7 +31,7 @@ const msg: "The client has blocked users from specifying the payload for this extension", ) - # These errors are used by Fluffy but are not yet in the spec + # These errors are used by Nimbus Portal but are not yet in the spec InvalidContentKeyError* = (code: -32602, msg: "Invalid content key") InvalidContentValueError* = (code: -32602, msg: "Invalid content value") diff --git a/fluffy/scripts/launch_local_testnet.sh b/portal/scripts/launch_local_testnet.sh similarity index 95% rename from fluffy/scripts/launch_local_testnet.sh rename to portal/scripts/launch_local_testnet.sh index 9c3c973c65..8f8d6c2ace 100755 --- a/fluffy/scripts/launch_local_testnet.sh +++ b/portal/scripts/launch_local_testnet.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash -# Copyright (c) 2021-2024 Status Research & Development GmbH. Licensed under +# Copyright (c) 2021-2025 Status Research & Development GmbH. 
Licensed under # either of: # - Apache License, version 2.0 # - MIT license @@ -8,7 +8,7 @@ # according to those terms. # This script is for a big part a copy of the nimbus-eth2 launch_local_testnet -# script. This script however does not expect fluffy nodes to exit 0 in the good +# script. This script however does not expect Portal nodes to exit 0 in the good # case, but instead the json-rpc interface is used to check whether certain # values are what we expect them to be. @@ -52,7 +52,7 @@ WS_ENABLED="0" REUSE_EXISTING_DATA_DIR="0" TIMEOUT_DURATION="0" KILL_OLD_PROCESSES="0" -SCRIPTS_DIR="fluffy/scripts/" +SCRIPTS_DIR="portal/scripts/" PORTAL_BRIDGE="0" TRUSTED_BLOCK_ROOT="" # REST_URL="http://127.0.0.1:5052" @@ -63,7 +63,7 @@ DISABLE_STATE_ROOT_VALIDATION="0" print_help() { cat < Date: Mon, 19 May 2025 12:11:54 +0700 Subject: [PATCH 018/138] Avoid persisting the same txFrame twice in updateBase (#3301) --- execution_chain/core/chain/forked_chain/chain_branch.nim | 3 +++ 1 file changed, 3 insertions(+) diff --git a/execution_chain/core/chain/forked_chain/chain_branch.nim b/execution_chain/core/chain/forked_chain/chain_branch.nim index 1ce992f8b3..326a3415a2 100644 --- a/execution_chain/core/chain/forked_chain/chain_branch.nim +++ b/execution_chain/core/chain/forked_chain/chain_branch.nim @@ -177,6 +177,9 @@ iterator everyNthBlock*(loc: BlockPos, step: uint64): TxFrameAndStateRoot = stateRoot: loc.stateRoot ) + # Don't add the above txFrame anymore + number -= min(number, step) + while not branch.isNil: let tailNumber = branch.tailNumber while tailNumber > step and number > tailNumber: From 54db56ee48b4dfd9431af326f0cd5b59c18b3db5 Mon Sep 17 00:00:00 2001 From: andri lim Date: Mon, 19 May 2025 17:34:08 +0700 Subject: [PATCH 019/138] Fix hoodi chain config (#3304) * hoodi chain config: fix shanghai time typo * Validate built in chain config * Override compiler side effect analysis --- execution_chain/common/chain_config.nim | 9 ++++++++- 1 file changed, 8 
insertions(+), 1 deletion(-) diff --git a/execution_chain/common/chain_config.nim b/execution_chain/common/chain_config.nim index b4fbb49306..4a9116f0e7 100644 --- a/execution_chain/common/chain_config.nim +++ b/execution_chain/common/chain_config.nim @@ -600,7 +600,7 @@ func chainConfigForNetwork*(id: NetworkId): ChainConfig = londonBlock: Opt.some(0.BlockNumber), mergeNetsplitBlock: Opt.some(0.BlockNumber), terminalTotalDifficulty: Opt.some(0.u256), - shanghaiTime: Opt.some(10.EthTime), + shanghaiTime: Opt.some(0.EthTime), cancunTime: Opt.some(0.EthTime), pragueTime: Opt.some(1_742_999_832.EthTime), depositContractAddress: Opt.some(HOODI_DEPOSIT_CONTRACT_ADDRESS), @@ -609,6 +609,13 @@ func chainConfigForNetwork*(id: NetworkId): ChainConfig = else: ChainConfig() + {.cast(noSideEffect).}: + # Obviously we lie about no side effect. + # If chonicles enabled and there is something bad with + # the chain config values, `validateChainConfig` will print something. + # But it is very rare and must immediately fixed anyway. 
+ doAssert validateChainConfig(result) + func genesisBlockForNetwork*(id: NetworkId): Genesis {.gcsafe, raises: [ValueError, RlpError].} = result = if id == MainNet: From 37da1503c651eb500f4739c93fd810df951d166b Mon Sep 17 00:00:00 2001 From: andri lim Date: Mon, 19 May 2025 21:48:44 +0700 Subject: [PATCH 020/138] eth/69: Disconnect peer when receive invalid blockRangeUpdate (#3300) * eth/69: Disconnect peer when receive invalid blockRangeUpdate * Add trace log when disconnecting peer * Use debug instead of trace to log blockRangeUpdate --- execution_chain/sync/wire_protocol/responder.nim | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/execution_chain/sync/wire_protocol/responder.nim b/execution_chain/sync/wire_protocol/responder.nim index 5a61e495fd..bd19678bbe 100644 --- a/execution_chain/sync/wire_protocol/responder.nim +++ b/execution_chain/sync/wire_protocol/responder.nim @@ -283,6 +283,13 @@ proc blockRangeUpdateUserHandler(peer: Peer; packet: BlockRangeUpdatePacket) {. earliest = packet.earliest, latest = packet.latest, latestHash = packet.latestHash.short + if packet.earliest > packet.latest: + debug "Disconnecting peer because of protocol breach", + remote = peer.remote, clientId = peer.clientId, + msg = "blockRangeUpdate must have latest >= earliest" + await peer.disconnect(BreachOfProtocol) + return + peer.state(eth69).earliest = packet.earliest peer.state(eth69).latest = packet.latest peer.state(eth69).latestHash = packet.latestHash From 05eaffbe06024f326499662d691d6bfa0b6127f4 Mon Sep 17 00:00:00 2001 From: Jordan Hrycaj Date: Mon, 19 May 2025 16:25:33 +0000 Subject: [PATCH 021/138] Beacon sync update header and reorg blocks processing (#3306) * Code cosmetics, docu/comment and logging updates, etc. * Explicitly limit header queue length why Beware of outliers (remember law of iterated log.) also No need to reorg the header queue, anymore. This was a pre-PR #3125 feature which was needed to curb the queue when it grow too large. 
This cannot happen anymore as there is always a deterministic fetch that can solve any immediate gap preventing the queue from serialising headers. * Fix issue #3298 reason for crash The syncer will stop trying downloading headers after failing on 30 different sync peers. The state machine will advance to `cancelHeaders` causing all sync peers to stop as soon as they can without updating the bookkeeping for unprocessed headers which might leave the `books` in an open or non-finalised state. Unfortunately, when synchronising all simultaneously running sync peers, the *books* were checked for sort of being finalised already before cleaning up (aka finalising.) * Remove `--debug-beacon-sync-blocks-queue-hwm` command line option why Not needed anymore as the block queue will run on a smaller memory footprint, anyway. * Allow downloading blocks while importing/executing simultaneously why The last PRs merged seem to have made a change, presumably in the `FC` module running `async`. This allows for importing/executing blocks while fetching new ones at the same without depleting sync peers. Previously, all sync peers were gone after a while when doing this. 
* Move out blocks import section as a separate source module * Reorg blocks download and import/execute why Blocks download and import is now modelled after how it is done for the headers: + if a sync peer can import right at the top of the `FC` module, download a list of blocks and import right away + Otherwise, if a sync peer cannot directly import, then download and queue a list of blocks if there is space on the queue As a separate pseudo task, fetch a list of blocks from the queue if it can be imported right at the top of the `FC` module --- execution_chain/config.nim | 6 - .../core/chain/header_chain_cache.nim | 6 + execution_chain/nimbus_execution_client.nim | 2 +- execution_chain/sync/beacon.nim | 2 - execution_chain/sync/beacon/worker.nim | 66 ++- .../sync/beacon/worker/blocks_staged.nim | 539 +++++++----------- .../worker/blocks_staged/staged_blocks.nim | 211 +++++++ .../sync/beacon/worker/headers_staged.nim | 174 +++--- .../worker/headers_staged/staged_collect.nim | 2 +- .../sync/beacon/worker/helpers.nim | 8 +- .../sync/beacon/worker/start_stop.nim | 29 +- execution_chain/sync/beacon/worker/update.nim | 7 +- execution_chain/sync/beacon/worker_config.nim | 36 +- execution_chain/sync/beacon/worker_desc.nim | 12 +- execution_chain/sync/sync_sched.nim | 38 +- 15 files changed, 576 insertions(+), 562 deletions(-) create mode 100644 execution_chain/sync/beacon/worker/blocks_staged/staged_blocks.nim diff --git a/execution_chain/config.nim b/execution_chain/config.nim index 0900335d57..be59d1817b 100644 --- a/execution_chain/config.nim +++ b/execution_chain/config.nim @@ -352,12 +352,6 @@ type "is accepted" name: "debug-beacon-sync-target-file" .}: Option[InputFile] - beaconSyncBlocksQueueHwm* {. - hidden - desc: "Limit number of blocks on staging queue for beacon sync" - defaultValue: 0 - name: "debug-beacon-sync-blocks-queue-hwm" .}: int - rocksdbMaxOpenFiles {. 
hidden defaultValue: defaultMaxOpenFiles diff --git a/execution_chain/core/chain/header_chain_cache.nim b/execution_chain/core/chain/header_chain_cache.nim index bb0756968c..688649972a 100644 --- a/execution_chain/core/chain/header_chain_cache.nim +++ b/execution_chain/core/chain/header_chain_cache.nim @@ -593,6 +593,12 @@ func head*(hc: HeaderChainRef): Header = if collecting <= hc.state: return hc.session.head +func headHash*(hc: HeaderChainRef): Hash32 = + ## Getter: hash of `head()` + ## + if collecting <= hc.state: + return hc.session.headHash + func antecedent*(hc: HeaderChainRef): Header = ## Getter: bottom of header chain. In case there is no header chain ## initialised, the return value is `Header()` (i.e. the block number diff --git a/execution_chain/nimbus_execution_client.nim b/execution_chain/nimbus_execution_client.nim index 79a957cc84..f781f52ccb 100644 --- a/execution_chain/nimbus_execution_client.nim +++ b/execution_chain/nimbus_execution_client.nim @@ -117,7 +117,7 @@ proc setupP2P(nimbus: NimbusNode, conf: NimbusConf, # Always initialise beacon syncer nimbus.beaconSyncRef = BeaconSyncRef.init( - nimbus.ethNode, nimbus.fc, conf.maxPeers, conf.beaconSyncBlocksQueueHwm) + nimbus.ethNode, nimbus.fc, conf.maxPeers) # Optional for pre-setting the sync target (i.e. 
debugging) if conf.beaconSyncTargetFile.isSome(): diff --git a/execution_chain/sync/beacon.nim b/execution_chain/sync/beacon.nim index e3d43d3935..e7996039ed 100644 --- a/execution_chain/sync/beacon.nim +++ b/execution_chain/sync/beacon.nim @@ -62,11 +62,9 @@ proc init*( ethNode: EthereumNode; chain: ForkedChainRef; maxPeers: int; - blockQueueHwm = 0; ): T = var desc = T() desc.initSync(ethNode, maxPeers) - desc.ctx.pool.blkStagedHwm = blockQueueHwm desc.ctx.pool.chain = chain desc diff --git a/execution_chain/sync/beacon/worker.nim b/execution_chain/sync/beacon/worker.nim index 667dad54c4..dddbf2441a 100644 --- a/execution_chain/sync/beacon/worker.nim +++ b/execution_chain/sync/beacon/worker.nim @@ -24,14 +24,13 @@ import # Private functions # ------------------------------------------------------------------------------ -proc napUnlessSomethingToFetch( +proc napUnlessSomethingToCollect( buddy: BeaconBuddyRef; ): Future[bool] {.async: (raises: []).} = ## When idle, save cpu cycles waiting for something to do. - if buddy.ctx.pool.blkImportOk or # currently importing blocks - buddy.ctx.hibernate or # not activated yet? - not (buddy.headersStagedFetchOk() or # something on TODO list - buddy.blocksStagedFetchOk()): + if buddy.ctx.hibernate or # not activated yet? + not (buddy.headersStagedCollectOk() or # something on TODO list + buddy.blocksStagedCollectOk()): try: await sleepAsync workerIdleWaitInterval except CancelledError: @@ -118,7 +117,7 @@ proc runDaemon*( ) {.async: (raises: []).} = ## Global background job that will be re-started as long as the variable ## `ctx.daemon` is set `true` which corresponds to `ctx.hibernating` set - ## to false`. + ## to false. ## ## On a fresh start, the flag `ctx.daemon` will not be set `true` before the ## first usable request from the CL (via RPC) stumbles in. @@ -129,22 +128,14 @@ proc runDaemon*( return # Execute staged block records. 
- if ctx.blocksStagedCanImportOk(): - - block: - # Set flag informing peers to go into idle mode while importing takes - # place. It has been observed that importing blocks and downloading - # at the same time does not work very well, most probably due to high - # system activity while importing. Peers will get lost pretty soon after - # downloading starts if they continue downloading. - ctx.pool.blkImportOk = true - defer: ctx.pool.blkImportOk = false - - # Import from staged queue. - while await ctx.blocksStagedImport(info): - if not ctx.daemon or # Implied by external sync shutdown? - ctx.poolMode: # Oops, re-org needed? - return + if ctx.blocksStagedProcessOk(): + + # Import bodies from the `staged` queue. + discard await ctx.blocksStagedProcess info + + if not ctx.daemon or # Implied by external sync shutdown? + ctx.poolMode: # Oops, re-org needed? + return # At the end of the cycle, leave time to trigger refill headers/blocks try: await sleepAsync daemonWaitInterval @@ -187,23 +178,36 @@ proc runPeer*( buddy.only.multiRunIdle = Moment.now() - buddy.only.stoppedMultiRun buddy.only.nMultiLoop.inc # statistics/debugging - if not await buddy.napUnlessSomethingToFetch(): + if not await buddy.napUnlessSomethingToCollect(): # Download and process headers and blocks - while buddy.headersStagedFetchOk(): + while buddy.headersStagedCollectOk(): # Collect headers and either stash them on the header chain cache - # directly, or stage then on the header queue to get them serialised, - # later. - if await buddy.headersStagedCollect info: + # directly, or stage on the header queue to get them serialised and + # stashed, later. + await buddy.headersStagedCollect info - # Store headers from the `staged` queue onto the header chain cache. - buddy.headersStagedProcess info + # Store serialised headers from the `staged` queue onto the header + # chain cache. + if not buddy.headersStagedProcess info: + # Need to proceed with another peer (e.g. 
gap between queue and + # header chain cache.) + break # Fetch bodies and combine them with headers to blocks to be staged. These # staged blocks are then excuted by the daemon process (no `peer` needed.) - while buddy.blocksStagedFetchOk(): - discard await buddy.blocksStagedCollect info + while buddy.blocksStagedCollectOk(): + + # Collect bodies and either import them via `FC` module, or stage on + # the blocks queue to get them serialised and imported, later. + await buddy.blocksStagedCollect info + + # Import bodies from the `staged` queue. + if not await buddy.blocksStagedProcess info: + # Need to proceed with another peer (e.g. gap between top imported + # block and blocks queue.) + break # Note that it is important **not** to leave this function to be # re-invoked by the scheduler unless necessary. While the time gap diff --git a/execution_chain/sync/beacon/worker/blocks_staged.nim b/execution_chain/sync/beacon/worker/blocks_staged.nim index 935219f036..fee39f6c5e 100644 --- a/execution_chain/sync/beacon/worker/blocks_staged.nim +++ b/execution_chain/sync/beacon/worker/blocks_staged.nim @@ -11,400 +11,255 @@ {.push raises:[].} import - pkg/[chronicles, chronos], + pkg/[chronicles, chronos, results], pkg/eth/common, pkg/stew/[interval_set, sorted_set], + ../../../networking/p2p, ../worker_desc, - ./blocks_staged/bodies, - ../../wire_protocol/types, - ./[blocks_unproc, helpers, update] + ./blocks_staged/[bodies, staged_blocks], + ./[blocks_unproc, helpers] # ------------------------------------------------------------------------------ -# Private debugging & logging helpers +# Private function(s) # ------------------------------------------------------------------------------ -formatIt(Hash32): - it.short - -# ------------------------------------------------------------------------------ -# Private helpers -# ------------------------------------------------------------------------------ +proc blocksStagedProcessImpl( + ctx: BeaconCtxRef; + maybePeer: Opt[Peer]; 
+ info: static[string]; + ): Future[bool] + {.async: (raises: []).} = + ## Import/execute blocks record from staged queue. + ## + ## The function returns `false` if the caller should make sure to allow + ## to switch to another sync peer, e.g. for directly filling the gap + ## between the top of the `topImported` and the least queue block number. + ## + if ctx.blk.staged.len == 0: + trace info & ": blocksStagedProcess empty queue", peer=($maybePeer), + topImported=ctx.blk.topImported.bnStr, nStagedQ=ctx.blk.staged.len, + poolMode=ctx.poolMode, syncState=ctx.pool.lastState, + nSyncPeers=ctx.pool.nBuddies + return false # switch peer -proc getNthHash(ctx: BeaconCtxRef; blk: BlocksForImport; n: int): Hash32 = - ctx.hdrCache.getHash(blk.blocks[n].header.number).valueOr: - return zeroHash32 + var + nImported = 0u64 # statistics + switchPeer = false # for return code -proc updateBuddyErrorState(buddy: BeaconBuddyRef) = - ## Helper/wrapper - if ((0 < buddy.only.nBdyRespErrors or - 0 < buddy.only.nBdyProcErrors) and buddy.ctrl.stopped) or - fetchBodiesReqErrThresholdCount < buddy.only.nBdyRespErrors or - fetchBodiesProcessErrThresholdCount < buddy.only.nBdyProcErrors: + trace info & ": blocksStagedProcess start", peer=($maybePeer), + topImported=ctx.blk.topImported.bnStr, nStagedQ=ctx.blk.staged.len, + poolMode=ctx.poolMode, syncState=ctx.pool.lastState, + nSyncPeers=ctx.pool.nBuddies - # Make sure that this peer does not immediately reconnect - buddy.ctrl.zombie = true + var minNum = BlockNumber(0) + while ctx.pool.lastState == processingBlocks: -# ------------------------------------------------------------------------------ -# Private function(s) -# ------------------------------------------------------------------------------ + # Fetch list with the least block numbers + let qItem = ctx.blk.staged.ge(0).valueOr: + break # all done -proc fetchAndCheck( - buddy: BeaconBuddyRef; - ivReq: BnRange; - blk: ref BlocksForImport; # update in place - info: static[string]; - ): 
Future[bool] {.async: (raises: []).} = + # Make sure that the lowest block is available, already. Or the other way + # round: no unprocessed block number range precedes the least staged block. + minNum = qItem.data.blocks[0].header.number + if ctx.blk.topImported + 1 < minNum: + trace info & ": block queue not ready yet", peer=($maybePeer), + topImported=ctx.blk.topImported.bnStr, qItem=qItem.data.blocks.bnStr, + nStagedQ=ctx.blk.staged.len, nSyncPeers=ctx.pool.nBuddies + switchPeer = true # there is a gap -- come back later + break - let - ctx = buddy.ctx - offset = blk.blocks.len.uint64 - - # Make sure that the block range matches the top - doAssert offset == 0 or blk.blocks[offset - 1].header.number+1 == ivReq.minPt - - # Preset/append headers to be completed with bodies. Also collect block hashes - # for fetching missing blocks. - blk.blocks.setLen(offset + ivReq.len) - var request = BlockBodiesRequest( - blockHashes: newSeq[Hash32](ivReq.len) - ) - for n in 1u ..< ivReq.len: - let header = ctx.hdrCache.get(ivReq.minPt + n).valueOr: - # There is nothing one can do here - info "Block header missing (reorg triggered)", ivReq, n, - nth=(ivReq.minPt + n).bnStr - # So require reorg - blk.blocks.setLen(offset) - ctx.poolMode = true - return false - request.blockHashes[n - 1] = header.parentHash - blk.blocks[offset + n].header = header - blk.blocks[offset].header = ctx.hdrCache.get(ivReq.minPt).valueOr: - # There is nothing one can do here - info "Block header missing (reorg triggered)", ivReq, n=0, - nth=ivReq.minPt.bnStr - # So require reorg - blk.blocks.setLen(offset) - ctx.poolMode = true - return false - request.blockHashes[ivReq.len - 1] = - blk.blocks[offset + ivReq.len - 1].header.computeBlockHash - - # Fetch bodies - let bodies = block: - let rc = await buddy.bodiesFetch(request, info) - if rc.isErr: - blk.blocks.setLen(offset) - return false - rc.value - - # Append bodies, note that the bodies are not fully verified here but rather - # when they are imported 
and executed. - let nBodies = bodies.len.uint64 - if nBodies < ivReq.len: - blk.blocks.setLen(offset + nBodies) - block loop: - for n in 0 ..< nBodies: - block checkTxLenOk: - if blk.blocks[offset + n].header.transactionsRoot != emptyRoot: - if 0 < bodies[n].transactions.len: - break checkTxLenOk - else: - if bodies[n].transactions.len == 0: - break checkTxLenOk - # Oops, cut off the rest - blk.blocks.setLen(offset + n) - buddy.fetchRegisterError() - trace info & ": cut off fetched junk", peer=buddy.peer, ivReq, n, - nTxs=bodies[n].transactions.len, nBodies, bdyErrors=buddy.bdyErrors - break loop - - blk.blocks[offset + n].transactions = bodies[n].transactions - blk.blocks[offset + n].uncles = bodies[n].uncles - blk.blocks[offset + n].withdrawals = bodies[n].withdrawals - - if offset < blk.blocks.len.uint64: - return true - - buddy.only.nBdyProcErrors.inc - return false + # Remove from queue + discard ctx.blk.staged.delete qItem.key -# ------------------------------------------------------------------------------ -# Public functions -# ------------------------------------------------------------------------------ + # Import blocks list + await ctx.blocksImport(maybePeer, qItem.data.blocks, info) -func blocksStagedCanImportOk*(ctx: BeaconCtxRef): bool = - ## Check whether the queue is at its maximum size so import can start with - ## a full queue. - ## - if ctx.poolMode: - # Re-org is scheduled - return false + # Import probably incomplete, so a partial roll back may be needed + let lastBn = qItem.data.blocks[^1].header.number + if ctx.blk.topImported < lastBn: + ctx.blocksUnprocAppend(ctx.blk.topImported+1, lastBn) - if 0 < ctx.blk.staged.len: - # Start importing if there are no more blocks available. So they have - # either been all staged, or are about to be staged. For the latter - # case wait until finished with current block downloads. 
- if ctx.blocksUnprocAvail() == 0: + nImported += ctx.blk.topImported - minNum + 1 + # End while loop - # Wait until finished with current block downloads - return ctx.blocksBorrowedIsEmpty() + if 0 < nImported: + info "Blocks serialised and imported", + topImported=ctx.blk.topImported.bnStr, nImported, + nStagedQ=ctx.blk.staged.len, nSyncPeers=ctx.pool.nBuddies, switchPeer - # Make sure that the lowest block is available, already. Or the other way - # round: no unprocessed block number range precedes the least staged block. - if ctx.blk.staged.ge(0).value.key < ctx.blocksUnprocTotalBottom(): - # Also suggest importing blocks if there is currently no peer active. - # The `unprocessed` ranges will contain some higher number block ranges, - # but these can be fetched later. - if ctx.pool.nBuddies == 0: - return true - - # If the last peer is labelled `slow` it will be ignored for the sake - # of deciding whether to execute blocks. - # - # As a consequence, the syncer will import blocks immediately allowing - # the syncer to collect more sync peers. - if ctx.pool.nBuddies == 1 and ctx.pool.blkLastSlowPeer.isSome: - return true - - # If importing starts while peers are actively downloading, the system - # tends to loose download peers, most probably due to high system - # activity. - # - # * Typical download time to download and stage a queue record ~15s (raw - # download time typically ranges ~30ms ..~10s) - # - # * Anecdotal time to connect to a new download peer ~5m..~10m - # - # This implies that a staged full queue with 4 records typically does - # not take more than a minute, much less if enough peers are available - # while the penalty of potentially losing peers is a multiple of the - # queue ramp up time. - # - # So importing does not start before the queue is filled up. 
- if ctx.pool.blkStagedLenHwm <= ctx.blk.staged.len: + elif 0 < ctx.blk.staged.len and not switchPeer: + trace info & ": no blocks unqueued", peer=($maybePeer), + topImported=ctx.blk.topImported.bnStr, nStagedQ=ctx.blk.staged.len, + nSyncPeers=ctx.pool.nBuddies - # Wait until finished with current block downloads - return ctx.blocksBorrowedIsEmpty() + trace info & ": blocksStagedProcess end", peer=($maybePeer), + topImported=ctx.blk.topImported.bnStr, nImported, minNum, + nStagedQ=ctx.blk.staged.len, nSyncPeers=ctx.pool.nBuddies, switchPeer - false + return not switchPeer +# ------------------------------------------------------------------------------ +# Public functions +# ------------------------------------------------------------------------------ -func blocksStagedFetchOk*(buddy: BeaconBuddyRef): bool = - ## Check whether body records can be fetched and stored on the `staged` queue. +func blocksStagedCollectOk*(buddy: BeaconBuddyRef): bool = + ## Check whether body records can be fetched and imported or stored + ## on the `staged` queue. ## if buddy.ctrl.running: - let ctx = buddy.ctx - if not ctx.poolMode: - - if 0 < ctx.blocksUnprocAvail(): - # Fetch if there is space on the queue. - if ctx.blk.staged.len < ctx.pool.blkStagedLenHwm: - return true - - # Make sure that there is no gap at the bottom which needs to be - # fetched regardless of the length of the queue. - if ctx.blocksUnprocAvailBottom() < ctx.blk.staged.ge(0).value.key: - return true + if 0 < ctx.blocksUnprocAvail() and + not ctx.blocksModeStopped(): + return true false +proc blocksStagedProcessOk*(ctx: BeaconCtxRef): bool = + ## Check whether import processing is possible + ## + not ctx.poolMode and + 0 < ctx.blk.staged.len +# -------------- proc blocksStagedCollect*( buddy: BeaconBuddyRef; info: static[string]; - ): Future[bool] {.async: (raises: []).} = - ## Collect bodies and stage them. + ) {.async: (raises: []).} = + ## Collect bodies and import or stage them. 
## let ctx = buddy.ctx peer = buddy.peer - if ctx.blocksUnprocAvail() == 0 or # all done already? - ctx.poolMode: # reorg mode? - return false # nothing to do - - let - # Fetch the full range of headers to be completed to blocks - iv = ctx.blocksUnprocFetch(nFetchBodiesBatch.uint64).expect "valid interval" + if ctx.blocksUnprocIsEmpty(): + return # no action var - # This value is used for splitting the interval `iv` into - # `already-collected + [ivBottom,somePt] + [somePt+1,iv.maxPt]` where the - # middle interval `[ivBottom,somePt]` will be fetched from the network. - ivBottom = iv.minPt - - # This record will accumulate the fetched headers. It must be on the heap - # so that `async` can capture that properly. - blk = (ref BlocksForImport)() - - # Flag, not to reset error count - haveError = false - - while true: - # Extract bottom range interval and fetch/stage it - let - ivReqMax = if iv.maxPt < ivBottom + nFetchBodiesRequest - 1: iv.maxPt - else: ivBottom + nFetchBodiesRequest - 1 - - # Request interval - ivReq = BnRange.new(ivBottom, ivReqMax) - - # Current length of the blocks queue. This is used to calculate the - # response length from the network. - nBlkBlocks = blk.blocks.len - - # Fetch and extend staging record - if not await buddy.fetchAndCheck(ivReq, blk, info): - if ctx.poolMode: - # Reorg requested? - ctx.blocksUnprocCommit(iv, iv) - return false - - haveError = true - - # Throw away first time block fetch data. Keep other data for a - # partially assembled list. - if nBlkBlocks == 0: - buddy.updateBuddyErrorState() - - if ctx.pool.seenData: - trace info & ": current blocks discarded", peer, iv, ivReq, - nStaged=ctx.blk.staged.len, ctrl=buddy.ctrl.state, - bdyErrors=buddy.bdyErrors - else: - # Collect peer for detecting cul-de-sac syncing (i.e. non-existing - # block chain or similar.) This covers the case when headers are - # available but not block bodies. 
-      ctx.pool.failedPeers.incl buddy.peerID
-
-      debug info & ": no blocks yet", peer, ctrl=buddy.ctrl.state,
-        failedPeers=ctx.pool.failedPeers.len, bdyErrors=buddy.bdyErrors
-
-      ctx.blocksUnprocCommit(iv, iv)
-      # At this stage allow a task switch so that some other peer might try
-      # to work on the currently returned interval.
-      try: await sleepAsync asyncThreadSwitchTimeSlot
-      except CancelledError: discard
-      return false
-
-    # So there were some bodies downloaded already. Turn back unused data
-    # and proceed with staging.
-    trace info & ": list partially failed", peer, iv, ivReq,
-      unused=BnRange.new(ivBottom,iv.maxPt)
-    # There is some left over to store back
-    ctx.blocksUnprocCommit(iv, ivBottom, iv.maxPt)
-    break
+  var
+    nImported = 0u64                   # statistics, to be updated
+    nQueued = 0                        # ditto
+
+  block fetchBlocksBody:
+    #
+    # Start deterministically. Explicitly fetch/append by parent hash.
+    #
+    # Exactly one peer can fetch and import store blocks directly on the `FC`
+    # module. All other peers fetch and queue blocks for later serialisation.
+    while true:
+      let bottom = ctx.blocksUnprocAvailBottom() - 1
+      #
+      # A direct fetch and blocks import is possible if the next block to
+      # fetch neighbours the already imported blocks ending at `lastImported`.
+ # So this criteria is unique at a given time and when an interval is + # taken out of the `unproc` pool: + # :: + # |------------------ unproc pool + # |-------| block interval to fetch next + # ----------| already imported into `FC` module + # bottom + # topImported + # + # After claiming the block interval that will be processed next for the + # deterministic fetch, the situation for the new `bottom` would look like + # :: + # |--------- unproc pool + # |-------| block interval to fetch next + # ----------| already imported into `FC` module + # topImported bottom + # + if ctx.blk.topImported < bottom: + break - # There are block body data for this scrum - ctx.pool.seenData = true + # Throw away overlap (should not happen anyway) + if bottom < ctx.blk.topImported: + discard ctx.blocksUnprocFetch(ctx.blk.topImported - bottom).expect("iv") - # Update remaining interval - let ivRespLen = blk.blocks.len - nBlkBlocks - if iv.maxPt < ivBottom + ivRespLen.uint64: - # All collected - ctx.blocksUnprocCommit(iv) - break + trace info & ": blocksStagedCollect direct loop", peer, + ctrl=buddy.ctrl.state, poolMode=ctx.poolMode, + syncState=ctx.pool.lastState, topImported=ctx.blk.topImported.bnStr, + bottom=bottom.bnStr - ivBottom += ivRespLen.uint64 # will mostly result into `ivReq.maxPt+1` + # Fetch blocks and verify result + let blocks = (await buddy.blocksFetch(nFetchBodiesRequest, info)).valueOr: + break fetchBlocksBody # done, exit this function - if buddy.ctrl.stopped or ctx.poolMode: - # There is some left over to store back. And `ivBottom <= iv.maxPt` - # because of the check against `ivRespLen` above. 
- ctx.blocksUnprocCommit(iv, ivBottom, iv.maxPt) - break + # Set flag that there were some blocks fetched at all + ctx.pool.seenData = true # blocks data exist - # Store `blk` chain on the `staged` queue - let qItem = ctx.blk.staged.insert(iv.minPt).valueOr: - raiseAssert info & ": duplicate key on staged queue iv=" & $iv - qItem.data = blk[] + # Import blocks (no staging) + await ctx.blocksImport(Opt.some(peer), blocks, info) - # Reset block process errors (not too many consecutive failures this time) - if not haveError: - buddy.only.nBdyProcErrors = 0 + # Import probably incomplete, so a partial roll back may be needed + let lastBn = blocks[^1].header.number + if ctx.blk.topImported < lastBn: + ctx.blocksUnprocAppend(ctx.blk.topImported + 1, lastBn) - info "Downloaded blocks", iv=blk.blocks.bnStr, - nBlocks=blk.blocks.len, nStaged=ctx.blk.staged.len, - nSyncPeers=ctx.pool.nBuddies + # statistics + nImported += ctx.blk.topImported - blocks[0].header.number + 1 - return true + # Buddy might have been cancelled while importing blocks. + if buddy.ctrl.stopped or ctx.poolMode: + break fetchBlocksBody # done, exit this function + # End while: headersUnprocFetch() + blocksImport() -proc blocksStagedImport*( - ctx: BeaconCtxRef; - info: static[string]; - ): Future[bool] - {.async: (raises: []).} = - ## Import/execute blocks record from staged queue - ## - let qItem = ctx.blk.staged.ge(0).valueOr: - # Empty queue - return false + # Continue fetching blocks and queue them (if any) + if ctx.blk.staged.len + ctx.blk.reserveStaged < blocksStagedQueueLengthHwm: - # Make sure that the lowest block is available, already. Or the other way - # round: no unprocessed block number range precedes the least staged block. 
- let uBottom = ctx.blocksUnprocTotalBottom() - if uBottom < qItem.key: - trace info & ": block queue not ready yet", nSyncPeers=ctx.pool.nBuddies, - unprocBottom=uBottom.bnStr, least=qItem.key.bnStr - return false + # Fetch blocks and verify result + ctx.blk.reserveStaged.inc # Book a slot on `staged` + let rc = await buddy.blocksFetch(nFetchBodiesRequest, info) + ctx.blk.reserveStaged.dec # Free that slot again - # Remove from queue - discard ctx.blk.staged.delete qItem.key + if rc.isErr: + break fetchBlocksBody # done, exit this function - let - nBlocks = qItem.data.blocks.len - iv = BnRange.new(qItem.key, qItem.key + nBlocks.uint64 - 1) - - info "Importing blocks", iv, nBlocks, - base=ctx.chain.baseNumber.bnStr, head=ctx.chain.latestNumber.bnStr, - target=ctx.head.bnStr - - var maxImport = iv.maxPt # tentatively assume all ok - block importLoop: - for n in 0 ..< nBlocks: - let nBn = qItem.data.blocks[n].header.number - if nBn <= ctx.chain.baseNumber: - trace info & ": ignoring block less eq. base", n, iv, - B=ctx.chain.baseNumber.bnStr, L=ctx.chain.latestNumber.bnStr, - nthBn=nBn.bnStr, nthHash=ctx.getNthHash(qItem.data, n).short - continue - - try: - (await ctx.chain.importBlock(qItem.data.blocks[n])).isOkOr: - # The way out here is simply to re-compile the block queue. At any - # point, the `FC` module data area might have been moved to a new - # canonical branch. - # - ctx.poolMode = true - warn info & ": import block error (reorg triggered)", n, iv, - B=ctx.chain.baseNumber.bnStr, L=ctx.chain.latestNumber.bnStr, - nthBn=nBn.bnStr, nthHash=ctx.getNthHash(qItem.data, n).short, - `error`=error - maxImport = nBn - break importLoop - # isOk => continue - except CancelledError: - maxImport = nBn # shutdown? - break importLoop - - # Allow pseudo/async thread switch. - (await ctx.updateAsyncTasks()).isOkOr: - maxImport = nBn # shutdown? 
- break importLoop - - # Import probably incomplete, so a partial roll back may be needed - if maxImport < iv.maxPt: - ctx.blocksUnprocAppend(maxImport+1, iv.maxPt) - - info "Import done", iv=(iv.minPt, maxImport).bnStr, - nBlocks=(maxImport-iv.minPt+1), nFailed=(iv.maxPt-maxImport), - base=ctx.chain.baseNumber.bnStr, head=ctx.chain.latestNumber.bnStr, - target=ctx.head.bnStr - - return true + let + blocks = rc.value + + # Insert blocks list on the `staged` queue + key = blocks[0].header.number + qItem = ctx.blk.staged.insert(key).valueOr: + raiseAssert info & ": duplicate key on staged queue iv=" & + (key, blocks[^1].header.number).bnStr + + qItem.data.blocks = blocks # store `blocks[]` list + + nQueued += blocks.len # statistics + + # End block: `fetchBlocksBody` + + if nImported == 0 and nQueued == 0: + if not ctx.pool.seenData and + buddy.peerID notin ctx.pool.failedPeers and + buddy.ctrl.stopped: + # Collect peer for detecting cul-de-sac syncing (i.e. non-existing + # block chain or similar.) 
+ ctx.pool.failedPeers.incl buddy.peerID + + debug info & ": no blocks yet", peer, ctrl=buddy.ctrl.state, + poolMode=ctx.poolMode, syncState=ctx.pool.lastState, + failedPeers=ctx.pool.failedPeers.len, bdyErrors=buddy.bdyErrors + return + info "Queued/staged or imported blocks", + topImported=ctx.blk.topImported.bnStr, + unprocBottom=(if ctx.blocksModeStopped(): "n/a" + else: ctx.blocksUnprocAvailBottom.bnStr), + nQueued, nImported, nStagedQ=ctx.blk.staged.len, + nSyncPeers=ctx.pool.nBuddies + + +template blocksStagedProcess*( + ctx: BeaconCtxRef; + info: static[string]; + ): auto = + ctx.blocksStagedProcessImpl(Opt.none(Peer), info) + +template blocksStagedProcess*( + buddy: BeaconBuddyRef; + info: static[string]; + ): auto = + buddy.ctx.blocksStagedProcessImpl(Opt.some(buddy.peer), info) proc blocksStagedReorg*(ctx: BeaconCtxRef; info: static[string]) = @@ -429,7 +284,7 @@ proc blocksStagedReorg*(ctx: BeaconCtxRef; info: static[string]) = # Reset block queues debug info & ": Flushing block queues", nUnproc=ctx.blocksUnprocTotal(), - nStaged=ctx.blk.staged.len, nReorg=ctx.pool.nReorg + nStagedQ=ctx.blk.staged.len, nReorg=ctx.pool.nReorg ctx.blocksUnprocClear() ctx.blk.staged.clear() diff --git a/execution_chain/sync/beacon/worker/blocks_staged/staged_blocks.nim b/execution_chain/sync/beacon/worker/blocks_staged/staged_blocks.nim new file mode 100644 index 0000000000..eb4097aa70 --- /dev/null +++ b/execution_chain/sync/beacon/worker/blocks_staged/staged_blocks.nim @@ -0,0 +1,211 @@ +# Nimbus +# Copyright (c) 2023-2025 Status Research & Development GmbH +# Licensed and distributed under either of +# * MIT license (license terms in the root directory or at +# https://opensource.org/licenses/MIT). +# * Apache v2 license (license terms in the root directory or at +# https://www.apache.org/licenses/LICENSE-2.0). +# at your option. This file may not be copied, modified, or distributed +# except according to those terms. 
+
+{.push raises:[].}
+
+import
+  pkg/[chronicles, chronos, results],
+  pkg/eth/common,
+  pkg/stew/interval_set,
+  ../../../../networking/p2p,
+  ../../../wire_protocol/types,
+  ../../worker_desc,
+  ../[blocks_unproc, helpers, update],
+  ./bodies
+
+# ------------------------------------------------------------------------------
+# Private helpers
+# ------------------------------------------------------------------------------
+
+formatIt(Hash32):
+  it.short
+
+proc getNthHash(ctx: BeaconCtxRef; blocks: seq[EthBlock]; n: int): Hash32 =
+  ctx.hdrCache.getHash(blocks[n].header.number).valueOr:
+    return zeroHash32
+
+# ------------------------------------------------------------------------------
+# Private functions
+# ------------------------------------------------------------------------------
+
+proc blocksFetchCheckImpl(
+    buddy: BeaconBuddyRef;
+    iv: BnRange;
+    info: static[string];
+      ): Future[Opt[seq[EthBlock]]]
+      {.async: (raises: []).} =
+  ## From the p2p/ethXX network fetch the argument range `iv` of block bodies
+  ## and assemble a list of blocks to be returned.
+  ##
+  ## The block bodies are heuristically verified, the headers are taken from
+  ## the header chain cache.
+  ##
+  let
+    ctx = buddy.ctx
+    peer = buddy.peer
+
+  # Preset/append headers to be completed with bodies. Also collect block hashes
+  # for fetching missing blocks.
+ var + request = BlockBodiesRequest(blockHashes: newSeqUninit[Hash32](iv.len)) + blocks = newSeq[EthBlock](iv.len) + + for n in 1u ..< iv.len: + let header = ctx.hdrCache.get(iv.minPt + n).valueOr: + # There is nothing one can do here + info "Block header missing (reorg triggered)", peer, iv, n, + nth=(iv.minPt + n).bnStr + ctx.poolMode = true # So require reorg + return Opt.none(seq[EthBlock]) + request.blockHashes[n - 1] = header.parentHash + blocks[n].header = header + blocks[0].header = ctx.hdrCache.get(iv.minPt).valueOr: + # There is nothing one can do here + info "Block header missing (reorg triggered)", peer, iv, n=0, + nth=iv.minPt.bnStr + ctx.poolMode = true # So require reorg + return Opt.none(seq[EthBlock]) + request.blockHashes[^1] = blocks[^1].header.computeBlockHash + + # Fetch bodies + let bodies = (await buddy.bodiesFetch(request, info)).valueOr: + return Opt.none(seq[EthBlock]) + if buddy.ctrl.stopped: + return Opt.none(seq[EthBlock]) + + # Append bodies, note that the bodies are not fully verified here but rather + # when they are imported and executed. 
+ let nBodies = bodies.len.uint64 + if nBodies < iv.len: + blocks.setLen(nBodies) + block loop: + for n in 0 ..< nBodies: + block checkTxLenOk: + if blocks[n].header.transactionsRoot != emptyRoot: + if 0 < bodies[n].transactions.len: + break checkTxLenOk + else: + if bodies[n].transactions.len == 0: + break checkTxLenOk + # Oops, cut off the rest + blocks.setLen(n) # curb off junk + buddy.fetchRegisterError() + trace info & ": cut off junk blocks", peer, iv, n, + nTxs=bodies[n].transactions.len, nBodies, bdyErrors=buddy.bdyErrors + break loop + + blocks[n].transactions = bodies[n].transactions + blocks[n].uncles = bodies[n].uncles + blocks[n].withdrawals = bodies[n].withdrawals + + if 0 < blocks.len.uint64: + return Opt.some(blocks) + + buddy.only.nBdyProcErrors.inc + return Opt.none(seq[EthBlock]) + +# ------------------------------------------------------------------------------ +# Public functions +# ------------------------------------------------------------------------------ + +func blocksModeStopped*(ctx: BeaconCtxRef): bool = + ## Helper, checks whether there is a general stop conditions based on + ## state settings (not on sync peer ctrl as `buddy.ctrl.running`.) + ctx.poolMode or + ctx.pool.lastState != processingBlocks + + +proc blocksFetch*( + buddy: BeaconBuddyRef; + num: uint; + info: static[string]; + ): Future[Opt[seq[EthBlock]]] + {.async: (raises: []).} = + ## From the p2p/ethXX network fetch as many blocks as given as argument `num`. 
+  let
+    ctx = buddy.ctx
+
+    # Fetch next available interval
+    iv = ctx.blocksUnprocFetch(num).valueOr:
+      return Opt.none(seq[EthBlock])
+
+    # Fetch blocks and verify result
+    rc = await buddy.blocksFetchCheckImpl(iv, info)
+
+  # Commit blocks received
+  if rc.isErr:
+    ctx.blocksUnprocCommit(iv, iv)
+  else:
+    ctx.blocksUnprocCommit(iv, iv.minPt + rc.value.len.uint64, iv.maxPt)
+
+  return rc
+
+
+proc blocksImport*(
+    ctx: BeaconCtxRef;
+    maybePeer: Opt[Peer];
+    blocks: seq[EthBlock];
+    info: static[string];
+      ) {.async: (raises: []).} =
+  ## Import/execute a list of argument blocks. The function sets the global
+  ## block number of the last executed block which might precede the least block
+  ## number from the argument list in case of an error.
+  ##
+  let iv = BnRange.new(blocks[0].header.number, blocks[^1].header.number)
+  doAssert iv.len == blocks.len.uint64
+
+  trace info & ": Start importing blocks", peer=($maybePeer), iv,
+    nBlocks=iv.len, base=ctx.chain.baseNumber.bnStr,
+    head=ctx.chain.latestNumber.bnStr, target=ctx.head.bnStr
+
+  block loop:
+    for n in 0 ..< blocks.len:
+      let nBn = blocks[n].header.number
+
+      if nBn <= ctx.chain.baseNumber:
+        trace info & ": ignoring block less eq. base", n, iv, nBlocks=iv.len,
+          nthBn=nBn.bnStr, nthHash=ctx.getNthHash(blocks, n),
+          B=ctx.chain.baseNumber.bnStr, L=ctx.chain.latestNumber.bnStr
+
+        ctx.blk.topImported = nBn # well, not really imported
+        continue
+
+      try:
+        (await ctx.chain.importBlock(blocks[n])).isOkOr:
+          # The way out here is simply to re-compile the block queue. At any
+          # point, the `FC` module data area might have been moved to a new
+          # canonical branch.
+          #
+          ctx.poolMode = true
+          warn info & ": import block error (reorg triggered)", n, iv,
+            nBlocks=iv.len, nthBn=nBn.bnStr, nthHash=ctx.getNthHash(blocks, n),
+            B=ctx.chain.baseNumber.bnStr, L=ctx.chain.latestNumber.bnStr,
+            `error`=error
+          break loop
+        # isOk => next instruction
+      except CancelledError:
+        break loop # shutdown?
+ + ctx.blk.topImported = nBn # Block imported OK + + # Allow pseudo/async thread switch. + (await ctx.updateAsyncTasks()).isOkOr: + break loop + + info "Imported blocks", iv=(if iv.minPt <= ctx.blk.topImported: + (iv.minPt, ctx.blk.topImported).bnStr else: "n/a"), + nBlocks=(ctx.blk.topImported - iv.minPt + 1), + nFailed=(iv.maxPt - ctx.blk.topImported), + base=ctx.chain.baseNumber.bnStr, head=ctx.chain.latestNumber.bnStr, + target=ctx.head.bnStr + +# ------------------------------------------------------------------------------ +# End +# ------------------------------------------------------------------------------ diff --git a/execution_chain/sync/beacon/worker/headers_staged.nim b/execution_chain/sync/beacon/worker/headers_staged.nim index 209b138a99..8d2ab20e7f 100644 --- a/execution_chain/sync/beacon/worker/headers_staged.nim +++ b/execution_chain/sync/beacon/worker/headers_staged.nim @@ -22,39 +22,35 @@ import # Public functions # ------------------------------------------------------------------------------ -func headersStagedFetchOk*(buddy: BeaconBuddyRef): bool = - # Helper for `worker.nim`, etc. - 0 < buddy.ctx.headersUnprocAvail() and - buddy.ctrl.running and - not buddy.ctx.collectModeStopped() - +func headersStagedCollectOk*(buddy: BeaconBuddyRef): bool = + ## Helper for `worker.nim`, etc. + if buddy.ctrl.running: + let ctx = buddy.ctx + if 0 < ctx.headersUnprocAvail() and + not ctx.collectModeStopped(): + return true + false proc headersStagedCollect*( buddy: BeaconBuddyRef; info: static[string]; - ): Future[bool] {.async: (raises: []).} = + ) {.async: (raises: []).} = ## Collect headers and either stash them on the header chain cache directly, ## or stage then on the header queue to get them serialised, later. The ## header queue serialisation is needed in case of several peers fetching ## and processing headers concurrently. 
## - ## If there are headers left to process, tThis function will always stages - ## a header list record on the header queue for serialisation, and returns - ## `true`. - ## - ## Otherwise the function returns `false` if there are no headers left to be - ## processed. - ## let ctx = buddy.ctx peer = buddy.peer if ctx.headersUnprocIsEmpty() or ctx.hdrCache.state != collecting: - return false # no action + return # no action + var - nDeterministic = 0u64 # statistics, to be updated - nOpportunistic = 0 # ditto + nStored = 0u64 # statistics, to be updated + nQueued = 0 # ditto block fetchHeadersBody: # @@ -78,7 +74,7 @@ proc headersStagedCollect*( # dangling # # After claiming the block interval that will be processed next for the - # deterministic fetch, the situation looks like + # deterministic fetch, the situation for the new `top` would look like # :: # ---------| unproc pool # |-------| block interval to fetch next @@ -110,16 +106,16 @@ proc headersStagedCollect*( # Check whether there were some headers fetched at all if bottom < iv.maxPt: - nDeterministic += (iv.maxPt - bottom) # statistics + nStored += (iv.maxPt - bottom) # statistics ctx.pool.seenData = true # header data exist # Job might have been cancelled or completed while downloading headers. # If so, no more bookkeeping of headers must take place. The *books* # might have been reset and prepared for the next stage. 
if ctx.collectModeStopped(): - trace info & ": deterministic headers fetch stopped", peer, iv, - bottom=bottom.bnStr, nDeterministic, syncState=ctx.pool.lastState, - cacheMode=ctx.hdrCache.state + trace info & ": stopped fetching/storing headers", peer, iv, + bottom=bottom.bnStr, nStored, ctrl=buddy.ctrl.state, + syncState=ctx.pool.lastState, cacheMode=ctx.hdrCache.state break fetchHeadersBody # done, exit this function # Commit partially processed block numbers @@ -129,12 +125,11 @@ proc headersStagedCollect*( ctx.headersUnprocCommit(iv) # all headers processed - debug info & ": deterministic headers fetch count", peer, + debug info & ": fetched headers count", peer, unprocTop=ctx.headersUnprocAvailTop.bnStr, D=ctx.dangling.bnStr, - nDeterministic, nStaged=ctx.hdr.staged.len, ctrl=buddy.ctrl.state + nStored, nStagedQ=ctx.hdr.staged.len, ctrl=buddy.ctrl.state - # Buddy might have been cancelled while downloading headers. Still - # bookkeeping (aka commiting unused `iv`) needed to proceed. + # Buddy might have been cancelled while downloading headers. if buddy.ctrl.stopped: break fetchHeadersBody @@ -142,7 +137,8 @@ proc headersStagedCollect*( # Continue opportunistically fetching by block number rather than hash. The # fetched headers need to be staged and checked/serialised later. - block: + if ctx.hdr.staged.len + ctx.hdr.reserveStaged < headersStagedQueueLengthHwm: + let # Comment see deterministic case iv = ctx.headersUnprocFetch(nFetchHeadersBatch).valueOr: @@ -152,19 +148,21 @@ proc headersStagedCollect*( # heap so that `async` can capture that properly. lhc = (ref LinkedHChain)(peerID: buddy.peerID) - # Fetch headers and fill up the headers list of `lhc`. The function - # returns the last unprocessed block number. - bottom = await buddy.collectAndStageOnMemQueue(iv, lhc, info) + # Fetch headers and fill up the headers list of `lhc`. The function + # returns the last unprocessed block number. 
+ ctx.hdr.reserveStaged.inc # Book a slot on `staged` + let bottom = await buddy.collectAndStageOnMemQueue(iv, lhc, info) + ctx.hdr.reserveStaged.dec # Free that slot again - nOpportunistic = lhc.revHdrs.len # statistics + nQueued = lhc.revHdrs.len # statistics # Job might have been cancelled or completed while downloading headers. # If so, no more bookkeeping of headers must take place. The *books* # might have been reset and prepared for the next stage. if ctx.collectModeStopped(): - trace info & ": staging headers fetch stopped", peer, iv, - bottom=bottom.bnStr, nDeterministic, syncState=ctx.pool.lastState, - cacheMode=ctx.hdrCache.state + trace info & ": stopped fetching/staging headers", peer, iv, + bottom=bottom.bnStr, nStored, ctrl=buddy.ctrl.state, + syncState=ctx.pool.lastState, cacheMode=ctx.hdrCache.state break fetchHeadersBody # done, exit this function # Store `lhc` chain on the `staged` queue if there is any @@ -183,43 +181,48 @@ proc headersStagedCollect*( # End block: `fetchHeadersBody` - let nHeaders = nDeterministic + nOpportunistic.uint64 - if nHeaders == 0: - if not ctx.pool.seenData: + if nStored == 0 and nQueued == 0: + if not ctx.pool.seenData and + buddy.peerID notin ctx.pool.failedPeers and + buddy.ctrl.stopped: # Collect peer for detecting cul-de-sac syncing (i.e. non-existing # block chain or similar.) 
ctx.pool.failedPeers.incl buddy.peerID debug info & ": no headers yet", peer, ctrl=buddy.ctrl.state, + cacheMode=ctx.hdrCache.state, syncState=ctx.pool.lastState, failedPeers=ctx.pool.failedPeers.len, hdrErrors=buddy.hdrErrors - return false + return - info "Downloaded headers", + info "Queued/staged or DB/stored headers", unprocTop=(if ctx.collectModeStopped(): "n/a" else: ctx.headersUnprocAvailTop.bnStr), - nHeaders, nStaged=ctx.hdr.staged.len, nSyncPeers=ctx.pool.nBuddies + nQueued, nStored, nStagedQ=ctx.hdr.staged.len, nSyncPeers=ctx.pool.nBuddies - return true - - -proc headersStagedProcess*(buddy: BeaconBuddyRef; info: static[string]) = +proc headersStagedProcess*(buddy: BeaconBuddyRef; info: static[string]): bool = ## Store headers from the `staged` queue onto the header chain cache. ## + ## The function returns `false` if the caller should make sure to allow + ## to switch to another sync peer for deterministically filling the gap + ## between the top of the queue and the `dangling` block number. 
+ ## let ctx = buddy.ctx peer = buddy.peer + if ctx.hdr.staged.len == 0: - return # avoids logging + return false # switch peer var - nProcessed = 0 # statistics + nStored = 0 # statistics + switchPeer = false # for return code while ctx.hdrCache.state == collecting: # Fetch list with largest block numbers let qItem = ctx.hdr.staged.le(high BlockNumber).valueOr: - break # all done + break # all done let minNum = qItem.data.revHdrs[^1].number @@ -227,17 +230,12 @@ proc headersStagedProcess*(buddy: BeaconBuddyRef; info: static[string]) = dangling = ctx.dangling.number if maxNum + 1 < dangling: debug info & ": gap, serialisation postponed", peer, - qItem=qItem.data.bnStr, D=dangling.bnStr, nProcessed, - nStaged=ctx.hdr.staged.len, nSyncPeers=ctx.pool.nBuddies - return # there is a gap -- come back later - - # Overlap must not happen - if maxNum+1 != dangling: - raiseAssert info & ": Overlap" & - " qItem=" & qItem.data.bnStr & " D=" & dangling.bnStr + qItem=qItem.data.bnStr, D=dangling.bnStr, nStored, + nStagedQ=ctx.hdr.staged.len, nSyncPeers=ctx.pool.nBuddies + switchPeer = true # there is a gap -- come back later + break - # Process item from `staged` queue. So it is not needed in the list, - # anymore. + # Remove from queue discard ctx.hdr.staged.delete(qItem.key) # Store headers on database @@ -248,72 +246,46 @@ proc headersStagedProcess*(buddy: BeaconBuddyRef; info: static[string]) = buddy.incHdrProcErrors qItem.data.peerID debug info & ": discarding staged header list", peer, - qItem=qItem.data.bnStr, D=ctx.dangling.bnStr, nProcessed, + qItem=qItem.data.bnStr, D=ctx.dangling.bnStr, nStored, nDiscarded=qItem.data.revHdrs.len, nSyncPeers=ctx.pool.nBuddies, `error`=error - return + switchPeer = true + break # Antecedent `dangling` of the header cache might not be at `revHdrs[^1]`. 
let revHdrsLen = maxNum - ctx.dangling.number + 1 - nProcessed += revHdrsLen.int # count headers - + nStored += revHdrsLen.int # count headers # End while loop - if headersStagedQueueLengthLwm < ctx.hdr.staged.len: - ctx.poolMode = true + if 0 < nStored: + info "Headers serialised and stored", D=ctx.dangling.bnStr, nStored, + nStagedQ=ctx.hdr.staged.len, nSyncPeers=ctx.pool.nBuddies, switchPeer - debug info & ": headers serialised and stored", peer, D=ctx.dangling.bnStr, - nProcessed, nStagedLists=ctx.hdr.staged.len, nSyncPeers=ctx.pool.nBuddies, - reorgReq=ctx.poolMode + elif 0 < ctx.hdr.staged.len and not switchPeer: + trace info & ": no headers processed", peer, D=ctx.dangling.bnStr, + nStagedQ=ctx.hdr.staged.len, nSyncPeers=ctx.pool.nBuddies + not switchPeer proc headersStagedReorg*(ctx: BeaconCtxRef; info: static[string]) = ## Some pool mode intervention. The effect is that all concurrent peers ## finish up their current work and run this function here (which might - ## do nothing.) This stopping should be enough in most cases to re-organise - ## when re-starting concurrently, again. + ## do nothing.) Pool mode is used to sync peers, e.g. for a forced state + ## change. ## - ## Only when the staged list gets too big it will be cleared to be re-filled - ## again. In therory, this might happen on a really slow lead actor - ## (downloading deterministically by hashes) and many fast opportunistic - ## actors filling the staged queue. 
- ## - doAssert ctx.headersBorrowedIsEmpty() - - if ctx.hdr.staged.len == 0: - # nothing to do - return - - # Update counter - ctx.pool.nReorg.inc - # Check for cancel request if ctx.pool.lastState == cancelHeaders: + # Update counter + ctx.pool.nReorg.inc + # Reset header queues debug info & ": Flushing header queues", nUnproc=ctx.headersUnprocTotal(), - nStaged=ctx.hdr.staged.len, nReorg=ctx.pool.nReorg + nStagedQ=ctx.hdr.staged.len, nReorg=ctx.pool.nReorg - ctx.headersUnprocClear() + ctx.headersUnprocClear() # clears `unprocessed` and `borrowed` list ctx.hdr.staged.clear() - return - - let nStaged = ctx.hdr.staged.len - if headersStagedQueueLengthHwm < nStaged: - debug info & ": hwm reached, flushing staged queue", - headersStagedQueueLengthLwm, nStaged, nReorg=ctx.pool.nReorg - - # Remove the leading `1 + nStaged - headersStagedQueueLengthLwm` entries - # from list so that the upper `headersStagedQueueLengthLwm-1` entries - # remain. - for _ in 0 .. nStaged - headersStagedQueueLengthLwm: - let - qItem = ctx.hdr.staged.ge(BlockNumber 0).expect "valid record" - key = qItem.key - nHeaders = qItem.data.revHdrs.len.uint64 - ctx.headersUnprocAppend(key - nHeaders + 1, key) - discard ctx.hdr.staged.delete key # ------------------------------------------------------------------------------ # End diff --git a/execution_chain/sync/beacon/worker/headers_staged/staged_collect.nim b/execution_chain/sync/beacon/worker/headers_staged/staged_collect.nim index 36aa54603b..9399f50fc8 100644 --- a/execution_chain/sync/beacon/worker/headers_staged/staged_collect.nim +++ b/execution_chain/sync/beacon/worker/headers_staged/staged_collect.nim @@ -82,7 +82,7 @@ func bnStr*(w: LinkedHChain | ref LinkedHChain): string = # ------------------------------------------------------------------------------ func collectModeStopped*(ctx: BeaconCtxRef): bool = - ## Hepler, checks whether there is a general stop conditions based on + ## Helper, checks whether there is a general stop conditions 
based on ## state settings (not on sync peer ctrl as `buddy.ctrl.running`.) ctx.poolMode or ctx.pool.lastState != collectingHeaders or diff --git a/execution_chain/sync/beacon/worker/helpers.nim b/execution_chain/sync/beacon/worker/helpers.nim index 468161774c..b9f811c7ea 100644 --- a/execution_chain/sync/beacon/worker/helpers.nim +++ b/execution_chain/sync/beacon/worker/helpers.nim @@ -13,9 +13,10 @@ ## Extracted helpers from `worker_desc` (avoids circular import) import - pkg/chronos, + pkg/[chronos, results], pkg/eth/common, pkg/stew/interval_set, + ../../../networking/p2p, ../../../utils/prettify, ../../../utils/utils @@ -54,7 +55,10 @@ func toStr*(h: Hash32): string = else: h.short -proc `$`*(w: Interval[BlockNumber,uint64]): string = +func `$`*(w: Interval[BlockNumber,uint64]): string = w.bnStr +func `$`*(w: Opt[Peer]): string = + if w.isSome: $w.value else: "n/a" + # End diff --git a/execution_chain/sync/beacon/worker/start_stop.nim b/execution_chain/sync/beacon/worker/start_stop.nim index d7e488655d..37276997ef 100644 --- a/execution_chain/sync/beacon/worker/start_stop.nim +++ b/execution_chain/sync/beacon/worker/start_stop.nim @@ -49,21 +49,7 @@ proc setupServices*(ctx: BeaconCtxRef; info: static[string]) = # Start in suspended mode ctx.hibernate = true - # Take it easy and assume that queue records contain full block list (which - # is mostly the case anyway.) So the the staging queue is limited by the - # number of sub-list records rather than the number of accumulated block - # objects. - let hwm = if blocksStagedLwm <= ctx.pool.blkStagedHwm: ctx.pool.blkStagedHwm - else: blocksStagedHwmDefault - ctx.pool.blkStagedLenHwm = (hwm + nFetchBodiesBatch - 1) div nFetchBodiesBatch - - # Set blocks batch import queue size - if ctx.pool.blkStagedHwm != 0: - debug info & ": import block lists queue", limit=ctx.pool.blkStagedLenHwm - ctx.pool.blkStagedHwm = hwm - - # Set up header cache descriptor. This will evenually be integrated - # into `ForkedChainRef` (i.e. 
`ctx.pool.chain`.) + # Set up header cache descriptor ctx.pool.hdrCache = HeaderChainRef.init(ctx.pool.chain) # Set up the notifier informing when a new syncer session has started. @@ -95,19 +81,18 @@ proc startBuddy*(buddy: BeaconBuddyRef): bool = ctx = buddy.ctx peer = buddy.peer - if peer.supports(eth69) and - peer.state(eth69).initialized: - ctx.pool.nBuddies.inc - buddy.initHdrProcErrors() - return true + template acceptProto(PROTO: type): bool = + peer.supports(PROTO) and + peer.state(PROTO).initialized - if peer.supports(eth68) and - peer.state(eth68).initialized: + if acceptProto(eth69) or + acceptProto(eth68): ctx.pool.nBuddies.inc ctx.pool.blkLastSlowPeer = Opt.none(Hash) buddy.initHdrProcErrors() return true + proc stopBuddy*(buddy: BeaconBuddyRef) = buddy.ctx.pool.nBuddies.dec buddy.clearHdrProcErrors() diff --git a/execution_chain/sync/beacon/worker/update.nim b/execution_chain/sync/beacon/worker/update.nim index 34213230ea..0512e716d4 100644 --- a/execution_chain/sync/beacon/worker/update.nim +++ b/execution_chain/sync/beacon/worker/update.nim @@ -107,6 +107,7 @@ proc setupProcessingBlocks(ctx: BeaconCtxRef; info: static[string]) = # Update list of block numbers to process ctx.blocksUnprocSet(d, h) + ctx.blk.topImported = d - 1 # State transition ctx.pool.lastState = processingBlocks @@ -207,7 +208,7 @@ proc updateSyncState*(ctx: BeaconCtxRef; info: static[string]) = info "Sync state changed", prevState, thisState, base=ctx.chain.baseNumber.bnStr, head=ctx.chain.latestNumber.bnStr, - target=ctx.head.bnStr + target=ctx.head.bnStr, targetHash=ctx.headHash.short # Final sync scrum layout reached or inconsistent/impossible state if thisState == idleSyncState: @@ -231,8 +232,8 @@ proc updateFromHibernateSetTarget*( # Update range ctx.headersUnprocSet(b+1, t-1) - info "Activating syncer", base=b.bnStr, - head=ctx.chain.latestNumber.bnStr, target=t.bnStr, + info "Activating syncer", base=b.bnStr, head=ctx.chain.latestNumber.bnStr, + target=t.bnStr, 
targetHash=ctx.headHash.short, nSyncPeers=ctx.pool.nBuddies return diff --git a/execution_chain/sync/beacon/worker_config.nim b/execution_chain/sync/beacon/worker_config.nim index 6c398f04f0..36b8527e10 100644 --- a/execution_chain/sync/beacon/worker_config.nim +++ b/execution_chain/sync/beacon/worker_config.nim @@ -86,28 +86,19 @@ const ## Length of the request/stage batch. Several headers are consecutively ## fetched and stashed together as a single record on the staged queue. - headersStagedQueueLengthLwm* = 16 + headersStagedQueueLengthHwm* = 8 ## Limit the number of records in the staged headers queue. ## ## Queue entries start accumulating if one peer stalls while fetching the ## top chain so leaving a gap. This gap must be filled first before ## inserting the queue into a contiguous chain of headers. - ## - ## This low-water mark tryggers the system to do some **magic** to mitigate - ## the above problem. Currently the **magic** is to let (pseudo) threads - ## terminate and then restart all over again. - - headersStagedQueueLengthHwm* = 24 - ## If this size is exceeded, the staged queue is flushed and resized to - ## `headersStagedQueueLengthLwm-1` entries. Then contents is re-fetched - ## from scratch. # ---------------------- fetchBodiesFailedInitialFailPeersHwm* = 50 ## Similar to `fetchHeadersFailedInitialFailPeersHwm` - nFetchBodiesRequest* = 128 + nFetchBodiesRequest* = 64 ## Similar to `nFetchHeadersRequest` fetchBodiesReqErrThresholdZombie* = chronos.seconds(4) @@ -120,23 +111,11 @@ const fetchBodiesReqMinResponsePC* = 10 ## Similar to `fetchHeadersReqMinResponsePC` - nFetchBodiesBatch* = 3 * nFetchBodiesRequest - ## Similar to `nFetchHeadersBatch` - ## - ## With an average less than 90KiB/block (on `mainnet` at block ~#22m), - ## one arrives at a total of at most 35MiB per block batch. - - blocksStagedHwmDefault* = 8 * nFetchBodiesBatch - ## This is an initialiser value for `blocksStagedHwm`. 
- ## - ## If the staged block queue exceeds this many number of block objects for + blocksStagedQueueLengthHwm* = 2 + ## If the staged block queue exceeds this many number of queue objects for ## import, no further block objets are added (but the current sub-list is completed.) - blocksStagedLwm* = nFetchBodiesBatch - ## Minimal accepted initialisation value for `blocksStagedHwm`. The latter - ## will be initalised with `blocksStagedHwmDefault` if smaller than the LWM. - # ---------------------- @@ -144,12 +123,9 @@ static: doAssert 0 < nFetchHeadersRequest doAssert nFetchHeadersRequest <= nFetchHeadersBatch - doAssert 0 < headersStagedQueueLengthLwm - doAssert headersStagedQueueLengthLwm < headersStagedQueueLengthHwm + doAssert 0 < headersStagedQueueLengthHwm doAssert 0 < nFetchBodiesRequest - doAssert nFetchBodiesRequest <= nFetchBodiesBatch - doAssert 0 < blocksStagedLwm - doAssert blocksStagedLwm <= blocksStagedHwmDefault + doAssert 0 < blocksStagedQueueLengthHwm # End diff --git a/execution_chain/sync/beacon/worker_desc.nim b/execution_chain/sync/beacon/worker_desc.nim index d406f7bf27..afc7c15764 100644 --- a/execution_chain/sync/beacon/worker_desc.nim +++ b/execution_chain/sync/beacon/worker_desc.nim @@ -71,12 +71,15 @@ type unprocessed*: BnRangeSet ## Block or header ranges to fetch borrowed*: BnRangeSet ## Fetched/locked ranges staged*: LinkedHChainQueue ## Blocks fetched but not stored yet + reserveStaged*: int ## Pre-book staged slot temporarily BlocksFetchSync* = object ## Block sync staging area unprocessed*: BnRangeSet ## Blocks download requested borrowed*: BnRangeSet ## Fetched/locked fetched ranges + topImported*: BlockNumber ## For synchronising opportunistic import staged*: StagedBlocksQueue ## Blocks ready for import + reserveStaged*: int ## Pre-book staged slot temporarily # ------------------- @@ -104,11 +107,6 @@ type chain*: ForkedChainRef ## Core database, FCU support hdrCache*: HeaderChainRef ## Currently in tandem with `chain` - 
# Blocks import/execution settings - blkImportOk*: bool ## Don't fetch data while block importing - blkStagedHwm*: int ## Set a `staged` queue limit - blkStagedLenHwm*: int ## Figured out as # staged records - # Info, debugging, and error handling stuff nReorg*: int ## Number of reorg invocations (info only) hdrProcError*: Table[Hash,uint8] ## Some globally accessible header errors @@ -138,6 +136,10 @@ func head*(ctx: BeaconCtxRef): Header = ## Shortcut ctx.hdrCache.head() +func headHash*(ctx: BeaconCtxRef): Hash32 = + ## Shortcut + ctx.hdrCache.headHash() + func dangling*(ctx: BeaconCtxRef): Header = ## Shortcut ctx.hdrCache.antecedent() diff --git a/execution_chain/sync/sync_sched.nim b/execution_chain/sync/sync_sched.nim index 96328f14a1..b6f3bc074c 100644 --- a/execution_chain/sync/sync_sched.nim +++ b/execution_chain/sync/sync_sched.nim @@ -186,7 +186,8 @@ proc terminate[S,W](dsc: RunnerSyncRef[S,W]) {.async: (raises: []).} = try: waitFor sleepAsync termWaitPollingTime except CancelledError: - trace "Shutdown: peer timeout was cancelled", nWorkers=dsc.buddies.len + trace "Shutdown: peer timeout was cancelled", + nCachedWorkers=dsc.buddies.len while dsc.daemonRunning or dsc.tickerRunning: @@ -194,7 +195,8 @@ proc terminate[S,W](dsc: RunnerSyncRef[S,W]) {.async: (raises: []).} = try: await sleepAsync termWaitPollingTime except CancelledError: - trace "Shutdown: daemon timeout was cancelled", nWorkers=dsc.buddies.len + trace "Shutdown: daemon timeout was cancelled", + nCachedWorkers=dsc.buddies.len # Final shutdown dsc.ctx.runRelease() @@ -237,7 +239,8 @@ proc daemonLoop[S,W](dsc: RunnerSyncRef[S,W]) {.async: (raises: []).} = # Stop on error (must not end up in busy-loop). If the activation flag # `dsc.ctx.daemon` remains `true`, the deamon will be re-started from # the worker loop in due time. 
- trace "Deamon loop sleep was cancelled", nWorkers=dsc.buddies.len + trace "Deamon loop sleep was cancelled", + nCachedWorkers=dsc.buddies.len break # End while @@ -364,7 +367,8 @@ proc workerLoop[S,W](buddy: RunnerBuddyRef[S,W]) {.async: (raises: []).} = try: await sleepAsync suspend except CancelledError: - trace "Peer loop sleep was cancelled", peer, nWorkers=dsc.buddies.len + trace "Peer loop sleep was cancelled", peer, + nCachedWorkers=dsc.buddies.len break # stop on error (must not end up in busy-loop) # End while @@ -382,7 +386,7 @@ proc onPeerConnected[S,W](dsc: RunnerSyncRef[S,W]; peer: Peer) = # Check for known entry (which should not exist.) let - maxWorkers {.used.} = dsc.buddiesMax + maxCachedWorkers {.used.} = dsc.buddiesMax nPeers {.used.} = dsc.pool.len zombie = dsc.buddies.eq peer.key if zombie.isOk: @@ -391,12 +395,13 @@ proc onPeerConnected[S,W](dsc: RunnerSyncRef[S,W]; peer: Peer) = ttz = zombie.value.zombified + zombieTimeToLinger if ttz < Moment.now(): if dsc.ctx.noisyLog: trace "Reconnecting zombie peer ignored", peer, - nPeers, nWorkers=dsc.buddies.len, maxWorkers, canRequeue=(now-ttz) + nPeers, nCachedWorkers=dsc.buddies.len, maxCachedWorkers, + canRequeue=(now-ttz) return # Zombie can be removed from the database dsc.buddies.del peer.key if dsc.ctx.noisyLog: trace "Zombie peer timeout, ready for requeing", peer, - nPeers, nWorkers=dsc.buddies.len, maxWorkers + nPeers, nCachedWorkers=dsc.buddies.len, maxCachedWorkers # Initialise worker for this peer let buddy = RunnerBuddyRef[S,W]( @@ -408,7 +413,7 @@ proc onPeerConnected[S,W](dsc: RunnerSyncRef[S,W]; peer: Peer) = peerID: peer.key.hash)) if not buddy.worker.runStart(): if dsc.ctx.noisyLog: trace "Ignoring useless peer", peer, nPeers, - nWorkers=dsc.buddies.len, maxWorkers + nCachedWorkers=dsc.buddies.len, maxCachedWorkers buddy.worker.ctrl.zombie = true return @@ -426,14 +431,15 @@ proc onPeerConnected[S,W](dsc: RunnerSyncRef[S,W]; peer: Peer) = if dsc.ctx.noisyLog: trace "Dequeuing 
zombie peer", # Fake `Peer` pretty print for `oldest` oldest=("Node[" & $leastVal.key.address & "]"), - since=leastVal.data.zombified, nPeers, nWorkers=dsc.buddies.len, - maxWorkers + since=leastVal.data.zombified, nPeers, nCachedWorkers=dsc.buddies.len, + maxCachedWorkers discard else: # This could happen if there are idle entries in the table, i.e. # somehow hanging runners. if dsc.ctx.noisyLog: trace "Peer table full! Dequeuing least used entry", - oldestPeer=oldest.peer, oldestOnly=oldest.only, nPeers=nPeers, nWorkers=dsc.buddies.len, maxWorkers + oldestPeer=oldest.peer, oldestOnly=oldest.only, nPeers=nPeers, + nCachedWorkers=dsc.buddies.len, maxCachedWorkers # Setting to `zombie` will trigger the worker to terminate (if any.) oldest.ctrl.zombie = true @@ -446,16 +452,16 @@ proc onPeerConnected[S,W](dsc: RunnerSyncRef[S,W]; peer: Peer) = proc onPeerDisconnected[S,W](dsc: RunnerSyncRef[S,W], peer: Peer) = let nPeers = dsc.pool.len - maxWorkers = dsc.buddiesMax - nWorkers = dsc.buddies.len + maxCachedWorkers = dsc.buddiesMax + nCachedWorkers = dsc.buddies.len rc = dsc.buddies.eq peer.key if rc.isErr: if dsc.ctx.noisyLog: debug "Disconnected, unregistered peer", peer, - nPeers, nWorkers, maxWorkers + nPeers, nCachedWorkers, maxCachedWorkers elif rc.value.worker.isNil: # Re-visiting zombie if dsc.ctx.noisyLog: trace "Ignore zombie", peer, - nPeers, nWorkers, maxWorkers + nPeers, nCachedWorkers, maxCachedWorkers elif rc.value.worker.ctrl.zombie: # Don't disconnect, leave them fall out of the LRU cache. The effect is, # that reconnecting might be blocked, for a while. 
For few peers cases, @@ -465,7 +471,7 @@ proc onPeerDisconnected[S,W](dsc: RunnerSyncRef[S,W], peer: Peer) = rc.value.dsc = nil rc.value.zombified = Moment.now() if dsc.ctx.noisyLog: trace "Disconnected, zombie", peer, - nPeers, nWorkers, maxWorkers + nPeers, nCachedWorkers, maxCachedWorkers else: rc.value.worker.ctrl.stopped = true # in case it is hanging somewhere dsc.buddies.del peer.key From 765b69f57037800415d8ca2febe30b0eca267f43 Mon Sep 17 00:00:00 2001 From: bhartnett <51288821+bhartnett@users.noreply.github.com> Date: Tue, 20 May 2025 14:45:24 +0800 Subject: [PATCH 022/138] Portal Bridge: Restructure portal bridge folders (#3308) * Restructure portal bridge folders. --- Makefile | 5 ++++- .../portal_beacon_bridge.nim} | 9 +++++---- .../rpc_helpers.nim} | 4 ++-- .../portal_history_bridge.nim} | 17 +++++++++-------- portal/bridge/nimbus_portal_bridge.nim | 8 ++++---- portal/bridge/nimbus_portal_bridge_conf.nim | 7 +++++-- .../bridge/{state_bridge => state}/database.nim | 0 .../{state_bridge => state}/offers_builder.nim | 0 .../portal_state_bridge.nim} | 13 +++++++------ .../{state_bridge => state}/state_diff.nim | 2 +- .../{state_bridge => state}/world_state.nim | 0 .../world_state_helper.nim | 0 12 files changed, 37 insertions(+), 28 deletions(-) rename portal/bridge/{portal_bridge_beacon.nim => beacon/portal_beacon_bridge.nim} (98%) rename portal/bridge/{portal_bridge_common.nim => common/rpc_helpers.nim} (97%) rename portal/bridge/{portal_bridge_history.nim => history/portal_history_bridge.nim} (97%) rename portal/bridge/{state_bridge => state}/database.nim (100%) rename portal/bridge/{state_bridge => state}/offers_builder.nim (100%) rename portal/bridge/{portal_bridge_state.nim => state/portal_state_bridge.nim} (98%) rename portal/bridge/{state_bridge => state}/state_diff.nim (99%) rename portal/bridge/{state_bridge => state}/world_state.nim (100%) rename portal/bridge/{state_bridge => state}/world_state_helper.nim (100%) diff --git a/Makefile b/Makefile 
index b1bfbacde6..74681a2009 100644 --- a/Makefile +++ b/Makefile @@ -77,7 +77,10 @@ PORTAL_TOOLS := \ fcli_db PORTAL_TOOLS_DIRS := \ portal/bridge \ - portal/bridge/state_bridge \ + portal/bridge/common \ + portal/bridge/beacon \ + portal/bridge/history \ + portal/bridge/state \ portal/tools # comma-separated values for the "clean" target PORTAL_TOOLS_CSV := $(subst $(SPACE),$(COMMA),$(FLUFFY_TOOLS)) diff --git a/portal/bridge/portal_bridge_beacon.nim b/portal/bridge/beacon/portal_beacon_bridge.nim similarity index 98% rename from portal/bridge/portal_bridge_beacon.nim rename to portal/bridge/beacon/portal_beacon_bridge.nim index 9957ce17ae..b23f85c5ae 100644 --- a/portal/bridge/portal_bridge_beacon.nim +++ b/portal/bridge/beacon/portal_beacon_bridge.nim @@ -16,10 +16,11 @@ import eth/async_utils, json_rpc/clients/httpclient, beacon_chain/spec/eth2_apis/rest_beacon_client, - ../network/beacon/beacon_content, - ../rpc/portal_rpc_client, - ../tools/eth_data_exporter/cl_data_exporter, - ./[nimbus_portal_bridge_conf, portal_bridge_common] + ../../network/beacon/beacon_content, + ../../rpc/portal_rpc_client, + ../../tools/eth_data_exporter/cl_data_exporter, + ../common/rpc_helpers, + ../nimbus_portal_bridge_conf const restRequestsTimeout = 30.seconds diff --git a/portal/bridge/portal_bridge_common.nim b/portal/bridge/common/rpc_helpers.nim similarity index 97% rename from portal/bridge/portal_bridge_common.nim rename to portal/bridge/common/rpc_helpers.nim index 39bdc41f86..1df03aa451 100644 --- a/portal/bridge/portal_bridge_common.nim +++ b/portal/bridge/common/rpc_helpers.nim @@ -11,8 +11,8 @@ import chronicles, json_rpc/rpcclient, web3/[eth_api, eth_api_types], - ../rpc/rpc_calls/rpc_trace_calls, - ./nimbus_portal_bridge_conf + ../../rpc/rpc_calls/rpc_trace_calls, + ../nimbus_portal_bridge_conf export rpcclient diff --git a/portal/bridge/portal_bridge_history.nim b/portal/bridge/history/portal_history_bridge.nim similarity index 97% rename from 
portal/bridge/portal_bridge_history.nim rename to portal/bridge/history/portal_history_bridge.nim index 89db1c5501..27e7b26738 100644 --- a/portal/bridge/portal_bridge_history.nim +++ b/portal/bridge/history/portal_history_bridge.nim @@ -17,14 +17,15 @@ import eth/common/keys, eth/common/[base, headers_rlp, blocks_rlp, receipts], eth/p2p/discoveryv5/random2, - ../../execution_chain/beacon/web3_eth_conv, - ../../hive_integration/nodocker/engine/engine_client, - ../rpc/portal_rpc_client, - ../network/history/[history_content, history_type_conversions, history_validation], - ../network/network_metadata, - ../eth_data/[era1, history_data_ssz_e2s, history_data_seeding], - ../database/era1_db, - ./[nimbus_portal_bridge_conf, portal_bridge_common] + ../../../execution_chain/beacon/web3_eth_conv, + ../../../hive_integration/nodocker/engine/engine_client, + ../../rpc/portal_rpc_client, + ../../network/history/[history_content, history_type_conversions, history_validation], + ../../network/network_metadata, + ../../eth_data/[era1, history_data_ssz_e2s, history_data_seeding], + ../../database/era1_db, + ../common/rpc_helpers, + ../nimbus_portal_bridge_conf from stew/objects import checkedEnumAssign diff --git a/portal/bridge/nimbus_portal_bridge.nim b/portal/bridge/nimbus_portal_bridge.nim index 9008a46936..ab86cee5f2 100644 --- a/portal/bridge/nimbus_portal_bridge.nim +++ b/portal/bridge/nimbus_portal_bridge.nim @@ -51,10 +51,10 @@ import confutils, confutils/std/net, ../logging, - ./[ - nimbus_portal_bridge_conf, portal_bridge_beacon, portal_bridge_history, - portal_bridge_state, - ] + ./beacon/portal_beacon_bridge, + ./history/portal_history_bridge, + ./state/portal_state_bridge, + ./nimbus_portal_bridge_conf type PortalBridgeStatus = enum Running diff --git a/portal/bridge/nimbus_portal_bridge_conf.nim b/portal/bridge/nimbus_portal_bridge_conf.nim index 04e52af7d0..336494e8d3 100644 --- a/portal/bridge/nimbus_portal_bridge_conf.nim +++ 
b/portal/bridge/nimbus_portal_bridge_conf.nim @@ -14,7 +14,6 @@ import nimcrypto/hash, ../network/network_metadata, ../eth_data/era1, - ../client/nimbus_portal_client_conf, ../logging export net @@ -33,7 +32,7 @@ proc defaultEthDataDir*(): string = proc defaultEra1DataDir*(): string = defaultEthDataDir() / "era1" -proc defaultPortalBridgeStateDir*(): string = +proc defaultPortalBridgeDir*(): string = let relativeDataDir = when defined(windows): "AppData" / "Roaming" / "Nimbus" / "PortalBridge" @@ -44,6 +43,10 @@ proc defaultPortalBridgeStateDir*(): string = getHomeDir() / relativeDataDir +proc defaultPortalBridgeStateDir*(): string = + let stateDir = when defined(windows) or defined(macosx): "State" else: "state" + defaultPortalBridgeDir() / stateDir + const defaultEndEra* = uint64(era(network_metadata.mergeBlockNumber - 1)) type diff --git a/portal/bridge/state_bridge/database.nim b/portal/bridge/state/database.nim similarity index 100% rename from portal/bridge/state_bridge/database.nim rename to portal/bridge/state/database.nim diff --git a/portal/bridge/state_bridge/offers_builder.nim b/portal/bridge/state/offers_builder.nim similarity index 100% rename from portal/bridge/state_bridge/offers_builder.nim rename to portal/bridge/state/offers_builder.nim diff --git a/portal/bridge/portal_bridge_state.nim b/portal/bridge/state/portal_state_bridge.nim similarity index 98% rename from portal/bridge/portal_bridge_state.nim rename to portal/bridge/state/portal_state_bridge.nim index d297c7ccd0..ecc52fa2a6 100644 --- a/portal/bridge/portal_bridge_state.nim +++ b/portal/bridge/state/portal_state_bridge.nim @@ -17,12 +17,13 @@ import web3/[eth_api, eth_api_types], results, eth/common/[addresses_rlp, hashes_rlp], - ../../execution_chain/common/chain_config, - ../rpc/rpc_calls/rpc_trace_calls, - ../rpc/portal_rpc_client, - ../network/state/[state_content, state_gossip], - ./state_bridge/[database, state_diff, world_state_helper, offers_builder], - 
./[nimbus_portal_bridge_conf, portal_bridge_common] + ../../../execution_chain/common/chain_config, + ../../rpc/rpc_calls/rpc_trace_calls, + ../../rpc/portal_rpc_client, + ../../network/state/[state_content, state_gossip], + ./[database, state_diff, world_state_helper, offers_builder], + ../common/rpc_helpers, + ../nimbus_portal_bridge_conf logScope: topics = "portal_bridge" diff --git a/portal/bridge/state_bridge/state_diff.nim b/portal/bridge/state/state_diff.nim similarity index 99% rename from portal/bridge/state_bridge/state_diff.nim rename to portal/bridge/state/state_diff.nim index 6b76a2c9d5..decfce7061 100644 --- a/portal/bridge/state_bridge/state_diff.nim +++ b/portal/bridge/state/state_diff.nim @@ -13,7 +13,7 @@ import stint, eth/common/base, ../../rpc/rpc_calls/rpc_trace_calls, - ../portal_bridge_common + ../common/rpc_helpers type DiffType* = enum diff --git a/portal/bridge/state_bridge/world_state.nim b/portal/bridge/state/world_state.nim similarity index 100% rename from portal/bridge/state_bridge/world_state.nim rename to portal/bridge/state/world_state.nim diff --git a/portal/bridge/state_bridge/world_state_helper.nim b/portal/bridge/state/world_state_helper.nim similarity index 100% rename from portal/bridge/state_bridge/world_state_helper.nim rename to portal/bridge/state/world_state_helper.nim From c2a67b758ae9865d763bd16374abf4b0e1a7c84f Mon Sep 17 00:00:00 2001 From: bhartnett <51288821+bhartnett@users.noreply.github.com> Date: Tue, 20 May 2025 15:39:14 +0800 Subject: [PATCH 023/138] Portal Client: Minor improvements to portal docs (#3309) --- .../the_fluffy_book/docs/access-content.md | 4 ++-- .../docs/the_fluffy_book/docs/architecture.md | 6 +++--- .../docs/beacon-content-bridging.md | 4 ++-- .../the_fluffy_book/docs/build-from-source.md | 2 +- .../docs/calling-a-contract.md | 2 +- .../docs/history-content-bridging.md | 19 +++++++++---------- portal/docs/the_fluffy_book/docs/index.md | 2 +- .../docs/state-content-bridging.md | 13 
++++++------- portal/docs/the_fluffy_book/docs/upgrade.md | 2 +- 9 files changed, 26 insertions(+), 28 deletions(-) diff --git a/portal/docs/the_fluffy_book/docs/access-content.md b/portal/docs/the_fluffy_book/docs/access-content.md index f1cb7d27de..5f289459da 100644 --- a/portal/docs/the_fluffy_book/docs/access-content.md +++ b/portal/docs/the_fluffy_book/docs/access-content.md @@ -1,6 +1,6 @@ # Access content on the Portal network -Once you have a Portal node [connected to network](./connect-to-portal.md) with +Once you have a Portal node [connected to the network](./connect-to-portal.md) with the JSON-RPC interface enabled, then you can access the content available on the Portal network. @@ -24,4 +24,4 @@ BLOCKHASH=0x55b11b918355b1ef9c5db810302ebad0bf2544255b530cdce90674d5887bb286 # R \ No newline at end of file +and about recently audited data at http://glados.ethportal.net/content/ --> diff --git a/portal/docs/the_fluffy_book/docs/architecture.md b/portal/docs/the_fluffy_book/docs/architecture.md index b9806f80b9..a34280461a 100644 --- a/portal/docs/the_fluffy_book/docs/architecture.md +++ b/portal/docs/the_fluffy_book/docs/architecture.md @@ -3,7 +3,7 @@ This section outlines the Nimbus Portal client's architecture and shows the main components in the codebase. The arrows indicate a dependancy relationship between each component. -## Nimbus Portal Client high level architecture +## Nimbus Portal client high level architecture This diagram outlines the Nimbus Portal client high-level architecture. ```mermaid @@ -18,7 +18,7 @@ graph TD; id2(PortalNode) --> id10(Discv5Protocol) ``` -When `nimbus_portal_client` starts it runs an instance of `PortalNode` which manages the `Discv5Protocol`, `BeaconNetwork`, `HistoryNetwork` and `StateNetwork` instances. There is a single instance of each of these components and each of the subnetwork instances can be enabled/disabled depending on the startup configuration selected. 
The `PortalNode` instance includes everything needed to participate in the Portal network to enable storage of offered content and serving content requests from other Portal nodes. It may become part of a library in the future which would allow other projects to easily embed an instance of `nimbus_portal_client` in their codebase. +When the Nimbus Portal client starts it runs an instance of `PortalNode` which manages the `Discv5Protocol`, `BeaconNetwork`, `HistoryNetwork` and `StateNetwork` instances. There is a single instance of each of these components and each of the subnetwork instances can be enabled/disabled depending on the startup configuration selected. The `PortalNode` instance includes everything needed to participate in the Portal network to enable storage of offered content and serving content requests from other Portal nodes. It may become part of a library in the future which would allow other projects to easily embed an instance of the Nimbus Portal client in their codebase. The `RpcHttpServer` and `RpcWebSocketServer` enable serving JSON-RPC requests from Portal network over HTTP and WebSocket respectively. These RPC servers depend on the Nimbus Portal EVM (`AsyncEvm`) in order to implement the various endpoints which require asyncronous transaction execution while fetching state from the Portal network. @@ -61,7 +61,7 @@ in each of the Portal Wire subprotocols. The `RadiusCache` holds the last known when pinging each node in the routing table periodically. The `OfferCache` caches the content ids of the most recent content successfully offered and stored so that the Portal node can reject content that it already has without doing a database lookup. The `ContentCache` improves the performance of content lookups (used by the JSON-RPC API's) by caching the most recently fetched content in a LRU cache. -The `ContentDb` is the main database in `nimbus_portal_client` which internally uses sqlite to store the content data on disk. 
The `PortalProtocol` +The `ContentDb` is the main database in the Nimbus Portal client which internally uses sqlite to store the content data on disk. The `PortalProtocol` uses the `OfferQueue` to hold pending offer requests which are passed to the `PortalStream` by the concurrent offer workers which run as a part of `PortalProtocol`. diff --git a/portal/docs/the_fluffy_book/docs/beacon-content-bridging.md b/portal/docs/the_fluffy_book/docs/beacon-content-bridging.md index 01a784ad72..d412847bc1 100644 --- a/portal/docs/the_fluffy_book/docs/beacon-content-bridging.md +++ b/portal/docs/the_fluffy_book/docs/beacon-content-bridging.md @@ -8,7 +8,7 @@ Run a Nimbus Portal client with the JSON-RPC API enabled. ./build/nimbus_portal_client --rpc ``` -Build & run the `nimbus_portal_bridge` for the beacon network: +Build & run the Nimbus Portal bridge for the beacon network: ```bash make nimbus_portal_bridge @@ -18,7 +18,7 @@ TRUSTED_BLOCK_ROOT=0x12345678901234567890123456789012345678901234567890123456789 ./build/nimbus_portal_bridge beacon --trusted-block-root:${TRUSTED_BLOCK_ROOT} --rest-url:http://127.0.0.1:5052 --portal-rpc-url:http://127.0.0.1:8545 ``` -The `nimbus_portal_bridge` will connect to Nimbus Portal client over the JSON-RPC +The Nimbus Portal bridge will connect to the Nimbus Portal client over the JSON-RPC interface and start gossiping an `LightClientBootstrap` for given trusted block root and gossip backfill `LightClientUpdate`s. diff --git a/portal/docs/the_fluffy_book/docs/build-from-source.md b/portal/docs/the_fluffy_book/docs/build-from-source.md index e7a63e806b..f02a0e9a3f 100644 --- a/portal/docs/the_fluffy_book/docs/build-from-source.md +++ b/portal/docs/the_fluffy_book/docs/build-from-source.md @@ -5,7 +5,7 @@ turned on. The build process itself is simple and fully automated, but may take a few minutes. !!! note "Nim" - `nimbus_portal_client` is written in the [Nim](https://nim-lang.org) programming language. 
+ The Nimbus Portal client is written in the [Nim](https://nim-lang.org) programming language. The correct version will automatically be downloaded as part of the build process! ## Prerequisites diff --git a/portal/docs/the_fluffy_book/docs/calling-a-contract.md b/portal/docs/the_fluffy_book/docs/calling-a-contract.md index 5e95e1f062..e44a7a772e 100644 --- a/portal/docs/the_fluffy_book/docs/calling-a-contract.md +++ b/portal/docs/the_fluffy_book/docs/calling-a-contract.md @@ -4,7 +4,7 @@ Once you have a Portal node running and [connected to the network](./connect-to- the JSON-RPC interface enabled, then you can call contracts using the `eth_call` JSON-RPC method which should be enabled by default. -Note that `eth_call` in `nimbus_portal_client` requires both the history network, state network and the `eth` +Note that `eth_call` in the Nimbus Portal client requires both the history network, state network and the `eth` rpc api to be enabled. These should be enabled by default already but you can also manually enable these by running: diff --git a/portal/docs/the_fluffy_book/docs/history-content-bridging.md b/portal/docs/the_fluffy_book/docs/history-content-bridging.md index 59b2bb4671..667abbc4a3 100644 --- a/portal/docs/the_fluffy_book/docs/history-content-bridging.md +++ b/portal/docs/the_fluffy_book/docs/history-content-bridging.md @@ -1,8 +1,8 @@ # Bridging content into the Portal history network -## Seeding history content with the `nimbus_portal_bridge` +## Seeding history content with the Nimbus Portal bridge -The `nimbus_portal_bridge` requires `era1` files as source for the block content from before the merge. +The Nimbus Portal bridge requires `era1` files as source for the block content from before the merge. It requires access to a full node with EL JSON-RPC API for seeding the latest (head of the chain) block content. 
Any block content between the merge and the latest is currently not implemented, but will be implemented in the future by usage of `era` files as source. @@ -15,17 +15,16 @@ Run a Portal client with the Portal JSON-RPC API enabled, e.g. Nimbus Portal cli ``` > Note: The `--storage-capacity:0` option is not required, but it is added here -for the use case where the node's only focus is on gossiping content from the -`nimbus_portal_bridge`. +for the use case where the node's only focus is on gossiping content from the portal bridge. ### Step 2: Run an EL client -The `nimbus_portal_bridge` needs access to the EL JSON-RPC API, either through a local +The Nimbus Portal bridge needs access to the EL JSON-RPC API, either through a local Ethereum client or via a web3 provider. ### Step 3: Run the Portal bridge in history mode -Build & run the `nimbus_portal_bridge`: +Build & run the Nimbus Portal bridge: ```bash make nimbus_portal_bridge @@ -33,11 +32,11 @@ WEB3_URL="http://127.0.0.1:8548" # Replace with your provider. ./build/nimbus_portal_bridge history --web3-url:${WEB3_URL} ``` -Default the `nimbus_portal_bridge` will run in `--latest` mode, which means that only the +By default the Nimbus Portal bridge will run in `--latest` mode, which means that only the latest block content will be gossiped into the network. -The `nimbus_portal_bridge` also has a `--backfill` mode which will gossip pre-merge blocks -from `era1` files into the network. Default the bridge will audit first whether +It also has a `--backfill` mode which will gossip pre-merge blocks +from `era1` files into the network. By default the bridge will audit first whether the content is available on the network and if not it will gossip it into the network. @@ -47,7 +46,7 @@ WEB3_URL="http://127.0.0.1:8548" # Replace with your provider. 
./build/nimbus_portal_bridge history --latest:true --backfill:true --audit:true --era1-dir:/somedir/era1/ --web3-url:${WEB3_URL} ``` -## Seeding directly from the nimbus_portal_client +## Seeding directly from the Nimbus Portal client This method currently only supports seeding block content from before the merge. It uses `era1` files as source for the content. diff --git a/portal/docs/the_fluffy_book/docs/index.md b/portal/docs/the_fluffy_book/docs/index.md index dd2a390996..4593338ab0 100644 --- a/portal/docs/the_fluffy_book/docs/index.md +++ b/portal/docs/the_fluffy_book/docs/index.md @@ -1,4 +1,4 @@ -# The Nimbus Portal client Guide +# The Nimbus Portal Client Guide The Nimbus Portal client is the Nimbus client implementation of the [Portal network specifications](https://github.com/ethereum/portal-network-specs). diff --git a/portal/docs/the_fluffy_book/docs/state-content-bridging.md b/portal/docs/the_fluffy_book/docs/state-content-bridging.md index 30c0091cfa..d05aac3c5a 100644 --- a/portal/docs/the_fluffy_book/docs/state-content-bridging.md +++ b/portal/docs/the_fluffy_book/docs/state-content-bridging.md @@ -2,7 +2,7 @@ ## Seeding from content bridges -### Seeding state data with the `nimbus_portal_bridge` +### Seeding state data with the Nimbus Portal bridge #### Step 1: Run a Portal client @@ -14,13 +14,12 @@ Run a Portal client with the Portal JSON-RPC API enabled (e.g. Nimbus Portal cli ``` > Note: The `--storage-capacity:0` option is not required, but it is added here -for the use case where the node's only focus is on gossiping content from the -`nimbus_portal_bridge`. +for the use case where the node's only focus is on gossiping content from the portal bridge. 
#### Step 2: Run an EL client (archive node) that supports `trace_replayBlockTransactions` -The `nimbus_portal_bridge` needs access to the EL JSON-RPC API, either through a local +The Nimbus Portal bridge needs access to the EL JSON-RPC API, either through a local Ethereum client or via a web3 provider. Currently the portal state bridge requires access to the following EL JSON-RPC APIs: @@ -39,7 +38,7 @@ to ensure that the state is available for all the historical blocks being synced #### Step 3: Run the Portal bridge in state mode -Build & run the `nimbus_portal_bridge`: +Build & run the Nimbus Portal bridge: ```bash make nimbus_portal_bridge @@ -60,8 +59,8 @@ The `--gossip-workers` parameter can be used to set the number of workers that w gossip the portal state data into the portal state subnetwork. Each worker handles gossipping the state for a single block and the workers gossip the data concurrently. It is recommended to increase the number of workers in order to increase the speed -and throughput of the gossiping process up until the point where `nimbus_portal_bridge` is unable -keep up. +and throughput of the gossiping process up until the point where the connected portal +client is unable keep up. The optional `--verify-gossip` parameter can be used to verify that the state data has successfully been gossipped and is available on the portal state network. When this diff --git a/portal/docs/the_fluffy_book/docs/upgrade.md b/portal/docs/the_fluffy_book/docs/upgrade.md index 02f0e14a95..17491c9302 100644 --- a/portal/docs/the_fluffy_book/docs/upgrade.md +++ b/portal/docs/the_fluffy_book/docs/upgrade.md @@ -24,5 +24,5 @@ make -j4 nimbus_portal_client Complete the upgrade by restarting the node. !!! 
tip - To check which version of `nimbus_portal_client` you're currently running, run + To check which version of the Nimbus Portal client you're currently running, run `./build/nimbus_portal_client --version` From 2183475062441e627ff82b076eb5e01a51d495a3 Mon Sep 17 00:00:00 2001 From: kdeme <7857583+kdeme@users.noreply.github.com> Date: Tue, 20 May 2025 10:29:11 +0200 Subject: [PATCH 024/138] Fix ContentDb and BeaconDb to close properly (#3307) --- portal/database/content_db.nim | 16 +++------------- portal/network/beacon/beacon_db.nim | 4 ++++ portal/tools/fcli_db.nim | 6 +++++- 3 files changed, 12 insertions(+), 14 deletions(-) diff --git a/portal/database/content_db.nim b/portal/database/content_db.nim index f6d9d1e6ea..58ab5f65a6 100644 --- a/portal/database/content_db.nim +++ b/portal/database/content_db.nim @@ -269,21 +269,11 @@ proc new*( contentDb.setInitialRadius(radiusConfig) contentDb -template disposeSafe(s: untyped): untyped = - if distinctBase(s) != nil: - s.dispose() - s = typeof(s)(nil) - proc close*(db: ContentDB) = - db.sizeStmt.disposeSafe() - db.unusedSizeStmt.disposeSafe() - db.vacuumStmt.disposeSafe() - db.contentCountStmt.disposeSafe() - db.contentSizeStmt.disposeSafe() - db.getAllOrderedByDistanceStmt.disposeSafe() - db.deleteOutOfRadiusStmt.disposeSafe() - db.largestDistanceStmt.disposeSafe() discard db.kv.close() + # statements get "disposed" in `close` call as they got created as managed = true + db.backend.close() + db.backend = nil ## Private ContentDB calls diff --git a/portal/network/beacon/beacon_db.nim b/portal/network/beacon/beacon_db.nim index c788d4ea09..5bb545ade7 100644 --- a/portal/network/beacon/beacon_db.nim +++ b/portal/network/beacon/beacon_db.nim @@ -370,8 +370,12 @@ proc close*(db: BeaconDb) = db.bootstraps.close() db.bestUpdates.close() db.historicalSummaries.close() + discard db.kv.close() + db.backend.close() + db.backend = nil + template finalityUpdateCache(db: BeaconDb): Opt[LightClientFinalityUpdateCache] = 
db.beaconDbCache.finalityUpdateCache diff --git a/portal/tools/fcli_db.nim b/portal/tools/fcli_db.nim index 7ad37ddbf2..1e8c8cb4f1 100644 --- a/portal/tools/fcli_db.nim +++ b/portal/tools/fcli_db.nim @@ -81,7 +81,8 @@ proc cmdGenerate(conf: DbConf) = for i in 0 ..< conf.contentAmount: let key = rng[].generateRandomU256() - db.put(key, bytes) + + db.close() proc cmdBench(conf: DbConf) = let @@ -138,6 +139,8 @@ proc cmdBench(conf: DbConf) = printTimers(timers) + db.close() + proc cmdPrune(conf: DbConf) = if conf.reclaimOnly: let db = ContentDB.new( @@ -149,6 +152,7 @@ proc cmdPrune(conf: DbConf) = ) db.reclaimAndTruncate() + db.close() else: notice "Functionality not yet implemented" quit QuitSuccess From 50a2821134d8b2cfc6da3da98b889364a532f770 Mon Sep 17 00:00:00 2001 From: kdeme <7857583+kdeme@users.noreply.github.com> Date: Tue, 20 May 2025 10:57:01 +0200 Subject: [PATCH 025/138] Rename fluffy refs now that hive and docker image are updated (#3310) - Hive has been updated from fluffy to nimbus-portal - Docker hub repo nimbus-portal-client has been created and latest build has been added there --- .../docs/nimbus-portal-with-hive.md | 14 +++++++------- .../the_fluffy_book/docs/quick-start-docker.md | 4 ++-- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/portal/docs/the_fluffy_book/docs/nimbus-portal-with-hive.md b/portal/docs/the_fluffy_book/docs/nimbus-portal-with-hive.md index 5d1abc0951..611b431eec 100644 --- a/portal/docs/the_fluffy_book/docs/nimbus-portal-with-hive.md +++ b/portal/docs/the_fluffy_book/docs/nimbus-portal-with-hive.md @@ -18,13 +18,13 @@ Example commands for running test suites: ```sh # Run the portal hive tests with only the Nimbus Portal client -./hive --sim portal --client fluffy +./hive --sim portal --client nimbus-portal # Run the portal hive tests with different clients -./hive --sim portal --client fluffy,trin,ultralight,shisui +./hive --sim portal --client nimbus-portal,trin,ultralight,shisui # Run portal hive tests from 
a specific portal hive simulator -./hive --sim portal --client fluffy --sim.limit history-interop +./hive --sim portal --client nimbus-portal --sim.limit history-interop ``` Access the results through web-ui: @@ -48,11 +48,11 @@ To do that follow next steps: 2) Build the local development Docker image using the following command: ``` -docker build --tag fluffy-dev --file ./portal/docker/Dockerfile.debug . +docker build --tag nimbus-portal-dev --file ./portal/docker/Dockerfile.debug . ``` -3) Modify the `FROM` tag in the portal-hive `Dockerfile` of fluffy at -`./hive/clients/fluffy/Dockerfile` to use the image that was build in step 2. +3) Modify the `FROM` tag in the portal-hive `Dockerfile` of Nimbus Portal client at +`./hive/clients/nimbus-portal/Dockerfile` to use the image that was build in step 2. 4) Run the tests as [usual](nimbus-portal-with-portal-hive.md/#run-the-hive-tests-locally). @@ -62,4 +62,4 @@ docker build --tag fluffy-dev --file ./portal/docker/Dockerfile.debug . `vendors/` from `./portal/docker/Dockerfile.debug.dockerignore`. !!! note - When developing on Linux the `./portal/docker/Dockerfile.debug.linux` Dockerfile can also be used instead. It does require to manually build fluffy first as it copies over this binary. + When developing on Linux the `./portal/docker/Dockerfile.debug.linux` Dockerfile can also be used instead. It does require to manually build `nimbus_portal_client` first as it copies over this binary. diff --git a/portal/docs/the_fluffy_book/docs/quick-start-docker.md b/portal/docs/the_fluffy_book/docs/quick-start-docker.md index 51c1cfcd71..c49b5843d8 100644 --- a/portal/docs/the_fluffy_book/docs/quick-start-docker.md +++ b/portal/docs/the_fluffy_book/docs/quick-start-docker.md @@ -1,7 +1,7 @@ # Quick start - Docker This page takes you through the steps of getting the Nimbus Portal client running -on the public network by use of the [public Docker image](https://hub.docker.com/r/statusim/nimbus-fluffy/tags). 
+on the public network by use of the [public Docker image](https://hub.docker.com/r/statusim/nimbus-portal-client/tags). The Docker image gets rebuild from latest master every night and only `amd64` is supported currently. @@ -14,7 +14,7 @@ The Docker image gets rebuild from latest master every night and only `amd64` is ```bash # Connect to the Portal bootstrap nodes and enable the JSON-RPC APIs. -docker container run -p 8545:8545 statusim/nimbus-fluffy:amd64-master-latest --rpc --rpc-address:0.0.0.0 +docker container run -p 8545:8545 statusim/nimbus-portal-client:amd64-master-latest --rpc --rpc-address:0.0.0.0 ``` !!! note Port 8545 is published and `rpc-address` is set to the `ANY` address in this command to allow access to the JSON-RPC API from outside the Docker image. You might want to adjust that depending on the use case & security model. From 48d2bbac5decbd8aa9e10eb8053fc02f4b5bdda4 Mon Sep 17 00:00:00 2001 From: kdeme <7857583+kdeme@users.noreply.github.com> Date: Tue, 20 May 2025 13:40:24 +0200 Subject: [PATCH 026/138] Update to latest grafana dashboard + remove fluffy references (#3311) - Update grafana dashboard to the latest one used for our fleet - Update that grafna dashboard to use Nimbus Portal naming - Remove some left-over fluffy naming --- portal/docs/the_fluffy_book/docs/metrics.md | 2 +- ...ard.json => portal_grafana_dashboard.json} | 2618 +++++++++-------- portal/network/state/state_network.nim | 2 +- .../test_state_endpoints_vectors.nim | 2 +- .../test_portal_wire_protocol.nim | 2 +- 5 files changed, 1463 insertions(+), 1163 deletions(-) rename portal/metrics/grafana/{fluffy_grafana_dashboard.json => portal_grafana_dashboard.json} (53%) diff --git a/portal/docs/the_fluffy_book/docs/metrics.md b/portal/docs/the_fluffy_book/docs/metrics.md index e098962ba7..23fd907492 100644 --- a/portal/docs/the_fluffy_book/docs/metrics.md +++ b/portal/docs/the_fluffy_book/docs/metrics.md @@ -24,7 +24,7 @@ over time and to also visualise them one can use for 
example Prometheus and Graf The steps on how to set up metrics visualisation with Prometheus and Grafana is explained in [this guide](https://nimbus.guide/metrics-pretty-pictures.html#prometheus-and-grafana). -A Nimbus Portal specific dashboard can be found [here](https://github.com/status-im/nimbus-eth1/blob/master/portal/metrics/grafana/fluffy_grafana_dashboard.json). +A Nimbus Portal specific dashboard can be found [here](https://github.com/status-im/nimbus-eth1/blob/master/portal/metrics/grafana/portal_grafana_dashboard.json). This is the dashboard used for our Nimbus Portal network fleet. In order to use it locally, you will have to remove the diff --git a/portal/metrics/grafana/fluffy_grafana_dashboard.json b/portal/metrics/grafana/portal_grafana_dashboard.json similarity index 53% rename from portal/metrics/grafana/fluffy_grafana_dashboard.json rename to portal/metrics/grafana/portal_grafana_dashboard.json index 8f48d97049..85d487ffb6 100644 --- a/portal/metrics/grafana/fluffy_grafana_dashboard.json +++ b/portal/metrics/grafana/portal_grafana_dashboard.json @@ -21,18 +21,17 @@ } ] }, - "description": "Dashboard for Fluffy, a Portal network client", + "description": "Dashboard for Nimbus Portal client", "editable": true, "fiscalYearStartMonth": 0, "graphTooltip": 0, - "id": 16, + "id": 105, "links": [], - "liveNow": false, "panels": [ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "fieldConfig": { "defaults": { @@ -69,6 +68,7 @@ "graphMode": "none", "justifyMode": "auto", "orientation": "auto", + "percentChangeColorMode": "standard", "reduceOptions": { "calcs": [ "last" @@ -76,15 +76,17 @@ "fields": "", "values": false }, + "showPercentChange": false, "text": {}, - "textMode": "auto" + "textMode": "auto", + "wideLayout": true }, - "pluginVersion": "9.2.5", + "pluginVersion": "11.5.2", "targets": [ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + 
"uid": "P6693426190CB2316" }, "exemplar": false, "expr": "time() - process_start_time_seconds{instance=\"${instance}\",container=\"${container}\"}", @@ -99,7 +101,7 @@ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "fieldConfig": { "defaults": { @@ -138,6 +140,7 @@ "graphMode": "none", "justifyMode": "auto", "orientation": "auto", + "percentChangeColorMode": "standard", "reduceOptions": { "calcs": [ "lastNotNull" @@ -145,15 +148,17 @@ "fields": "", "values": false }, + "showPercentChange": false, "text": {}, - "textMode": "auto" + "textMode": "auto", + "wideLayout": true }, - "pluginVersion": "9.2.5", + "pluginVersion": "11.5.2", "targets": [ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "exemplar": false, "expr": "rate(process_cpu_seconds_total{instance=\"${instance}\",container=\"${container}\"}[$__rate_interval]) * 100", @@ -168,7 +173,7 @@ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "fieldConfig": { "defaults": { @@ -205,6 +210,7 @@ "graphMode": "none", "justifyMode": "auto", "orientation": "auto", + "percentChangeColorMode": "standard", "reduceOptions": { "calcs": [ "lastNotNull" @@ -212,15 +218,17 @@ "fields": "", "values": false }, + "showPercentChange": false, "text": {}, - "textMode": "auto" + "textMode": "auto", + "wideLayout": true }, - "pluginVersion": "9.2.5", + "pluginVersion": "11.5.2", "targets": [ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "exemplar": false, "expr": "process_resident_memory_bytes{instance=\"${instance}\",container=\"${container}\"}", @@ -235,7 +243,7 @@ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "fieldConfig": { "defaults": { @@ -272,6 +280,7 @@ "graphMode": "none", "justifyMode": "auto", "orientation": "auto", + "percentChangeColorMode": "standard", 
"reduceOptions": { "calcs": [ "lastNotNull" @@ -279,15 +288,17 @@ "fields": "", "values": false }, + "showPercentChange": false, "text": {}, - "textMode": "auto" + "textMode": "auto", + "wideLayout": true }, - "pluginVersion": "9.2.5", + "pluginVersion": "11.5.2", "targets": [ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "editorMode": "code", "exemplar": false, @@ -302,58 +313,98 @@ "type": "stat" }, { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" + }, + "description": "Nimbus Portal client version and Nim version", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] }, - "description": "Fluffy and Nim version", - "fill": 1, - "fillGradient": 0, "gridPos": { "h": 4, "w": 12, "x": 12, "y": 0 }, - "hiddenSeries": false, "id": 50, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", "options": { - 
"alertThreshold": true - }, - "percentage": false, - "pluginVersion": "9.2.5", - "pointradius": 2, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, + "alertThreshold": true, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.5.2", "targets": [ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "editorMode": "code", "exemplar": true, - "expr": "fluffy_version{instance=\"${instance}\",container=\"${container}\"}", + "expr": "nimbus_portal_client_version{instance=\"${instance}\",container=\"${container}\"}", "interval": "", "legendFormat": "{{version}}", "range": true, @@ -362,11 +413,11 @@ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "editorMode": "code", "exemplar": true, - "expr": "fluffy_nim_version{instance=\"${instance}\",container=\"${container}\"}", + "expr": "nimbus_portal_client_nim_version{instance=\"${instance}\",container=\"${container}\"}", "hide": false, "interval": "", "legendFormat": "{{version}}-{{nim_commit}}", @@ -374,40 +425,13 @@ "refId": "B" } ], - "thresholds": [], - "timeRegions": [], "title": "Version", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "logBase": 1, - "show": true - }, - { - "format": "short", - "logBase": 1, - "show": true - } - ], - "yaxis": { - "align": false - } + "type": "timeseries" }, { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "fieldConfig": { "defaults": { @@ -443,6 +467,7 @@ "graphMode": "area", "justifyMode": "auto", "orientation": "auto", + "percentChangeColorMode": 
"standard", "reduceOptions": { "calcs": [ "lastNotNull" @@ -450,15 +475,17 @@ "fields": "", "values": false }, + "showPercentChange": false, "text": {}, - "textMode": "auto" + "textMode": "auto", + "wideLayout": true }, - "pluginVersion": "9.2.5", + "pluginVersion": "11.5.2", "targets": [ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "exemplar": false, "expr": "discovery_enr_auto_update_total{instance=\"${instance}\",container=\"${container}\"}", @@ -473,7 +500,7 @@ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "fieldConfig": { "defaults": { @@ -509,6 +536,7 @@ "graphMode": "none", "justifyMode": "auto", "orientation": "auto", + "percentChangeColorMode": "standard", "reduceOptions": { "calcs": [ "lastNotNull" @@ -516,15 +544,17 @@ "fields": "", "values": false }, + "showPercentChange": false, "text": {}, - "textMode": "auto" + "textMode": "auto", + "wideLayout": true }, - "pluginVersion": "9.2.5", + "pluginVersion": "11.5.2", "targets": [ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "exemplar": false, "expr": "utp_established_connections{instance=\"${instance}\",container=\"${container}\"}", @@ -539,7 +569,7 @@ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "fieldConfig": { "defaults": { @@ -575,6 +605,7 @@ "graphMode": "none", "justifyMode": "auto", "orientation": "auto", + "percentChangeColorMode": "standard", "reduceOptions": { "calcs": [ "lastNotNull" @@ -582,15 +613,17 @@ "fields": "", "values": false }, + "showPercentChange": false, "text": {}, - "textMode": "auto" + "textMode": "auto", + "wideLayout": true }, - "pluginVersion": "9.2.5", + "pluginVersion": "11.5.2", "targets": [ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "editorMode": "code", "exemplar": false, @@ -607,7 +640,7 @@ { 
"datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "fieldConfig": { "defaults": { @@ -643,6 +676,7 @@ "graphMode": "none", "justifyMode": "auto", "orientation": "auto", + "percentChangeColorMode": "standard", "reduceOptions": { "calcs": [ "lastNotNull" @@ -650,15 +684,17 @@ "fields": "", "values": false }, + "showPercentChange": false, "text": {}, - "textMode": "auto" + "textMode": "auto", + "wideLayout": true }, - "pluginVersion": "9.2.5", + "pluginVersion": "11.5.2", "targets": [ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "editorMode": "code", "exemplar": false, @@ -673,69 +709,142 @@ "type": "stat" }, { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "Nim GC mem used" + }, + "properties": [ + { + "id": "unit", + "value": "bytes" + }, + { + "id": "custom.axisPlacement", + "value": "right" 
+ } + ] + }, + { + "matcher": { + "id": "byName", + "options": "Nim GC mem total" + }, + "properties": [ + { + "id": "unit", + "value": "bytes" + }, + { + "id": "custom.axisPlacement", + "value": "right" + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "RSS" + }, + "properties": [ + { + "id": "unit", + "value": "bytes" + }, + { + "id": "custom.axisPlacement", + "value": "right" + } + ] + } + ] }, - "fill": 1, - "fillGradient": 0, "gridPos": { "h": 8, "w": 12, "x": 0, "y": 4 }, - "hiddenSeries": false, "id": 20, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", "options": { - "alertThreshold": true - }, - "percentage": false, - "pluginVersion": "9.2.5", - "pointradius": 2, - "points": false, - "renderer": "flot", - "seriesOverrides": [ - { - "$$hashKey": "object:2151", - "alias": "Nim GC mem used", - "yaxis": 2 - }, - { - "$$hashKey": "object:2158", - "alias": "Nim GC mem total", - "yaxis": 2 + "alertThreshold": true, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true }, - { - "$$hashKey": "object:2159", - "alias": "RSS", - "yaxis": 2 + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" } - ], - "spaceLength": 10, - "stack": false, - "steppedLine": false, + }, + "pluginVersion": "11.5.2", "targets": [ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "exemplar": false, "expr": "rate(process_cpu_seconds_total{instance=\"${instance}\",container=\"${container}\"}[$__rate_interval]) * 100", @@ -746,7 +855,7 @@ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "exemplar": false, "expr": "process_open_fds{instance=\"${instance}\",container=\"${container}\"}", @@ -758,7 +867,7 @@ { "datasource": { "type": "prometheus", - "uid": 
"P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "exemplar": false, "expr": "process_resident_memory_bytes{instance=\"${instance}\",container=\"${container}\"}", @@ -770,7 +879,7 @@ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "exemplar": false, "expr": "sum(nim_gc_mem_bytes{instance=\"${instance}\",container=\"${container}\"})", @@ -782,7 +891,7 @@ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "editorMode": "code", "exemplar": false, @@ -794,86 +903,97 @@ "refId": "E" } ], - "thresholds": [], - "timeRegions": [], "title": "Resource usage", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "show": true, - "values": [] - }, - "yaxes": [ - { - "$$hashKey": "object:2080", - "format": "short", - "logBase": 1, - "show": true - }, - { - "$$hashKey": "object:2081", - "format": "bytes", - "logBase": 1, - "show": true - } - ], - "yaxis": { - "align": false - } + "type": "timeseries" }, { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + 
"thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] }, - "fill": 1, - "fillGradient": 0, "gridPos": { "h": 8, "w": 12, "x": 12, "y": 4 }, - "hiddenSeries": false, "id": 6, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", "options": { - "alertThreshold": true - }, - "percentage": false, - "pluginVersion": "9.2.5", - "pointradius": 2, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, + "alertThreshold": true, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.5.2", "targets": [ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "exemplar": false, "expr": "rate(portal_message_requests_outgoing_total{instance=\"${instance}\",container=\"${container}\"}[$__rate_interval])", @@ -884,7 +1004,7 @@ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "exemplar": false, "expr": "rate(portal_message_response_incoming_total{instance=\"${instance}\",container=\"${container}\"}[$__rate_interval])", @@ -894,174 +1014,196 @@ "refId": "B" } ], - "thresholds": [], - "timeRegions": [], "title": "Portal message requests out / responses in", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "show": true, - "values": [] - }, - "yaxes": [ - { - "$$hashKey": "object:536", - "format": "short", - "logBase": 1, - "show": true - }, - { - "$$hashKey": "object:537", - "format": "short", - "logBase": 1, - 
"show": true - } - ], - "yaxis": { - "align": false - } + "type": "timeseries" }, { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" - }, - "fill": 1, - "fillGradient": 0, - "gridPos": { - "h": 8, - "w": 12, - "x": 0, - "y": 12 + "uid": "P6693426190CB2316" }, - "hiddenSeries": false, - "id": 22, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", - "options": { - "alertThreshold": true - }, - "percentage": false, - "pluginVersion": "9.2.5", - "pointradius": 2, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" }, - "exemplar": false, - "expr": "nim_gc_heap_instance_occupied_bytes{instance=\"${instance}\",container=\"${container}\"}", - "interval": "", - "legendFormat": "{{type_name}}", - "refId": "A" - } - ], - "thresholds": [], - "timeRegions": [], - "title": "GC heap objects", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "show": true, - "values": [] - }, - "yaxes": [ - { - "$$hashKey": "object:2326", - "format": "bytes", - "logBase": 1, - "show": true - }, - { - "$$hashKey": "object:2327", - "format": "short", - "logBase": 1, - "show": true - } - ], - "yaxis": { - "align": false - } - }, - { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, - "datasource": { - "type": "prometheus", - "uid": "P5354BB48EF90EBE5" - }, - "fill": 1, - "fillGradient": 0, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + 
"axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "bytes" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 12 + }, + "id": 22, + "options": { + "alertThreshold": true, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.5.2", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "P6693426190CB2316" + }, + "exemplar": false, + "expr": "nim_gc_heap_instance_occupied_bytes{instance=\"${instance}\",container=\"${container}\"}", + "interval": "", + "legendFormat": "{{type_name}}", + "refId": "A" + } + ], + "title": "GC heap objects", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "P6693426190CB2316" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + 
"scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, "gridPos": { "h": 8, "w": 12, "x": 12, "y": 12 }, - "hiddenSeries": false, "id": 4, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", "options": { - "alertThreshold": true - }, - "percentage": false, - "pluginVersion": "9.2.5", - "pointradius": 2, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, + "alertThreshold": true, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.5.2", "targets": [ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "exemplar": false, "expr": "rate(portal_message_requests_incoming_total{instance=\"${instance}\",container=\"${container}\"}[$__rate_interval])", @@ -1072,7 +1214,7 @@ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "exemplar": false, "expr": "", @@ -1082,86 +1224,97 @@ "refId": "B" } ], - "thresholds": [], - "timeRegions": [], "title": "Portal message requests in", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "show": true, - "values": [] - }, - "yaxes": [ - { - "$$hashKey": "object:477", - "format": "short", - "logBase": 1, - "show": true - }, - { - 
"$$hashKey": "object:478", - "format": "short", - "logBase": 1, - "show": true - } - ], - "yaxis": { - "align": false - } + "type": "timeseries" }, { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] }, - "fill": 1, - "fillGradient": 0, "gridPos": { "h": 9, "w": 12, "x": 0, "y": 20 }, - "hiddenSeries": false, "id": 18, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", "options": { - "alertThreshold": true - }, - "percentage": false, - "pluginVersion": "9.2.5", - "pointradius": 2, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, + "alertThreshold": true, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + 
"sort": "none" + } + }, + "pluginVersion": "11.5.2", "targets": [ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "exemplar": false, "expr": "rate(discovery_session_lru_cache_hits_total{instance=\"${instance}\",container=\"${container}\"}[$__rate_interval]) / (rate(discovery_session_lru_cache_hits_total{instance=\"${instance}\",container=\"${container}\"}[$__rate_interval]) + rate(discovery_session_lru_cache_misses_total{instance=\"${instance}\",container=\"${container}\"}[$__rate_interval]))", @@ -1173,7 +1326,7 @@ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "exemplar": false, "expr": "rate(discovery_session_decrypt_failures_total{instance=\"${instance}\",container=\"${container}\"}[$__rate_interval])", @@ -1183,42 +1336,13 @@ "refId": "C" } ], - "thresholds": [], - "timeRegions": [], "title": "Discv5 session cache hit/miss ratio", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "show": true, - "values": [] - }, - "yaxes": [ - { - "$$hashKey": "object:1438", - "format": "short", - "logBase": 1, - "show": true - }, - { - "$$hashKey": "object:1439", - "format": "short", - "logBase": 1, - "show": true - } - ], - "yaxis": { - "align": false - } + "type": "timeseries" }, { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "fieldConfig": { "defaults": { @@ -1251,8 +1375,16 @@ "id": 10, "options": { "displayMode": "gradient", + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": false + }, + "maxVizHeight": 300, "minVizHeight": 10, "minVizWidth": 0, + "namePlacement": "auto", "orientation": "auto", "reduceOptions": { "calcs": [ @@ -1262,14 +1394,16 @@ "values": false }, "showUnfilled": true, - "text": {} + "sizing": "auto", + "text": {}, + "valueMode": "color" }, - "pluginVersion": "9.2.5", + 
"pluginVersion": "11.5.2", "targets": [ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "exemplar": false, "expr": "portal_lookup_content_requests_bucket{instance=\"${instance}\",container=\"${container}\"}", @@ -1285,7 +1419,7 @@ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "fieldConfig": { "defaults": { @@ -1318,8 +1452,16 @@ "id": 14, "options": { "displayMode": "gradient", + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": false + }, + "maxVizHeight": 300, "minVizHeight": 10, "minVizWidth": 0, + "namePlacement": "auto", "orientation": "auto", "reduceOptions": { "calcs": [ @@ -1329,14 +1471,16 @@ "values": false }, "showUnfilled": true, - "text": {} + "sizing": "auto", + "text": {}, + "valueMode": "color" }, - "pluginVersion": "9.2.5", + "pluginVersion": "11.5.2", "targets": [ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "exemplar": false, "expr": "portal_nodes_enrs_packed_bucket{instance=\"${instance}\",container=\"${container}\"}", @@ -1352,7 +1496,7 @@ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "fieldConfig": { "defaults": { @@ -1385,8 +1529,16 @@ "id": 12, "options": { "displayMode": "gradient", + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": false + }, + "maxVizHeight": 300, "minVizHeight": 10, "minVizWidth": 0, + "namePlacement": "auto", "orientation": "auto", "reduceOptions": { "calcs": [ @@ -1396,14 +1548,16 @@ "values": false }, "showUnfilled": true, - "text": {} + "sizing": "auto", + "text": {}, + "valueMode": "color" }, - "pluginVersion": "9.2.5", + "pluginVersion": "11.5.2", "targets": [ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "exemplar": false, "expr": 
"portal_content_keys_offered_bucket{instance=\"${instance}\",container=\"${container}\"}", @@ -1419,7 +1573,7 @@ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "fieldConfig": { "defaults": { @@ -1452,8 +1606,16 @@ "id": 16, "options": { "displayMode": "gradient", + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": false + }, + "maxVizHeight": 300, "minVizHeight": 10, "minVizWidth": 0, + "namePlacement": "auto", "orientation": "auto", "reduceOptions": { "calcs": [ @@ -1463,14 +1625,16 @@ "values": false }, "showUnfilled": true, - "text": {} + "sizing": "auto", + "text": {}, + "valueMode": "color" }, - "pluginVersion": "9.2.5", + "pluginVersion": "11.5.2", "targets": [ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "exemplar": false, "expr": "portal_content_enrs_packed_bucket{instance=\"${instance}\",container=\"${container}\"}", @@ -1484,56 +1648,96 @@ "type": "bargauge" }, { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" - }, - "fill": 1, - "fillGradient": 0, - "gridPos": { - "h": 8, - "w": 12, - "x": 0, - "y": 29 + "uid": "P6693426190CB2316" }, - "hiddenSeries": false, - "id": 24, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", - "options": { - "alertThreshold": true - }, - "percentage": false, - "pluginVersion": "9.2.5", - "pointradius": 2, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" }, - "exemplar": false, - "expr": 
"rate(discovery_message_requests_incoming_total{instance=\"${instance}\",container=\"${container}\"}[$__rate_interval])", + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 29 + }, + "id": 24, + "options": { + "alertThreshold": true, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.5.2", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "P6693426190CB2316" + }, + "exemplar": false, + "expr": "rate(discovery_message_requests_incoming_total{instance=\"${instance}\",container=\"${container}\"}[$__rate_interval])", "interval": "", "legendFormat": "discovery_message_requests_incoming [{{response}}]", "refId": "A" @@ -1541,7 +1745,7 @@ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "exemplar": false, "expr": "rate(discovery_message_requests_outgoing_total{instance=\"${instance}\",container=\"${container}\"}[$__rate_interval])", @@ -1553,7 +1757,7 @@ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + 
"uid": "P6693426190CB2316" }, "exemplar": false, "expr": "rate(discovery_unsolicited_messages_total{instance=\"${instance}\",container=\"${container}\"}[$__rate_interval])", @@ -1563,42 +1767,13 @@ "refId": "C" } ], - "thresholds": [], - "timeRegions": [], "title": "Discv5 message rates", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "show": true, - "values": [] - }, - "yaxes": [ - { - "$$hashKey": "object:2385", - "format": "short", - "logBase": 1, - "show": true - }, - { - "$$hashKey": "object:2386", - "format": "short", - "logBase": 1, - "show": true - } - ], - "yaxis": { - "align": false - } + "type": "timeseries" }, { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "fieldConfig": { "defaults": { @@ -1631,8 +1806,16 @@ "id": 15, "options": { "displayMode": "gradient", + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": false + }, + "maxVizHeight": 300, "minVizHeight": 10, "minVizWidth": 0, + "namePlacement": "auto", "orientation": "auto", "reduceOptions": { "calcs": [ @@ -1642,14 +1825,16 @@ "values": false }, "showUnfilled": true, - "text": {} + "sizing": "auto", + "text": {}, + "valueMode": "color" }, - "pluginVersion": "9.2.5", + "pluginVersion": "11.5.2", "targets": [ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "exemplar": false, "expr": "portal_content_keys_accepted_bucket{instance=\"${instance}\",container=\"${container}\"}", @@ -1664,54 +1849,94 @@ "type": "bargauge" }, { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "description": "", - "fill": 1, - "fillGradient": 0, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + 
"axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, "gridPos": { "h": 9, "w": 12, "x": 0, "y": 37 }, - "hiddenSeries": false, "id": 2, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", "options": { - "alertThreshold": true - }, - "percentage": false, - "pluginVersion": "9.2.5", - "pointradius": 2, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, + "alertThreshold": true, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.5.2", "targets": [ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "exemplar": false, "expr": "routing_table_nodes{instance=\"${instance}\",container=\"${container}\"}", @@ -1720,86 +1945,97 @@ "refId": "A" } ], - "thresholds": [], - "timeRegions": [], "title": "Routing table nodes - BUGGED: Metrics count over all routing tables together", - "tooltip": { - "shared": true, - "sort": 0, - 
"value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "show": true, - "values": [] - }, - "yaxes": [ - { - "$$hashKey": "object:301", - "format": "short", - "logBase": 1, - "show": true - }, - { - "$$hashKey": "object:302", - "format": "short", - "logBase": 1, - "show": true - } - ], - "yaxis": { - "align": false - } + "type": "timeseries" }, { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] }, - "fill": 1, - "fillGradient": 0, "gridPos": { "h": 8, "w": 12, "x": 12, "y": 44 }, - "hiddenSeries": false, "id": 44, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", "options": { - "alertThreshold": true - }, - "percentage": false, - "pluginVersion": "9.2.5", - "pointradius": 2, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - 
"stack": false, - "steppedLine": false, + "alertThreshold": true, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.5.2", "targets": [ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "exemplar": false, "expr": "rate(portal_gossip_offers_successful_total{instance=\"${instance}\",container=\"${container}\"}[$__rate_interval])", @@ -1810,7 +2046,7 @@ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "exemplar": false, "expr": "rate(portal_gossip_offers_failed_total{instance=\"${instance}\",container=\"${container}\"}[$__rate_interval])", @@ -1820,86 +2056,97 @@ "refId": "B" } ], - "thresholds": [], - "timeRegions": [], "title": "Neighborhood gossip content offers", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "show": true, - "values": [] - }, - "yaxes": [ - { - "$$hashKey": "object:4139", - "format": "short", - "logBase": 1, - "show": true - }, - { - "$$hashKey": "object:4140", - "format": "short", - "logBase": 1, - "show": true - } - ], - "yaxis": { - "align": false - } + "type": "timeseries" }, { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + 
"lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] }, - "fill": 1, - "fillGradient": 0, "gridPos": { "h": 8, "w": 12, "x": 0, "y": 46 }, - "hiddenSeries": false, "id": 34, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", "options": { - "alertThreshold": true - }, - "percentage": false, - "pluginVersion": "9.2.5", - "pointradius": 2, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, + "alertThreshold": true, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.5.2", "targets": [ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "exemplar": false, "expr": "rate(utp_allowed_incoming_total{instance=\"${instance}\",container=\"${container}\"}[$__rate_interval])", @@ -1910,7 +2157,7 @@ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "exemplar": false, "expr": "rate(utp_declined_incoming_total{instance=\"${instance}\",container=\"${container}\"}[$__rate_interval])", @@ -1920,86 +2167,97 @@ "refId": "B" } ], - "thresholds": [], - "timeRegions": [], "title": "uTP incoming connections", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": 
"graph", - "xaxis": { - "mode": "time", - "show": true, - "values": [] - }, - "yaxes": [ - { - "$$hashKey": "object:3063", - "format": "short", - "logBase": 1, - "show": true - }, - { - "$$hashKey": "object:3064", - "format": "short", - "logBase": 1, - "show": true - } - ], - "yaxis": { - "align": false - } + "type": "timeseries" }, { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] }, - "fill": 1, - "fillGradient": 0, "gridPos": { "h": 8, "w": 12, "x": 12, "y": 52 }, - "hiddenSeries": false, "id": 46, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", "options": { - "alertThreshold": true - }, - "percentage": false, - "pluginVersion": "9.2.5", - "pointradius": 2, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, + 
"alertThreshold": true, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.5.2", "targets": [ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "editorMode": "code", "exemplar": false, @@ -2012,7 +2270,7 @@ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "editorMode": "code", "exemplar": false, @@ -2026,7 +2284,7 @@ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "editorMode": "code", "exemplar": false, @@ -2038,86 +2296,97 @@ "refId": "C" } ], - "thresholds": [], - "timeRegions": [], "title": "Neighborhood gossip node lookups", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "show": true, - "values": [] - }, - "yaxes": [ - { - "$$hashKey": "object:97", - "format": "short", - "logBase": 1, - "show": true - }, - { - "$$hashKey": "object:98", - "format": "short", - "logBase": 1, - "show": true - } - ], - "yaxis": { - "align": false - } + "type": "timeseries" }, { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": 
"linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] }, - "fill": 1, - "fillGradient": 0, "gridPos": { "h": 8, "w": 12, "x": 0, "y": 54 }, - "hiddenSeries": false, "id": 36, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", "options": { - "alertThreshold": true - }, - "percentage": false, - "pluginVersion": "9.2.5", - "pointradius": 2, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, + "alertThreshold": true, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.5.2", "targets": [ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "exemplar": false, "expr": "rate(utp_success_outgoing_total{instance=\"${instance}\",container=\"${container}\"}[$__rate_interval])", @@ -2128,96 +2397,107 @@ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" + }, + "exemplar": false, + "expr": "rate(utp_failed_outgoing_total{instance=\"${instance}\",container=\"${container}\"}[$__rate_interval])", + "hide": false, + "interval": "", + "legendFormat": "utp_failed_outgoing", + "refId": "B" + } + ], + "title": "uTP outgoing connections", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "P6693426190CB2316" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": 
"palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] }, - "exemplar": false, - "expr": "rate(utp_failed_outgoing_total{instance=\"${instance}\",container=\"${container}\"}[$__rate_interval])", - "hide": false, - "interval": "", - "legendFormat": "utp_failed_outgoing", - "refId": "B" - } - ], - "thresholds": [], - "timeRegions": [], - "title": "uTP outgoing connections", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "show": true, - "values": [] - }, - "yaxes": [ - { - "$$hashKey": "object:3122", - "format": "short", - "logBase": 1, - "show": true + "unit": "short" }, - { - "$$hashKey": "object:3123", - "format": "short", - "logBase": 1, - "show": true - } - ], - "yaxis": { - "align": false - } - }, - { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, - "datasource": { - "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "overrides": [] }, - "fill": 1, - "fillGradient": 0, "gridPos": { "h": 8, "w": 12, "x": 12, "y": 60 }, - "hiddenSeries": false, "id": 8, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - 
"linewidth": 1, - "nullPointMode": "null", "options": { - "alertThreshold": true - }, - "percentage": false, - "pluginVersion": "9.2.5", - "pointradius": 2, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, + "alertThreshold": true, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.5.2", "targets": [ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "exemplar": false, "expr": "portal_message_decoding_failures_total{instance=\"${instance}\",container=\"${container}\"}", @@ -2226,86 +2506,97 @@ "refId": "A" } ], - "thresholds": [], - "timeRegions": [], "title": "Portal message decoding failures", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "show": true, - "values": [] - }, - "yaxes": [ - { - "$$hashKey": "object:595", - "format": "short", - "logBase": 1, - "show": true - }, - { - "$$hashKey": "object:596", - "format": "short", - "logBase": 1, - "show": true - } - ], - "yaxis": { - "align": false - } + "type": "timeseries" }, { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 
5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] }, - "fill": 1, - "fillGradient": 0, "gridPos": { "h": 8, "w": 12, "x": 0, "y": 62 }, - "hiddenSeries": false, "id": 38, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", "options": { - "alertThreshold": true - }, - "percentage": false, - "pluginVersion": "9.2.5", - "pointradius": 2, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, + "alertThreshold": true, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.5.2", "targets": [ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "exemplar": false, "expr": "rate(utp_failed_packets_total{instance=\"${instance}\",container=\"${container}\"}[$__rate_interval])", @@ -2316,7 +2607,7 @@ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "exemplar": false, "expr": "rate(utp_received_packets_total{instance=\"${instance}\",container=\"${container}\"}[$__rate_interval])", @@ -2326,42 +2617,13 @@ "refId": "B" } ], - "thresholds": [], - "timeRegions": [], "title": "uTP Packets", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "show": true, - "values": [] - }, - 
"yaxes": [ - { - "$$hashKey": "object:3181", - "format": "short", - "logBase": 1, - "show": true - }, - { - "$$hashKey": "object:3182", - "format": "short", - "logBase": 1, - "show": true - } - ], - "yaxis": { - "align": false - } + "type": "timeseries" }, { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "fieldConfig": { "defaults": { @@ -2394,8 +2656,16 @@ "id": 52, "options": { "displayMode": "gradient", + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": false + }, + "maxVizHeight": 300, "minVizHeight": 10, "minVizWidth": 0, + "namePlacement": "auto", "orientation": "auto", "reduceOptions": { "calcs": [ @@ -2405,14 +2675,16 @@ "values": false }, "showUnfilled": true, - "text": {} + "sizing": "auto", + "text": {}, + "valueMode": "color" }, - "pluginVersion": "9.2.5", + "pluginVersion": "11.5.2", "targets": [ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "editorMode": "code", "exemplar": false, @@ -2429,53 +2701,93 @@ "type": "bargauge" }, { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + 
"mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] }, - "fill": 1, - "fillGradient": 0, "gridPos": { "h": 8, "w": 12, "x": 0, "y": 70 }, - "hiddenSeries": false, "id": 42, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", "options": { - "alertThreshold": true - }, - "percentage": false, - "pluginVersion": "9.2.5", - "pointradius": 2, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, + "alertThreshold": true, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.5.2", "targets": [ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "exemplar": false, "expr": "utp_established_connections{instance=\"${instance}\",container=\"${container}\"}", @@ -2484,42 +2796,13 @@ "refId": "A" } ], - "thresholds": [], - "timeRegions": [], "title": "uTP established connections", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "show": true, - "values": [] - }, - "yaxes": [ - { - "$$hashKey": "object:3811", - "format": "short", - "logBase": 1, - "show": true - }, - { - "$$hashKey": "object:3812", - "format": "short", - "logBase": 1, - "show": true - } - ], - "yaxis": { - "align": false - } + "type": "timeseries" }, { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "fieldConfig": { "defaults": { @@ -2527,11 +2810,13 @@ "mode": "palette-classic" }, "custom": { + 
"axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 0, "gradientMode": "none", @@ -2540,6 +2825,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2589,15 +2875,17 @@ "showLegend": true }, "tooltip": { + "hideZeros": false, "mode": "single", "sort": "none" } }, + "pluginVersion": "11.5.2", "targets": [ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "editorMode": "code", "exemplar": false, @@ -2610,7 +2898,7 @@ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "editorMode": "code", "exemplar": false, @@ -2628,7 +2916,7 @@ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "fieldConfig": { "defaults": { @@ -2661,8 +2949,16 @@ "id": 53, "options": { "displayMode": "gradient", + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": false + }, + "maxVizHeight": 300, "minVizHeight": 10, "minVizWidth": 0, + "namePlacement": "auto", "orientation": "auto", "reduceOptions": { "calcs": [ @@ -2672,14 +2968,16 @@ "values": false }, "showUnfilled": true, - "text": {} + "sizing": "auto", + "text": {}, + "valueMode": "color" }, - "pluginVersion": "9.2.5", + "pluginVersion": "11.5.2", "targets": [ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "editorMode": "code", "exemplar": false, @@ -2696,53 +2994,93 @@ "type": "bargauge" }, { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + 
"axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] }, - "fill": 1, - "fillGradient": 0, "gridPos": { "h": 8, "w": 12, "x": 0, "y": 86 }, - "hiddenSeries": false, "id": 56, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", "options": { - "alertThreshold": true - }, - "percentage": false, - "pluginVersion": "9.2.5", - "pointradius": 2, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, + "alertThreshold": true, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.5.2", "targets": [ { "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "editorMode": "code", "exemplar": false, @@ -2753,59 +3091,27 @@ "refId": "A" } ], - "thresholds": [], - "timeRegions": [], "title": "Portal poke offers", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - 
"xaxis": { - "mode": "time", - "show": true, - "values": [] - }, - "yaxes": [ - { - "$$hashKey": "object:3811", - "format": "short", - "logBase": 1, - "show": true - }, - { - "$$hashKey": "object:3812", - "format": "short", - "logBase": 1, - "show": true - } - ], - "yaxis": { - "align": false - } + "type": "timeseries" } ], + "preload": false, "refresh": "5s", - "schemaVersion": 37, - "style": "dark", + "schemaVersion": 40, "tags": [], "templating": { "list": [ { "current": { - "selected": true, "text": "metal-01.ih-eu-mda1.nimbus.fluffy", "value": "metal-01.ih-eu-mda1.nimbus.fluffy" }, "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, "definition": "label_values(portal_message_requests_incoming_total{job=\"nimbus-fluffy-metrics\"}, instance)", - "hide": 0, "includeAll": false, - "multi": false, "name": "instance", "options": [], "query": { @@ -2814,46 +3120,40 @@ }, "refresh": 1, "regex": "", - "skipUrlSync": false, "sort": 1, "type": "query" }, { "current": { - "selected": true, "text": "nimbus-fluffy-mainnet-master-01", "value": "nimbus-fluffy-mainnet-master-01" }, "datasource": { "type": "prometheus", - "uid": "P5354BB48EF90EBE5" + "uid": "P6693426190CB2316" }, - "definition": "label_values(portal_message_requests_incoming_total{job=\"nimbus-fluffy-metrics\"}, container)", - "hide": 0, + "definition": "label_values(portal_message_requests_incoming_total{job=\"nimbus-fluffy-metrics\", instance=\"$instance\"}, container)", "includeAll": false, - "multi": false, "name": "container", "options": [], "query": { - "query": "label_values(portal_message_requests_incoming_total{job=\"nimbus-fluffy-metrics\"}, container)", + "query": "label_values(portal_message_requests_incoming_total{job=\"nimbus-fluffy-metrics\", instance=\"$instance\"}, container)", "refId": "StandardVariableQuery" }, "refresh": 1, "regex": "", - "skipUrlSync": false, - "sort": 0, "type": "query" } ] }, "time": { - "from": "now-7d", + "from": "now-24h", 
"to": "now" }, "timepicker": {}, "timezone": "", - "title": "Nimbus Fluffy Dashboard", + "title": "Nimbus Portal", "uid": "iWQQPuPnkadsf", - "version": 16, + "version": 4, "weekStart": "" -} +} \ No newline at end of file diff --git a/portal/network/state/state_network.nim b/portal/network/state/state_network.nim index 11b7b24f6f..f5bba6e3aa 100644 --- a/portal/network/state/state_network.nim +++ b/portal/network/state/state_network.nim @@ -1,4 +1,4 @@ -# Fluffy +# Nimbus # Copyright (c) 2021-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). diff --git a/portal/tests/state_network_tests/test_state_endpoints_vectors.nim b/portal/tests/state_network_tests/test_state_endpoints_vectors.nim index c3c04c5418..8b2dd19f9d 100644 --- a/portal/tests/state_network_tests/test_state_endpoints_vectors.nim +++ b/portal/tests/state_network_tests/test_state_endpoints_vectors.nim @@ -1,4 +1,4 @@ -# Fluffy +# Nimbus # Copyright (c) 2021-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). diff --git a/portal/tests/wire_protocol_tests/test_portal_wire_protocol.nim b/portal/tests/wire_protocol_tests/test_portal_wire_protocol.nim index be3d5a93bc..d2aa9562e9 100644 --- a/portal/tests/wire_protocol_tests/test_portal_wire_protocol.nim +++ b/portal/tests/wire_protocol_tests/test_portal_wire_protocol.nim @@ -1,4 +1,4 @@ -# Fluffy +# Nimbus # Copyright (c) 2021-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). 
From ac509d7be456f409f9047d4acb2f0d5ee33341fd Mon Sep 17 00:00:00 2001 From: bhartnett <51288821+bhartnett@users.noreply.github.com> Date: Wed, 21 May 2025 09:28:02 +0800 Subject: [PATCH 027/138] Portal Client: Remove offer workers and support sending rate limited offers (#3303) * Remove offer workers and replace using rate limiter for offers. * Use asyncSpawn for triggerPoke. * Add content queue workers to history and beacon networks. * Test random gossip and neighborhood gossip. --- portal/network/beacon/beacon_network.nim | 22 ++-- portal/network/history/history_network.nim | 27 ++-- portal/network/portal_node.nim | 2 + portal/network/state/state_network.nim | 4 +- portal/network/wire/portal_protocol.nim | 119 +++++++++--------- .../test_portal_wire_protocol.nim | 51 ++++++++ 6 files changed, 145 insertions(+), 80 deletions(-) diff --git a/portal/network/beacon/beacon_network.nim b/portal/network/beacon/beacon_network.nim index 138f5a7c45..49911306a2 100644 --- a/portal/network/beacon/beacon_network.nim +++ b/portal/network/beacon/beacon_network.nim @@ -33,10 +33,11 @@ type BeaconNetwork* = ref object getBeaconTime: GetBeaconTimeFn cfg*: RuntimeConfig trustedBlockRoot*: Opt[Eth2Digest] - processContentLoop: Future[void] + processContentLoops: seq[Future[void]] statusLogLoop: Future[void] onEpochLoop: Future[void] onPeriodLoop: Future[void] + contentQueueWorkers: int func toContentIdHandler(contentKey: ContentKeyByteList): results.Opt[ContentId] = ok(toContentId(contentKey)) @@ -198,6 +199,7 @@ proc new*( trustedBlockRoot: Opt[Eth2Digest], bootstrapRecords: openArray[Record] = [], portalConfig: PortalProtocolConfig = defaultPortalProtocolConfig, + contentQueueWorkers = 8, ): T = let contentQueue = newAsyncQueue[(Opt[NodeId], ContentKeysList, seq[seq[byte]])](50) @@ -236,6 +238,7 @@ proc new*( getBeaconTime: getBeaconTime, cfg: cfg, trustedBlockRoot: beaconBlockRoot, + contentQueueWorkers: contentQueueWorkers, ) proc lightClientVerifier( @@ -437,7 +440,7 @@ proc 
onPeriodLoop(n: BeaconNetwork) {.async: (raises: []).} = except CancelledError: trace "onPeriodLoop canceled" -proc processContentLoop(n: BeaconNetwork) {.async: (raises: []).} = +proc contentQueueWorker(n: BeaconNetwork) {.async: (raises: []).} = try: while true: let (srcNodeId, contentKeys, contentItems) = await n.contentQueue.popFirst() @@ -447,11 +450,11 @@ proc processContentLoop(n: BeaconNetwork) {.async: (raises: []).} = # TODO: Differentiate between failures due to invalid data and failures # due to missing network data for validation. if await n.validateContent(srcNodeId, contentKeys, contentItems): - asyncSpawn n.portalProtocol.randomGossipDiscardPeers( + await n.portalProtocol.randomGossipDiscardPeers( srcNodeId, contentKeys, contentItems ) except CancelledError: - trace "processContentLoop canceled" + trace "contentQueueWorker canceled" proc statusLogLoop(n: BeaconNetwork) {.async: (raises: []).} = try: @@ -467,7 +470,10 @@ proc start*(n: BeaconNetwork) = info "Starting Portal beacon chain network" n.portalProtocol.start() - n.processContentLoop = processContentLoop(n) + + for i in 0 ..< n.contentQueueWorkers: + n.processContentLoops.add(contentQueueWorker(n)) + n.statusLogLoop = statusLogLoop(n) n.onEpochLoop = onEpochLoop(n) n.onPeriodLoop = onPeriodLoop(n) @@ -478,8 +484,8 @@ proc stop*(n: BeaconNetwork) {.async: (raises: []).} = var futures: seq[Future[void]] futures.add(n.portalProtocol.stop()) - if not n.processContentLoop.isNil(): - futures.add(n.processContentLoop.cancelAndWait()) + for loop in n.processContentLoops: + futures.add(loop.cancelAndWait()) if not n.statusLogLoop.isNil(): futures.add(n.statusLogLoop.cancelAndWait()) @@ -494,5 +500,5 @@ proc stop*(n: BeaconNetwork) {.async: (raises: []).} = n.beaconDb.close() - n.processContentLoop = nil + n.processContentLoops.setLen(0) n.statusLogLoop = nil diff --git a/portal/network/history/history_network.nim b/portal/network/history/history_network.nim index b0ff0a84ac..a4cc8e49fe 100644 --- 
a/portal/network/history/history_network.nim +++ b/portal/network/history/history_network.nim @@ -38,9 +38,10 @@ type contentQueue*: AsyncQueue[(Opt[NodeId], ContentKeysList, seq[seq[byte]])] cfg*: RuntimeConfig verifier*: HeaderVerifier - processContentLoop: Future[void] + processContentLoops: seq[Future[void]] statusLogLoop: Future[void] contentRequestRetries: int + contentQueueWorkers: int Block* = (Header, BlockBody) @@ -161,7 +162,7 @@ proc getVerifiedBlockHeader*( n.portalProtocol.storeContent( contentKey, contentId, headerContent.content, cacheContent = true ) - n.portalProtocol.triggerPoke( + asyncSpawn n.portalProtocol.triggerPoke( headerContent.nodesInterestedInContent, contentKey, headerContent.content ) @@ -209,7 +210,7 @@ proc getBlockBody*( n.portalProtocol.storeContent( contentKey, contentId, bodyContent.content, cacheContent = true ) - n.portalProtocol.triggerPoke( + asyncSpawn n.portalProtocol.triggerPoke( bodyContent.nodesInterestedInContent, contentKey, bodyContent.content ) @@ -288,7 +289,7 @@ proc getReceipts*( n.portalProtocol.storeContent( contentKey, contentId, receiptsContent.content, cacheContent = true ) - n.portalProtocol.triggerPoke( + asyncSpawn n.portalProtocol.triggerPoke( receiptsContent.nodesInterestedInContent, contentKey, receiptsContent.content ) @@ -352,6 +353,7 @@ proc new*( bootstrapRecords: openArray[Record] = [], portalConfig: PortalProtocolConfig = defaultPortalProtocolConfig, contentRequestRetries = 1, + contentQueueWorkers = 8, ): T = let contentQueue = newAsyncQueue[(Opt[NodeId], ContentKeysList, seq[seq[byte]])](50) @@ -383,6 +385,7 @@ proc new*( beaconDbCache: beaconDbCache, ), contentRequestRetries: contentRequestRetries, + contentQueueWorkers: contentQueueWorkers, ) proc validateContent( @@ -415,7 +418,7 @@ proc validateContent( return true -proc processContentLoop(n: HistoryNetwork) {.async: (raises: []).} = +proc contentQueueWorker(n: HistoryNetwork) {.async: (raises: []).} = try: while true: let (srcNodeId, 
contentKeys, contentItems) = await n.contentQueue.popFirst() @@ -425,11 +428,11 @@ proc processContentLoop(n: HistoryNetwork) {.async: (raises: []).} = # TODO: Differentiate between failures due to invalid data and failures # due to missing network data for validation. if await n.validateContent(srcNodeId, contentKeys, contentItems): - asyncSpawn n.portalProtocol.neighborhoodGossipDiscardPeers( + await n.portalProtocol.neighborhoodGossipDiscardPeers( srcNodeId, contentKeys, contentItems ) except CancelledError: - trace "processContentLoop canceled" + trace "contentQueueWorker canceled" proc statusLogLoop(n: HistoryNetwork) {.async: (raises: []).} = try: @@ -449,7 +452,9 @@ proc start*(n: HistoryNetwork) = n.portalProtocol.start() - n.processContentLoop = processContentLoop(n) + for i in 0 ..< n.contentQueueWorkers: + n.processContentLoops.add(contentQueueWorker(n)) + n.statusLogLoop = statusLogLoop(n) proc stop*(n: HistoryNetwork) {.async: (raises: []).} = @@ -458,11 +463,11 @@ proc stop*(n: HistoryNetwork) {.async: (raises: []).} = var futures: seq[Future[void]] futures.add(n.portalProtocol.stop()) - if not n.processContentLoop.isNil: - futures.add(n.processContentLoop.cancelAndWait()) + for loop in n.processContentLoops: + futures.add(loop.cancelAndWait()) if not n.statusLogLoop.isNil: futures.add(n.statusLogLoop.cancelAndWait()) await noCancel(allFutures(futures)) - n.processContentLoop = nil + n.processContentLoops.setLen(0) n.statusLogLoop = nil diff --git a/portal/network/portal_node.nim b/portal/network/portal_node.nim index b534552c83..b0bb6c9ae4 100644 --- a/portal/network/portal_node.nim +++ b/portal/network/portal_node.nim @@ -119,6 +119,7 @@ proc new*( config.trustedBlockRoot, bootstrapRecords = bootstrapRecords, portalConfig = config.portalConfig, + contentQueueWorkers = config.contentQueueWorkers, ) Opt.some(beaconNetwork) else: @@ -142,6 +143,7 @@ proc new*( bootstrapRecords = bootstrapRecords, portalConfig = config.portalConfig, 
contentRequestRetries = config.contentRequestRetries, + contentQueueWorkers = config.contentQueueWorkers, ) ) else: diff --git a/portal/network/state/state_network.nim b/portal/network/state/state_network.nim index f5bba6e3aa..a300e00a5d 100644 --- a/portal/network/state/state_network.nim +++ b/portal/network/state/state_network.nim @@ -134,7 +134,7 @@ proc getContent( interestedNodesCount = lookupRes.nodesInterestedInContent.len() let offer = contentValue.toOffer(maybeParentOffer.get()) - n.portalProtocol.triggerPoke( + asyncSpawn n.portalProtocol.triggerPoke( lookupRes.nodesInterestedInContent, contentKeyBytes, offer.encode() ) @@ -263,7 +263,7 @@ proc contentQueueWorker(n: StateNetwork) {.async: (raises: []).} = error "Received offered content failed validation", srcNodeId, contentKeyBytes, error = offerRes.error() except CancelledError: - trace "processContentLoop canceled" + trace "contentQueueWorker canceled" proc statusLogLoop(n: StateNetwork) {.async: (raises: []).} = try: diff --git a/portal/network/wire/portal_protocol.nim b/portal/network/wire/portal_protocol.nim index 58cbfbad6b..ef35795afc 100644 --- a/portal/network/wire/portal_protocol.nim +++ b/portal/network/wire/portal_protocol.nim @@ -16,6 +16,7 @@ import results, chronicles, chronos, + chronos/ratelimit, nimcrypto/hash, bearssl, ssz_serialization, @@ -206,12 +207,11 @@ type revalidateLoop: Future[void] stream*: PortalStream radiusCache: RadiusCache - offerQueue: AsyncQueue[OfferRequest] - offerWorkers: seq[Future[void]] offerCache*: OfferCache pingTimings: Table[NodeId, chronos.Moment] config*: PortalProtocolConfig pingExtensionCapabilities*: set[uint16] + offerTokenBucket: TokenBucket PortalResult*[T] = Result[T, string] @@ -773,12 +773,14 @@ proc new*( bootstrapRecords: @bootstrapRecords, stream: stream, radiusCache: RadiusCache.init(config.radiusCacheSize), - offerQueue: newAsyncQueue[OfferRequest](config.maxConcurrentOffers), offerCache: OfferCache.init(if config.disableOfferCache: 0 else: 
config.offerCacheSize), pingTimings: Table[NodeId, chronos.Moment](), config: config, pingExtensionCapabilities: pingExtensionCapabilities, + # 0 seconds here indicates no timeout on the TokenBucket which means we need + # to manually call replenish to return tokens to the bucket after usage. + offerTokenBucket: TokenBucket.new(config.maxConcurrentOffers, 0.seconds), ) proto.baseProtocol.registerTalkProtocol(@(proto.protocolId), proto).expect( @@ -1218,18 +1220,25 @@ proc offer*( let req = OfferRequest(dst: dst, kind: Direct, contentList: contentList) await p.offer(req) -proc offerWorker(p: PortalProtocol) {.async: (raises: [CancelledError]).} = - while true: - let req = await p.offerQueue.popFirst() +proc offerRateLimited*( + p: PortalProtocol, offer: OfferRequest +): Future[PortalResult[ContentKeysAcceptList]] {.async: (raises: [CancelledError]).} = + try: + await p.offerTokenBucket.consume(1) + except CancelledError as e: + raise e + except CatchableError as e: + raiseAssert(e.msg) # Shouldn't happen + + let res = await p.offer(offer) + if res.isOk(): + portal_gossip_offers_successful.inc(labelValues = [$p.protocolId]) + else: + portal_gossip_offers_failed.inc(labelValues = [$p.protocolId]) - let res = await p.offer(req) - if res.isOk(): - portal_gossip_offers_successful.inc(labelValues = [$p.protocolId]) - else: - portal_gossip_offers_failed.inc(labelValues = [$p.protocolId]) + p.offerTokenBucket.replenish(1) -proc offerQueueEmpty*(p: PortalProtocol): bool = - p.offerQueue.empty() + res proc lookupWorker( p: PortalProtocol, dst: Node, target: NodeId @@ -1320,7 +1329,7 @@ proc triggerPoke*( nodes: seq[Node], contentKey: ContentKeyByteList, content: seq[byte], -) = +): Future[void] {.async: (raises: [CancelledError]).} = ## In order to properly test gossip mechanisms (e.g. in Portal Hive), ## we need the option to turn off the POKE functionality as it influences ## how data moves around the network. 
@@ -1329,19 +1338,21 @@ proc triggerPoke*( ## Triggers asynchronous offer-accept interaction to provided nodes. ## Provided content should be in range of provided nodes. for node in nodes: - if not p.offerQueue.full(): - try: - let - contentKV = ContentKV(contentKey: contentKey, content: content) - list = List[ContentKV, contentKeysLimit].init(@[contentKV]) - req = OfferRequest(dst: node, kind: Direct, contentList: list) - p.offerQueue.putNoWait(req) - portal_poke_offers.inc(labelValues = [$p.protocolId]) - except AsyncQueueFullError as e: - # Should not occur as full() check is done. - raiseAssert(e.msg) + if p.offerTokenBucket.tryConsume(1): + # tryConsume actually deducts tokens and there is currently + # no API to check the remaining capacity of the bucket so we just + # add the token back here + p.offerTokenBucket.replenish(1) + + let + contentKV = ContentKV(contentKey: contentKey, content: content) + list = List[ContentKV, contentKeysLimit].init(@[contentKV]) + req = OfferRequest(dst: node, kind: Direct, contentList: list) + discard await p.offerRateLimited(req) + + portal_poke_offers.inc(labelValues = [$p.protocolId]) else: - # Offer queue is full, do not start more offer-accept interactions + # The offerTokenBucket is at capacity so do not start more offer-accept interactions return # TODO ContentLookup and Lookup look almost exactly the same, also lookups in other @@ -1731,6 +1742,18 @@ proc queryRandom*( ## Perform a query for a random target, return all nodes discovered. 
p.query(NodeId.random(p.baseProtocol.rng[])) +proc offerBatchGetPeerCount*( + p: PortalProtocol, offers: seq[OfferRequest] +): Future[int] {.async: (raises: [CancelledError]).} = + let futs = await allFinished(offers.mapIt(p.offerRateLimited(it))) + + var peerCount = 0 + for f in futs: + if f.completed() and f.value().isOk(): + inc peerCount # only count successful offers + + peerCount + proc neighborhoodGossip*( p: PortalProtocol, srcNodeId: Opt[NodeId], @@ -1787,7 +1810,7 @@ proc neighborhoodGossip*( # first for the same request. p.baseProtocol.rng[].shuffle(closestLocalNodes) - var numberOfGossipedNodes = 0 + var offers = newSeqOfCap[OfferRequest](p.config.maxGossipNodes) if not enableNodeLookup or closestLocalNodes.len() >= p.config.maxGossipNodes: # use local nodes for gossip @@ -1795,10 +1818,9 @@ proc neighborhoodGossip*( for node in closestLocalNodes: let req = OfferRequest(dst: node, kind: Direct, contentList: contentList) - await p.offerQueue.addLast(req) - inc numberOfGossipedNodes + offers.add(req) - if numberOfGossipedNodes >= p.config.maxGossipNodes: + if offers.len() >= p.config.maxGossipNodes: break else: # use looked up nodes for gossip portal_gossip_with_lookup.inc(labelValues = [$p.protocolId]) @@ -1817,13 +1839,12 @@ proc neighborhoodGossip*( # Only send offers to nodes for which the content is in range of their radius if p.inRange(node.id, radius, contentId): let req = OfferRequest(dst: node, kind: Direct, contentList: contentList) - await p.offerQueue.addLast(req) - inc numberOfGossipedNodes + offers.add(req) - if numberOfGossipedNodes >= p.config.maxGossipNodes: - break + if offers.len() >= p.config.maxGossipNodes: + break - return numberOfGossipedNodes + await p.offerBatchGetPeerCount(offers) proc neighborhoodGossipDiscardPeers*( p: PortalProtocol, @@ -1850,12 +1871,11 @@ proc randomGossip*( let contentKV = ContentKV(contentKey: contentKeys[i], content: contentItem) discard contentList.add(contentKV) - let nodes = 
p.routingTable.randomNodes(p.config.maxGossipNodes) + let + nodes = p.routingTable.randomNodes(p.config.maxGossipNodes) + offers = nodes.mapIt(OfferRequest(dst: it, kind: Direct, contentList: contentList)) - for node in nodes[0 ..< nodes.len()]: - let req = OfferRequest(dst: node, kind: Direct, contentList: contentList) - await p.offerQueue.addLast(req) - return nodes.len() + await p.offerBatchGetPeerCount(offers) proc randomGossipDiscardPeers*( p: PortalProtocol, @@ -2017,21 +2037,6 @@ proc start*(p: PortalProtocol) = p.refreshLoop = refreshLoop(p) p.revalidateLoop = revalidateLoop(p) - # These are the concurrent offers per Portal wire protocol that is running. - # Using the `offerQueue` allows for limiting the amount of offers send and - # thus how many streams can be started. - # TODO: - # More thought needs to go into this as it is currently on a per network - # basis. Keep it simple like that? Or limit it better at the stream transport - # level? In the latter case, this might still need to be checked/blocked at - # the very start of sending the offer, because blocking/waiting too long - # between the received accept message and actually starting the stream and - # sending data could give issues due to timeouts on the other side. - # And then there are still limits to be applied also for FindContent and the - # incoming directions. 
- for i in 0 ..< p.config.maxConcurrentOffers: - p.offerWorkers.add(offerWorker(p)) - proc stop*(p: PortalProtocol) {.async: (raises: []).} = var futures: seq[Future[void]] @@ -2040,14 +2045,10 @@ proc stop*(p: PortalProtocol) {.async: (raises: []).} = if not p.refreshLoop.isNil(): futures.add(p.refreshLoop.cancelAndWait()) - for worker in p.offerWorkers: - futures.add(worker.cancelAndWait()) - await noCancel(allFutures(futures)) p.revalidateLoop = nil p.refreshLoop = nil - p.offerWorkers = @[] proc resolve*( p: PortalProtocol, id: NodeId diff --git a/portal/tests/wire_protocol_tests/test_portal_wire_protocol.nim b/portal/tests/wire_protocol_tests/test_portal_wire_protocol.nim index d2aa9562e9..5748f94b76 100644 --- a/portal/tests/wire_protocol_tests/test_portal_wire_protocol.nim +++ b/portal/tests/wire_protocol_tests/test_portal_wire_protocol.nim @@ -271,6 +271,57 @@ procSuite "Portal Wire Protocol Tests": await proto1.stopPortalProtocol() await proto2.stopPortalProtocol() + asyncTest "Neighborhood gossip - single content key, value": + let (proto1, proto2) = defaultTestSetup(rng) + + check proto1.addNode(proto2.localNode) == Added + let pong = await proto1.ping(proto2.localNode) + check pong.isOk() + + let + contentKeys = ContentKeysList(@[ContentKeyByteList(@[byte 0x01, 0x02, 0x03])]) + content: seq[seq[byte]] = @[@[byte 0x04, 0x05, 0x06]] + + let peerCount = + await proto1.neighborhoodGossip(Opt.none(NodeId), contentKeys, content) + check peerCount == 1 + + let (srcNodeId, keys, items) = await proto2.stream.contentQueue.popFirst() + check: + srcNodeId.get() == proto1.localNode.id + keys.len() == items.len() + keys.len() == 1 + keys == contentKeys + items == content + + await proto1.stopPortalProtocol() + await proto2.stopPortalProtocol() + + asyncTest "Random gossip - single content key, value": + let (proto1, proto2) = defaultTestSetup(rng) + + check proto1.addNode(proto2.localNode) == Added + let pong = await proto1.ping(proto2.localNode) + check pong.isOk() 
+ + let + contentKeys = ContentKeysList(@[ContentKeyByteList(@[byte 0x01, 0x02, 0x03])]) + content: seq[seq[byte]] = @[@[byte 0x04, 0x05, 0x06]] + + let peerCount = await proto1.randomGossip(Opt.none(NodeId), contentKeys, content) + check peerCount == 1 + + let (srcNodeId, keys, items) = await proto2.stream.contentQueue.popFirst() + check: + srcNodeId.get() == proto1.localNode.id + keys.len() == items.len() + keys.len() == 1 + keys == contentKeys + items == content + + await proto1.stopPortalProtocol() + await proto2.stopPortalProtocol() + asyncTest "Correctly mark node as seen after request": let (proto1, proto2) = defaultTestSetup(rng) From db4ded3d6333fc64d20451db5ebf1fc096835736 Mon Sep 17 00:00:00 2001 From: bhartnett <51288821+bhartnett@users.noreply.github.com> Date: Wed, 21 May 2025 23:02:00 +0800 Subject: [PATCH 028/138] Portal Client: Add debug parameter to generate node id having a given prefix (#3313) --- portal/client/nimbus_portal_client.nim | 3 +- portal/client/nimbus_portal_client_conf.nim | 13 ++++++ portal/common/common_utils.nim | 46 +++++++++++++++++++-- 3 files changed, 57 insertions(+), 5 deletions(-) diff --git a/portal/client/nimbus_portal_client.nim b/portal/client/nimbus_portal_client.nim index 9939fc344a..9a87f4dc28 100644 --- a/portal/client/nimbus_portal_client.nim +++ b/portal/client/nimbus_portal_client.nim @@ -128,7 +128,8 @@ proc run(portalClient: PortalClient, config: PortalConf) {.raises: [CatchableErr if config.networkKey.isSome(): (config.networkKey.get(), true) else: - getPersistentNetKey(rng[], config.networkKeyFile) + let nodeIdPrefixHex = config.networkKeyNodeIdPrefix.get("") + getPersistentNetKey(rng[], config.networkKeyFile, nodeIdPrefixHex) enrFilePath = dataDir / enrFileName previousEnr = diff --git a/portal/client/nimbus_portal_client_conf.nim b/portal/client/nimbus_portal_client_conf.nim index d229c5ebca..f4894f0d41 100644 --- a/portal/client/nimbus_portal_client_conf.nim +++ 
b/portal/client/nimbus_portal_client_conf.nim @@ -166,6 +166,19 @@ type name: "netkey-unsafe" .}: Option[PrivateKey] + networkKeyNodeIdPrefix* {. + hidden, + desc: + "If an existing network key is not found, then generate a new private key " & + "(secp256k1) which has a node id where the most significant bits match the " & + "specified prefix (in hex). Between 2 and 8 hex characters are supported " & + "(excluding the 0x) but generally no more than 4 characters are recommended " & + "because otherwise the generation process is very slow.", + defaultValue: none(string), + defaultValueDesc: "none", + name: "debug-netkey-nodeid-prefix-unsafe" + .}: Option[string] + accumulatorFile* {. desc: "Get the master accumulator snapshot from a file containing an " & diff --git a/portal/common/common_utils.nim b/portal/common/common_utils.nim index 7b0dcb3080..4925d5613c 100644 --- a/portal/common/common_utils.nim +++ b/portal/common/common_utils.nim @@ -1,5 +1,5 @@ # Nimbus -# Copyright (c) 2021-2024 Status Research & Development GmbH +# Copyright (c) 2021-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). @@ -7,7 +7,12 @@ {.push raises: [].} -import std/[os, strutils], chronicles, stew/io2, eth/p2p/discoveryv5/enr +import + std/[os, strutils], + eth/common/hashes, + chronicles, + stew/[io2, byteutils], + eth/p2p/discoveryv5/enr iterator strippedLines(filename: string): string {.raises: [ref IOError].} = for line in lines(filename): @@ -40,13 +45,41 @@ proc loadBootstrapFile*(bootstrapFile: string, bootstrapEnrs: var seq[Record]) = fatal "Unknown bootstrap file format", ext quit 1 +# With this we can generate node ids at specific locations in the keyspace. +# Note: This should only be used for testing and debugging purposes. 
+proc generateNetKeyHavingNodeIdPrefix*( + rng: var HmacDrbgContext, prefixHex: string +): PrivateKey = + let prefixBytes = + try: + prefixHex.hexToSeqByte() + except ValueError as e: + raiseAssert(e.msg) + + doAssert(prefixBytes.len() >= 1 and prefixBytes.len() <= 4) + + while true: + let + privKey = PrivateKey.random(rng) + pubKey = privKey.toPublicKey.toRaw() + nodeIdBytes = keccak256(pubKey).data + + var matching = true + for i, b in prefixBytes: + if nodeIdBytes[i] != b: + matching = false + break + + if matching: + return privKey + # Note: # Currently just works with the network private key stored as hex in a file. # In the future it would be nice to re-use keystore from nimbus-eth2 for this. # However that would require the pull the keystore.nim and parts of # keystore_management.nim out of nimbus-eth2. proc getPersistentNetKey*( - rng: var HmacDrbgContext, keyFilePath: string + rng: var HmacDrbgContext, keyFilePath: string, nodeIdPrefixHex: string ): tuple[key: PrivateKey, newNetKey: bool] = logScope: key_file = keyFilePath @@ -73,7 +106,12 @@ proc getPersistentNetKey*( quit QuitFailure else: info "Network key file is missing, creating a new one" - let key = PrivateKey.random(rng) + + let key = + if nodeIdPrefixHex.len() > 0: + generateNetKeyHavingNodeIdPrefix(rng, nodeIdPrefixHex) + else: + PrivateKey.random(rng) if (let res = io2.writeFile(keyFilePath, $key); res.isErr): fatal "Failed to write the network key file", error = ioErrorMsg(res.error) From 9334684374e3634d3d84a4ecc01e689607b4da3f Mon Sep 17 00:00:00 2001 From: Chirag Parmar Date: Thu, 22 May 2025 11:52:11 +0530 Subject: [PATCH 029/138] proxy: swap block cache for header store (#3288) * swap block cache for header store; refactor * format * review and fixes * add tests for header store * remove unused headers * review and fixes * fixes * fix copyright info * fix copyright year * check order * earliest finalized * make cache len hidden --- nimbus.nimble | 2 +- 
nimbus_verified_proxy/block_cache.nim | 48 ----- nimbus_verified_proxy/header_store.nim | 178 ++++++++++++++++++ .../nimbus_verified_proxy.nim | 160 +++++++--------- .../nimbus_verified_proxy_conf.nim | 11 +- nimbus_verified_proxy/rpc/rpc_eth_api.nim | 67 +++---- .../tests/all_proxy_tests.nim | 8 + .../tests/test_header_store.nim | 126 +++++++++++++ 8 files changed, 423 insertions(+), 177 deletions(-) delete mode 100644 nimbus_verified_proxy/block_cache.nim create mode 100644 nimbus_verified_proxy/header_store.nim create mode 100644 nimbus_verified_proxy/tests/all_proxy_tests.nim create mode 100644 nimbus_verified_proxy/tests/test_header_store.nim diff --git a/nimbus.nimble b/nimbus.nimble index 667342e3e1..8a3484a469 100644 --- a/nimbus.nimble +++ b/nimbus.nimble @@ -125,7 +125,7 @@ task nimbus_verified_proxy, "Build Nimbus verified proxy": buildBinary "nimbus_verified_proxy", "nimbus_verified_proxy/", "-d:chronicles_log_level=TRACE" task nimbus_verified_proxy_test, "Run Nimbus verified proxy tests": - test "nimbus_verified_proxy/tests", "test_proof_validation", "-d:chronicles_log_level=ERROR -d:nimbus_db_backend=sqlite" + test "nimbus_verified_proxy/tests", "all_proxy_tests", "-d:chronicles_log_level=ERROR -d:nimbus_db_backend=sqlite" task build_fuzzers, "Build fuzzer test cases": # This file is there to be able to quickly build the fuzzer test cases in diff --git a/nimbus_verified_proxy/block_cache.nim b/nimbus_verified_proxy/block_cache.nim deleted file mode 100644 index bc3430c6ee..0000000000 --- a/nimbus_verified_proxy/block_cache.nim +++ /dev/null @@ -1,48 +0,0 @@ -# nimbus_verified_proxy -# Copyright (c) 2022-2024 Status Research & Development GmbH -# Licensed and distributed under either of -# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). -# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). -# at your option. 
This file may not be copied, modified, or distributed except according to those terms. - -{.push raises: [].} - -import eth/common/hashes, web3/eth_api_types, minilru, results - -## Cache for payloads received through block gossip and validated by the -## consensus light client. -## The payloads are stored in order of arrival. When the cache is full, the -## oldest payload is deleted first. -type BlockCache* = ref object - blocks: LruCache[Hash32, BlockObject] - -proc new*(T: type BlockCache, max: uint32): T = - let maxAsInt = int(max) - BlockCache(blocks: LruCache[Hash32, BlockObject].init(maxAsInt)) - -func len*(self: BlockCache): int = - len(self.blocks) - -func isEmpty*(self: BlockCache): bool = - len(self.blocks) == 0 - -proc add*(self: BlockCache, payload: BlockObject) = - # Only add if it didn't exist before - the implementation of `latest` relies - # on this.. - if payload.hash notin self.blocks: - self.blocks.put(payload.hash, payload) - -proc latest*(self: BlockCache): Opt[BlockObject] = - for b in self.blocks.values: - return Opt.some(b) - Opt.none(BlockObject) - -proc getByNumber*(self: BlockCache, number: Quantity): Opt[BlockObject] = - for b in self.blocks.values: - if b.number == number: - return Opt.some(b) - - Opt.none(BlockObject) - -proc getPayloadByHash*(self: BlockCache, hash: Hash32): Opt[BlockObject] = - self.blocks.get(hash) diff --git a/nimbus_verified_proxy/header_store.nim b/nimbus_verified_proxy/header_store.nim new file mode 100644 index 0000000000..80887b3b43 --- /dev/null +++ b/nimbus_verified_proxy/header_store.nim @@ -0,0 +1,178 @@ +# nimbus_verified_proxy +# Copyright (c) 2025 Status Research & Development GmbH +# Licensed and distributed under either of +# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). +# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). +# at your option. 
This file may not be copied, modified, or distributed except according to those terms. + +{.push raises: [].} + +import + eth/common/[hashes, headers], + web3/eth_api_types, + std/tables, + beacon_chain/spec/beaconstate, + beacon_chain/spec/datatypes/[phase0, altair, bellatrix], + beacon_chain/[light_client, nimbus_binary_common], + beacon_chain/el/engine_api_conversions, + minilru, + results + +type HeaderStore* = ref object + headers: LruCache[Hash32, Header] + hashes: LruCache[base.BlockNumber, Hash32] + finalized: Opt[Header] + finalizedHash: Opt[Hash32] + earliest: Opt[Header] + earliestHash: Opt[Hash32] + +func convLCHeader*(lcHeader: ForkedLightClientHeader): Result[Header, string] = + withForkyHeader(lcHeader): + template p(): auto = + forkyHeader.execution + + when lcDataFork >= LightClientDataFork.Capella: + let withdrawalsRoot = Opt.some(p.withdrawals_root.asBlockHash) + else: + const withdrawalsRoot = Opt.none(Hash32) + + when lcDataFork >= LightClientDataFork.Deneb: + let + blobGasUsed = Opt.some(p.blob_gas_used) + excessBlobGas = Opt.some(p.excess_blob_gas) + parentBeaconBlockRoot = Opt.some(forkyHeader.beacon.parent_root.asBlockHash) + else: + const + blobGasUsed = Opt.none(uint64) + excessBlobGas = Opt.none(uint64) + parentBeaconBlockRoot = Opt.none(Hash32) + + when lcDataFork >= LightClientDataFork.Electra: + # INFO: there is no visibility of the execution requests hash in light client header + let requestsHash = Opt.none(Hash32) + else: + const requestsHash = Opt.none(Hash32) + + when lcDataFork > LightClientDataFork.Altair: + let h = Header( + parentHash: p.parent_hash.asBlockHash, + ommersHash: EMPTY_UNCLE_HASH, + coinbase: addresses.Address(p.fee_recipient.data), + stateRoot: p.state_root.asBlockHash, + transactionsRoot: p.transactions_root.asBlockHash, + receiptsRoot: p.receipts_root.asBlockHash, + logsBloom: FixedBytes[BYTES_PER_LOGS_BLOOM](p.logs_bloom.data), + difficulty: DifficultyInt(0.u256), + number: base.BlockNumber(p.block_number), + 
gasLimit: GasInt(p.gas_limit), + gasUsed: GasInt(p.gas_used), + timestamp: EthTime(p.timestamp), + extraData: seq[byte](p.extra_data), + mixHash: p.prev_randao.data.to(Bytes32), + nonce: default(Bytes8), + baseFeePerGas: Opt.some(p.base_fee_per_gas), + withdrawalsRoot: withdrawalsRoot, + blobGasUsed: blobGasUsed, + excessBlobGas: excessBlobGas, + parentBeaconBlockRoot: parentBeaconBlockRoot, + requestsHash: requestsHash, + ) + return ok(h) + else: + # running verified proxy for altair doesn't make sense + return err("pre-bellatrix light client headers do not have execution header") + +func new*(T: type HeaderStore, max: int): T = + HeaderStore( + headers: LruCache[Hash32, Header].init(max), + hashes: LruCache[base.BlockNumber, Hash32].init(max), + finalized: Opt.none(Header), + finalizedHash: Opt.none(Hash32), + earliest: Opt.none(Header), + earliestHash: Opt.none(Hash32), + ) + +func len*(self: HeaderStore): int = + len(self.headers) + +func isEmpty*(self: HeaderStore): bool = + len(self.headers) == 0 + +func latest*(self: HeaderStore): Opt[Header] = + for h in self.headers.values: + return Opt.some(h) + + Opt.none(Header) + +func earliest*(self: HeaderStore): Opt[Header] = + self.earliest + +func earliestHash*(self: HeaderStore): Opt[Hash32] = + self.earliestHash + +func finalized*(self: HeaderStore): Opt[Header] = + self.finalized + +func finalizedHash*(self: HeaderStore): Opt[Hash32] = + self.finalizedHash + +proc updateFinalized*( + self: HeaderStore, header: ForkedLightClientHeader +): Result[bool, string] = + let execHeader = convLCHeader(header).valueOr: + return err(error) + + withForkyHeader(header): + when lcDataFork > LightClientDataFork.Altair: + let execHash = forkyHeader.execution.block_hash.asBlockHash + + if self.finalized.isSome(): + if self.finalized.get().number < execHeader.number: + self.finalized = Opt.some(execHeader) + self.finalizedHash = Opt.some(execHash) + else: + return err("finalized update header is older") + else: + self.finalized = 
Opt.some(execHeader) + self.finalizedHash = Opt.some(execHash) + self.earliest = Opt.some(execHeader) + self.earliestHash = Opt.some(execHash) + + return ok(true) + +proc add*(self: HeaderStore, header: ForkedLightClientHeader): Result[bool, string] = + let + execHeader = convLCHeader(header).valueOr: + return err(error) + latestHeader = self.latest + + # check the ordering of headers. This allows for gaps but always maintains an incremental order + if latestHeader.isSome(): + if execHeader.number <= latestHeader.get().number: + return err("block is older than the latest one") + + withForkyHeader(header): + when lcDataFork > LightClientDataFork.Altair: + let execHash = forkyHeader.execution.block_hash.asBlockHash + + # Only add if it didn't exist before - the implementation of `latest` relies + # on this.. + if execHash notin self.headers: + self.headers.put(execHash, execHeader) + self.hashes.put(execHeader.number, execHash) + ok(true) + +func latestHash*(self: HeaderStore): Opt[Hash32] = + for hash in self.headers.keys: + return Opt.some(hash) + + Opt.none(Hash32) + +func get*(self: HeaderStore, number: base.BlockNumber): Opt[Header] = + let hash = self.hashes.peek(number).valueOr: + return Opt.none(Header) + + return self.headers.peek(hash) + +func get*(self: HeaderStore, hash: Hash32): Opt[Header] = + self.headers.peek(hash) diff --git a/nimbus_verified_proxy/nimbus_verified_proxy.nim b/nimbus_verified_proxy/nimbus_verified_proxy.nim index 0446328120..4a8f47ef48 100644 --- a/nimbus_verified_proxy/nimbus_verified_proxy.nim +++ b/nimbus_verified_proxy/nimbus_verified_proxy.nim @@ -14,18 +14,18 @@ import confutils, eth/common/[keys, eth_types_rlp], json_rpc/rpcproxy, - beacon_chain/el/[el_manager, engine_api_conversions], + beacon_chain/el/[el_manager], beacon_chain/gossip_processing/optimistic_processor, beacon_chain/networking/network_metadata, beacon_chain/networking/topic_params, beacon_chain/spec/beaconstate, beacon_chain/spec/datatypes/[phase0, altair, 
bellatrix], beacon_chain/[light_client, nimbus_binary_common, version], - ../execution_chain/rpc/[cors, rpc_utils], - ../execution_chain/beacon/payload_conv, + ../execution_chain/rpc/cors, + ../execution_chain/common/common, ./rpc/rpc_eth_api, ./nimbus_verified_proxy_conf, - ./block_cache + ./header_store from beacon_chain/gossip_processing/eth2_processor import toValidationResult @@ -57,13 +57,6 @@ func getConfiguredChainId(networkMetadata: Eth2NetworkMetadata): UInt256 = proc run*( config: VerifiedProxyConf, ctx: ptr Context ) {.raises: [CatchableError], gcsafe.} = - var headerCallback: OnHeaderCallback - if ctx != nil: - headerCallback = ctx.onHeader - - # Required as both Eth2Node and LightClient requires correct config type - var lcConfig = config.asLightClientConf() - {.gcsafe.}: setupLogging(config.logLevel, config.logStdout, none(OutFile)) @@ -73,16 +66,33 @@ proc run*( except Exception: notice "commandLineParams() exception" + # load constants and metadata for the selected chain + let metadata = loadEth2Network(config.eth2Network) + + # initialize verified proxy let - metadata = loadEth2Network(config.eth2Network) chainId = getConfiguredChainId(metadata) + authHooks = @[httpCors(@[])] # TODO: for now we serve all cross origin requests + # TODO: write a comment + clientConfig = config.web3url.asClientConfig() - for node in metadata.bootstrapNodes: - lcConfig.bootstrapNodes.add node + rpcProxy = RpcProxy.new( + [initTAddress(config.rpcAddress, config.rpcPort)], clientConfig, authHooks + ) + + # header cache contains headers downloaded from p2p + headerStore = HeaderStore.new(config.cacheLen) + + var verifiedProxy = VerifiedRpcProxy.new(rpcProxy, headerStore, chainId) + # add handlers that verify RPC calls /rpc/rpc_eth_api.nim + verifiedProxy.installEthApiHandlers() + + # just for short hand convenience template cfg(): auto = metadata.cfg + # initialize beacon node genesis data, beacon clock and forkDigests let genesisState = try: @@ -97,11 +107,14 @@ proc 
run*( except CatchableError as err: raiseAssert "Invalid baked-in state: " & err.msg + # getStateField reads seeks info directly from a byte array + # get genesis time and instantiate the beacon clock genesisTime = getStateField(genesisState[], genesis_time) beaconClock = BeaconClock.init(genesisTime).valueOr: error "Invalid genesis time in state", genesisTime quit QuitFailure + # get the function that itself get the current beacon time getBeaconTime = beaconClock.getBeaconTimeFn() genesis_validators_root = getStateField(genesisState[], genesis_validators_root) @@ -109,6 +122,13 @@ proc run*( genesisBlockRoot = get_initial_beacon_block(genesisState[]).root + # transform the config to fit as a light client config and as a p2p node(Eth2Node) config + var lcConfig = config.asLightClientConf() + for node in metadata.bootstrapNodes: + lcConfig.bootstrapNodes.add node + + # create new network keys, create a p2p node(Eth2Node) and create a light client + let rng = keys.newRng() netKeys = getRandomNetKeys(rng[]) @@ -117,114 +137,68 @@ proc run*( rng, lcConfig, netKeys, cfg, forkDigests, getBeaconTime, genesis_validators_root ) - blockCache = BlockCache.new(uint32(64)) - - # TODO: for now we serve all cross origin requests - authHooks = @[httpCors(@[])] - - clientConfig = config.web3url.asClientConfig() - - rpcProxy = RpcProxy.new( - [initTAddress(config.rpcAddress, config.rpcPort)], clientConfig, authHooks - ) - - verifiedProxy = VerifiedRpcProxy.new(rpcProxy, blockCache, chainId) - - optimisticHandler = proc( - signedBlock: ForkedSignedBeaconBlock - ) {.async: (raises: [CancelledError]).} = - notice "New LC optimistic block", - opt = signedBlock.toBlockId(), wallSlot = getBeaconTime().slotOrZero - withBlck(signedBlock): - when consensusFork >= ConsensusFork.Bellatrix: - if forkyBlck.message.is_execution_block: - template payload(): auto = - forkyBlck.message.body - - try: - # TODO parentBeaconBlockRoot / requestsHash - let blk = ethBlock( - 
executionPayload(payload.asEngineExecutionPayload()), - parentBeaconBlockRoot = Opt.none(Hash32), - requestsHash = Opt.none(Hash32), - ) - blockCache.add(populateBlockObject(blk.header.rlpHash, blk, 0.u256, true)) - except RlpError as exc: - debug "Invalid block received", err = exc.msg - - optimisticProcessor = initOptimisticProcessor(getBeaconTime, optimisticHandler) - + # light client is set to optimistic finalization mode lightClient = createLightClient( network, rng, lcConfig, cfg, forkDigests, getBeaconTime, genesis_validators_root, LightClientFinalizationMode.Optimistic, ) - verifiedProxy.installEthApiHandlers() - - info "Listening to incoming network requests" + # registerbasic p2p protocols for maintaing peers ping/status/get_metadata/... etc. network.registerProtocol( PeerSync, PeerSync.NetworkState.init(cfg, forkDigests, genesisBlockRoot, getBeaconTime), ) - network.addValidator( - getBeaconBlocksTopic(forkDigests.phase0), - proc(signedBlock: phase0.SignedBeaconBlock): ValidationResult = - toValidationResult(optimisticProcessor.processSignedBeaconBlock(signedBlock)), - ) - network.addValidator( - getBeaconBlocksTopic(forkDigests.altair), - proc(signedBlock: altair.SignedBeaconBlock): ValidationResult = - toValidationResult(optimisticProcessor.processSignedBeaconBlock(signedBlock)), - ) - network.addValidator( - getBeaconBlocksTopic(forkDigests.bellatrix), - proc(signedBlock: bellatrix.SignedBeaconBlock): ValidationResult = - toValidationResult(optimisticProcessor.processSignedBeaconBlock(signedBlock)), - ) - network.addValidator( - getBeaconBlocksTopic(forkDigests.capella), - proc(signedBlock: capella.SignedBeaconBlock): ValidationResult = - toValidationResult(optimisticProcessor.processSignedBeaconBlock(signedBlock)), - ) - network.addValidator( - getBeaconBlocksTopic(forkDigests.deneb), - proc(signedBlock: deneb.SignedBeaconBlock): ValidationResult = - toValidationResult(optimisticProcessor.processSignedBeaconBlock(signedBlock)), - ) - 
lightClient.installMessageValidators() + # start the p2p network and rpcProxy waitFor network.startListening() waitFor network.start() waitFor rpcProxy.start() + + # verify chain id that the proxy is connected to waitFor verifiedProxy.verifyChaindId() proc onFinalizedHeader( lightClient: LightClient, finalizedHeader: ForkedLightClientHeader ) = withForkyHeader(finalizedHeader): - when lcDataFork > LightClientDataFork.None: + when lcDataFork > LightClientDataFork.Altair: info "New LC finalized header", finalized_header = shortLog(forkyHeader) - if headerCallback != nil: + let res = headerStore.updateFinalized(finalizedHeader) + + if res.isErr(): + error "finalized header update error", error = res.error() + + if ctx != nil: try: - headerCallback(cstring(Json.encode(forkyHeader)), 0) + ctx.onHeader(cstring(Json.encode(forkyHeader)), 0) except SerializationError as e: error "finalizedHeaderCallback exception", error = e.msg + else: + error "pre-bellatrix light client headers do not have the execution payload header" proc onOptimisticHeader( lightClient: LightClient, optimisticHeader: ForkedLightClientHeader ) = withForkyHeader(optimisticHeader): - when lcDataFork > LightClientDataFork.None: + when lcDataFork > LightClientDataFork.Altair: info "New LC optimistic header", optimistic_header = shortLog(forkyHeader) - if headerCallback != nil: + let res = headerStore.add(optimisticHeader) + + if res.isErr(): + error "header store add error", error = res.error() + + if ctx != nil: try: - headerCallback(cstring(Json.encode(forkyHeader)), 1) + ctx.onHeader(cstring(Json.encode(forkyHeader)), 1) except SerializationError as e: error "optimisticHeaderCallback exception", error = e.msg + else: + error "pre-bellatrix light client headers do not have the execution payload header" lightClient.onFinalizedHeader = onFinalizedHeader lightClient.onOptimisticHeader = onOptimisticHeader lightClient.trustedBlockRoot = some config.trustedBlockRoot + lightClient.installMessageValidators() 
func shouldSyncOptimistically(wallSlot: Slot): bool = let optimisticHeader = lightClient.optimisticHeader @@ -279,11 +253,12 @@ proc run*( blocksGossipState = targetGossipState - proc onSecond(time: Moment) = + proc updateGossipStatus(time: Moment) = let wallSlot = getBeaconTime().slotOrZero() updateBlocksGossipStatus(wallSlot + 1) lightClient.updateGossipStatus(wallSlot + 1) + # updates gossip status every second every second proc runOnSecondLoop() {.async.} = let sleepTime = chronos.seconds(1) while true: @@ -291,15 +266,20 @@ proc run*( await chronos.sleepAsync(sleepTime) let afterSleep = chronos.now(chronos.Moment) let sleepTime = afterSleep - start - onSecond(start) + updateGossipStatus(start) let finished = chronos.now(chronos.Moment) let processingTime = finished - afterSleep trace "onSecond task completed", sleepTime, processingTime - onSecond(Moment.now()) + # update gossip status before starting the light client + updateGossipStatus(Moment.now()) + # start the light client lightClient.start() + # launch a async routine asyncSpawn runOnSecondLoop() + + # run an infinite loop and wait for a stop signal while true: poll() if ctx != nil and ctx.stop: @@ -308,7 +288,7 @@ proc run*( waitFor rpcProxy.stop() ctx.cleanup() # Notify client that cleanup is finished - headerCallback(nil, 2) + ctx.onHeader(nil, 2) break when isMainModule: diff --git a/nimbus_verified_proxy/nimbus_verified_proxy_conf.nim b/nimbus_verified_proxy/nimbus_verified_proxy_conf.nim index 6ddca61080..01cfe91a92 100644 --- a/nimbus_verified_proxy/nimbus_verified_proxy_conf.nim +++ b/nimbus_verified_proxy/nimbus_verified_proxy_conf.nim @@ -1,5 +1,5 @@ # nimbus_verified_proxy -# Copyright (c) 2022-2024 Status Research & Development GmbH +# Copyright (c) 2022-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). 
# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). @@ -64,6 +64,15 @@ type VerifiedProxyConf* = object # Config name: "data-dir" .}: OutDir + # In-Memory Cache Size + cacheLen* {. + hidden, + desc: "Length of the header cache maintained in memory", + defaultValue: 64, + defaultValueDesc: "64", + name: "debug-cache-len" + .}: int + # Network eth2Network* {. desc: "The Eth2 network to join", defaultValueDesc: "mainnet", name: "network" diff --git a/nimbus_verified_proxy/rpc/rpc_eth_api.nim b/nimbus_verified_proxy/rpc/rpc_eth_api.nim index 5d0aac5133..a0d9a4a73c 100644 --- a/nimbus_verified_proxy/rpc/rpc_eth_api.nim +++ b/nimbus_verified_proxy/rpc/rpc_eth_api.nim @@ -15,7 +15,7 @@ import eth/common/accounts, web3/eth_api, ../validate_proof, - ../block_cache + ../header_store logScope: topics = "verified_proxy" @@ -23,7 +23,7 @@ logScope: type VerifiedRpcProxy* = ref object proxy: RpcProxy - blockCache: BlockCache + headerStore: HeaderStore chainId: UInt256 QuantityTagKind = enum @@ -52,15 +52,15 @@ func parseQuantityTag(blockTag: BlockTag): Result[QuantityTag, string] = return ok(QuantityTag(kind: BlockNumber, blockNumber: quantity)) template checkPreconditions(proxy: VerifiedRpcProxy) = - if proxy.blockCache.isEmpty(): + if proxy.headerStore.isEmpty(): raise newException(ValueError, "Syncing") template rpcClient(lcProxy: VerifiedRpcProxy): RpcClient = lcProxy.proxy.getClient() -proc getBlockByTag( +proc getHeaderByTag( proxy: VerifiedRpcProxy, quantityTag: BlockTag -): results.Opt[BlockObject] {.raises: [ValueError].} = +): results.Opt[Header] {.raises: [ValueError].} = checkPreconditions(proxy) let tag = parseQuantityTag(quantityTag).valueOr: @@ -69,14 +69,14 @@ proc getBlockByTag( case tag.kind of LatestBlock: # this will always return some block, as we always checkPreconditions - proxy.blockCache.latest + proxy.headerStore.latest of BlockNumber: - proxy.blockCache.getByNumber(tag.blockNumber) + 
proxy.headerStore.get(base.BlockNumber(distinctBase(tag.blockNumber))) -proc getBlockByTagOrThrow( +proc getHeaderByTagOrThrow( proxy: VerifiedRpcProxy, quantityTag: BlockTag -): BlockObject {.raises: [ValueError].} = - getBlockByTag(proxy, quantityTag).valueOr: +): Header {.raises: [ValueError].} = + getHeaderByTag(proxy, quantityTag).valueOr: raise newException(ValueError, "No block stored for given tag " & $quantityTag) proc installEthApiHandlers*(lcProxy: VerifiedRpcProxy) = @@ -85,7 +85,7 @@ proc installEthApiHandlers*(lcProxy: VerifiedRpcProxy) = lcProxy.proxy.rpc("eth_blockNumber") do() -> uint64: ## Returns the number of the most recent block. - let latest = lcProxy.blockCache.latest.valueOr: + let latest = lcProxy.headerStore.latest.valueOr: raise newException(ValueError, "Syncing") latest.number.uint64 @@ -98,15 +98,15 @@ proc installEthApiHandlers*(lcProxy: VerifiedRpcProxy) = # can mean different blocks and ultimatly piece received piece of state # must by validated against correct state root let - blk = lcProxy.getBlockByTagOrThrow(quantityTag) - blockNumber = blk.number.uint64 + header = lcProxy.getHeaderByTagOrThrow(quantityTag) + blockNumber = header.number.uint64 info "Forwarding eth_getBalance call", blockNumber let proof = await lcProxy.rpcClient.eth_getProof(address, @[], blockId(blockNumber)) account = getAccountFromProof( - blk.stateRoot, proof.address, proof.balance, proof.nonce, proof.codeHash, + header.stateRoot, proof.address, proof.balance, proof.nonce, proof.codeHash, proof.storageHash, proof.accountProof, ).valueOr: raise newException(ValueError, error) @@ -117,23 +117,23 @@ proc installEthApiHandlers*(lcProxy: VerifiedRpcProxy) = address: Address, slot: UInt256, quantityTag: BlockTag ) -> UInt256: let - blk = lcProxy.getBlockByTagOrThrow(quantityTag) - blockNumber = blk.number.uint64 + header = lcProxy.getHeaderByTagOrThrow(quantityTag) + blockNumber = header.number.uint64 info "Forwarding eth_getStorageAt", blockNumber let proof = 
await lcProxy.rpcClient.eth_getProof(address, @[slot], blockId(blockNumber)) - getStorageData(blk.stateRoot, slot, proof).valueOr: + getStorageData(header.stateRoot, slot, proof).valueOr: raise newException(ValueError, error) lcProxy.proxy.rpc("eth_getTransactionCount") do( address: Address, quantityTag: BlockTag ) -> uint64: let - blk = lcProxy.getBlockByTagOrThrow(quantityTag) - blockNumber = blk.number.uint64 + header = lcProxy.getHeaderByTagOrThrow(quantityTag) + blockNumber = header.number.uint64 info "Forwarding eth_getTransactionCount", blockNumber @@ -141,7 +141,7 @@ proc installEthApiHandlers*(lcProxy: VerifiedRpcProxy) = proof = await lcProxy.rpcClient.eth_getProof(address, @[], blockId(blockNumber)) account = getAccountFromProof( - blk.stateRoot, proof.address, proof.balance, proof.nonce, proof.codeHash, + header.stateRoot, proof.address, proof.balance, proof.nonce, proof.codeHash, proof.storageHash, proof.accountProof, ).valueOr: raise newException(ValueError, error) @@ -152,14 +152,14 @@ proc installEthApiHandlers*(lcProxy: VerifiedRpcProxy) = address: Address, quantityTag: BlockTag ) -> seq[byte]: let - blk = lcProxy.getBlockByTagOrThrow(quantityTag) - blockNumber = blk.number.uint64 + header = lcProxy.getHeaderByTagOrThrow(quantityTag) + blockNumber = header.number.uint64 info "Forwarding eth_getCode", blockNumber let proof = await lcProxy.rpcClient.eth_getProof(address, @[], blockId(blockNumber)) account = getAccountFromProof( - blk.stateRoot, proof.address, proof.balance, proof.nonce, proof.codeHash, + header.stateRoot, proof.address, proof.balance, proof.nonce, proof.codeHash, proof.storageHash, proof.accountProof, ).valueOr: raise newException(ValueError, error) @@ -184,23 +184,16 @@ proc installEthApiHandlers*(lcProxy: VerifiedRpcProxy) = lcProxy.proxy.registerProxyMethod("eth_call") lcProxy.proxy.registerProxyMethod("eth_sendRawTransaction") lcProxy.proxy.registerProxyMethod("eth_getTransactionReceipt") - - # TODO currently we do not handle 
fullTransactions flag. It require updates on - # nim-web3 side - lcProxy.proxy.rpc("eth_getBlockByNumber") do( - quantityTag: BlockTag, fullTransactions: bool - ) -> Opt[BlockObject]: - lcProxy.getBlockByTag(quantityTag) - - lcProxy.proxy.rpc("eth_getBlockByHash") do( - blockHash: Hash32, fullTransactions: bool - ) -> Opt[BlockObject]: - lcProxy.blockCache.getPayloadByHash(blockHash) + lcProxy.proxy.registerProxyMethod("eth_getBlockByNumber") + lcProxy.proxy.registerProxyMethod("eth_getBlockByHash") proc new*( - T: type VerifiedRpcProxy, proxy: RpcProxy, blockCache: BlockCache, chainId: UInt256 + T: type VerifiedRpcProxy, + proxy: RpcProxy, + headerStore: HeaderStore, + chainId: UInt256, ): T = - VerifiedRpcProxy(proxy: proxy, blockCache: blockCache, chainId: chainId) + VerifiedRpcProxy(proxy: proxy, headerStore: headerStore, chainId: chainId) # Used to be in eth1_monitor.nim; not sure why it was deleted, # so I copied it here. --Adam diff --git a/nimbus_verified_proxy/tests/all_proxy_tests.nim b/nimbus_verified_proxy/tests/all_proxy_tests.nim new file mode 100644 index 0000000000..0bff1acc34 --- /dev/null +++ b/nimbus_verified_proxy/tests/all_proxy_tests.nim @@ -0,0 +1,8 @@ +# Nimbus +# Copyright (c) 2025 Status Research & Development GmbH +# Licensed under either of +# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0) +# * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) +# at your option. This file may not be copied, modified, or distributed except according to those terms. 
+ +import ./test_proof_validation, ./test_header_store diff --git a/nimbus_verified_proxy/tests/test_header_store.nim b/nimbus_verified_proxy/tests/test_header_store.nim new file mode 100644 index 0000000000..a497375fc6 --- /dev/null +++ b/nimbus_verified_proxy/tests/test_header_store.nim @@ -0,0 +1,126 @@ +# Nimbus +# Copyright (c) 2025 Status Research & Development GmbH +# Licensed and distributed under either of +# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). +# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). +# at your option. This file may not be copied, modified, or distributed except according to those terms. + +{.used.} + +import + unittest2, + stint/endians2, + eth/common/headers_rlp, + ../header_store, + beacon_chain/spec/forks, + beacon_chain/spec/helpers + +func headerGenerator(number: int): ForkedLightClientHeader = + ForkedLightClientHeader( + kind: LightClientDataFork.Capella, + capellaData: capella.LightClientHeader( + beacon: default(capella.BeaconBlockHeader), + execution: capella.ExecutionPayloadHeader( + block_number: uint64(number), block_hash: Hash32(toBytesBE(u256(number))) + ), + execution_branch: default(capella.ExecutionBranch), + ), + ) + +suite "test proxy header store": + test "get from empty store": + let store = HeaderStore.new(1) + check store.get(default(Hash32)).isNone() + check store.get(default(BlockNumber)).isNone() + check store.latest.isNone() + check store.latestHash.isNone() + check store.len == 0 + check store.isEmpty() + + test "get from a non-pruned semi-filled store": + let store = HeaderStore.new(10) + for i in 0 ..< 5: + discard store.add(headerGenerator(i)) + + check store.len == 5 + check store.get(BlockNumber(0)).isSome() + check store.latest.isSome() + check store.latest.get().number == 4 + check store.latestHash.isSome() + check (not store.isEmpty()) + + test "header store auto pruning": + let store = 
HeaderStore.new(10) + for i in 0 ..< 10: + discard store.add(headerGenerator(i)) + + check store.get(BlockNumber(0)).isSome() + + discard store.add(headerGenerator(10)) + + check store.latest.isSome() + check store.latest.get().number == 10 + check store.get(BlockNumber(0)).isNone() + + test "duplicate addition should not work": + let store = HeaderStore.new(10) + for i in 0 ..< 11: + discard store.add(headerGenerator(i)) + + discard store.add(headerGenerator(10)) + + check store.latest.isSome() + check store.latest.get.number == 10 + check store.get(BlockNumber(1)).isSome() + + discard store.add(headerGenerator(11)) + + check store.latest.isSome() + check store.latest.get.number == 11 + check store.get(BlockNumber(1)).isNone() + + test "update finalized": + let store = HeaderStore.new(10) + for i in 0 ..< 10: + discard store.add(headerGenerator(i)) + + discard store.updateFinalized(headerGenerator(0)) + + check store.len == 10 + check store.get(BlockNumber(0)).isSome() + check store.finalized.isSome() + check store.finalizedHash.isSome() + check store.earliest.isSome() + check store.earliestHash.isSome() + check store.earliestHash.get() == store.finalizedHash.get() + check store.earliest.get() == store.finalized.get() + + discard store.updateFinalized(headerGenerator(1)) + + check store.earliest.get() != store.finalized.get() + check store.earliestHash.get() != store.finalizedHash.get() + check store.finalized.get().number == 1 + + test "add altair header": + let store = HeaderStore.new(5) + let altairHeader = ForkedLightClientHeader( + kind: LightClientDataFork.Altair, + altairData: altair.LightClientHeader(beacon: default(altair.BeaconBlockHeader)), + ) + let res = store.add(altairHeader) + + check res.isErr() + + test "add electra header": + let store = HeaderStore.new(5) + let electraHeader = ForkedLightClientHeader( + kind: LightClientDataFork.Electra, + electraData: electra.LightClientHeader( + beacon: default(electra.BeaconBlockHeader), + execution: 
electra.ExecutionPayloadHeader(block_number: uint64(232)), + execution_branch: default(capella.ExecutionBranch), + ), + ) + let res = store.add(electraHeader) + + check res.isOk() From bda9c1694d828a06a0cd2b0499039903724285eb Mon Sep 17 00:00:00 2001 From: kdeme <7857583+kdeme@users.noreply.github.com> Date: Thu, 22 May 2025 15:38:02 +0200 Subject: [PATCH 030/138] Portal: Fix beacon lc bootstrap validation (#3316) The current_sync_committee_gindex is fork dependant, this causes bootstrap validation issue since electra. --- portal/network/beacon/beacon_network.nim | 4 ++-- portal/network/beacon/beacon_validation.nim | 10 +++++++--- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/portal/network/beacon/beacon_network.nim b/portal/network/beacon/beacon_network.nim index 49911306a2..cc6785b662 100644 --- a/portal/network/beacon/beacon_network.nim +++ b/portal/network/beacon/beacon_network.nim @@ -286,7 +286,7 @@ proc validateContent( forkyBootstrap.header.beacon: return err("Bootstrap header does not match recent finalized header") - if forkyBootstrap.isValidBootstrap(n.beaconDb.cfg): + if forkyBootstrap.isValidBootstrap(lcDataFork, n.beaconDb.cfg): ok() else: err("Error validating LC bootstrap") @@ -298,7 +298,7 @@ proc validateContent( if blockRoot != n.trustedBlockRoot.get(): return err("Bootstrap header does not match trusted block root") - if forkyBootstrap.isValidBootstrap(n.beaconDb.cfg): + if forkyBootstrap.isValidBootstrap(lcDataFork, n.beaconDb.cfg): ok() else: err("Error validating LC bootstrap") diff --git a/portal/network/beacon/beacon_validation.nim b/portal/network/beacon/beacon_validation.nim index b4d6a4175f..826edeab26 100644 --- a/portal/network/beacon/beacon_validation.nim +++ b/portal/network/beacon/beacon_validation.nim @@ -13,14 +13,18 @@ import beacon_chain/spec/forks, beacon_chain/spec/forks_light_client -func isValidBootstrap*(bootstrap: ForkyLightClientBootstrap, cfg: RuntimeConfig): bool = +func isValidBootstrap*( + bootstrap: 
ForkyLightClientBootstrap, + kind: static LightClientDataFork, + cfg: RuntimeConfig, +): bool = ## Verify if the bootstrap is valid. This does not verify if the header is ## part of the canonical chain. is_valid_light_client_header(bootstrap.header, cfg) and is_valid_merkle_branch( hash_tree_root(bootstrap.current_sync_committee), bootstrap.current_sync_committee_branch, - log2trunc(altair.CURRENT_SYNC_COMMITTEE_GINDEX), - get_subtree_index(altair.CURRENT_SYNC_COMMITTEE_GINDEX), + log2trunc(current_sync_committee_gindex(kind)), + get_subtree_index(current_sync_committee_gindex(kind)), bootstrap.header.beacon.state_root, ) From 351755cdf18ec5657f302a76de94b7835025ded7 Mon Sep 17 00:00:00 2001 From: Advaita Saha Date: Fri, 23 May 2025 05:17:02 +0530 Subject: [PATCH 031/138] load the jwt secret from data-dir if available (#3320) * load the jwt secret from data-dir if available * fix --- execution_chain/rpc/jwt_auth.nim | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/execution_chain/rpc/jwt_auth.nim b/execution_chain/rpc/jwt_auth.nim index 2abf85bf58..8e7ef5a53e 100644 --- a/execution_chain/rpc/jwt_auth.nim +++ b/execution_chain/rpc/jwt_auth.nim @@ -1,5 +1,5 @@ # Nimbus -# Copyright (c) 2022-2024 Status Research & Development GmbH +# Copyright (c) 2022-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at # https://opensource.org/licenses/MIT). @@ -15,7 +15,7 @@ {.push gcsafe, raises: [].} import - std/[base64, options, strutils, times], + std/[os, base64, options, strutils, times], bearssl/rand, chronicles, chronos, @@ -209,7 +209,9 @@ proc jwtSharedSecret*( # startup, or show error and continue without exposing the authenticated # port. 
# - if config.jwtSecret.isNone: + var jwtSecretPath = config.dataDir.string / jwtSecretFile # default path + let jwtDoesNotExist = not fileExists(jwtSecretPath) + if config.jwtSecret.isNone and jwtDoesNotExist: # If such a parameter is not given, the client SHOULD generate such a # token, valid for the duration of the execution, and store it the # hex-encoded secret as a jwt.hex file on the filesystem. This file can @@ -218,7 +220,6 @@ proc jwtSharedSecret*( # github.com/ethereum/ # /execution-apis/blob/v1.0.0-alpha.8/src/engine/ # /authentication.md#key-distribution - let jwtSecretPath = config.dataDir.string & "/" & jwtSecretFile try: let newSecret = rndSecret() jwtSecretPath.writeFile(newSecret.JwtSharedKeyRaw.to0xHex) @@ -234,14 +235,16 @@ proc jwtSharedSecret*( return err(jwtCreationError) try: - let lines = config.jwtSecret.get.string.readLines(1) + if jwtDoesNotExist: + jwtSecretPath = config.jwtSecret.get.string + let lines = jwtSecretPath.readLines(1) if lines.len == 0: return err(jwtKeyEmptyFile) var key: JwtSharedKey let rc = key.fromHex(lines[0]) if rc.isErr: return err(rc.error) - info "JWT secret loaded", jwtSecretPath = config.jwtSecret.get.string + info "JWT secret loaded", jwtSecretPath = jwtSecretPath return ok(key) except IOError: return err(jwtKeyFileCannotOpen) From a92c26735d525e0069b0b4880ecc2bedf9fd897f Mon Sep 17 00:00:00 2001 From: andri lim Date: Fri, 23 May 2025 08:55:04 +0700 Subject: [PATCH 032/138] Fusaka-devnet-0/EIP-7823: Set upper bounds for MODEXP (#3322) --- execution_chain/evm/precompiles.nim | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/execution_chain/evm/precompiles.nim b/execution_chain/evm/precompiles.nim index b08f2a37f0..3ddb2561ac 100644 --- a/execution_chain/evm/precompiles.nim +++ b/execution_chain/evm/precompiles.nim @@ -257,6 +257,11 @@ func modExp(c: Computation, fork: EVMFork = FkByzantium): EvmResultVoid = expLen = expL.safeInt modLen = modL.safeInt + if fork == FkOsaka: + # EIP-7823 + 
if baseLen > 1024 or expLen > 1024 or modLen > 1024: + return err(prcErr(PrcInvalidParam)) + let gasFee = ? modExpFee(c, baseL, expL, modL, fork) ? c.gasMeter.consumeGas(gasFee, reason="ModExp Precompile") From 86daa7846b18c40f846a32c790a9ad6d7972b053 Mon Sep 17 00:00:00 2001 From: andri lim Date: Fri, 23 May 2025 09:39:24 +0700 Subject: [PATCH 033/138] Fusaka-devnet-0/EIP-7883: ModExp Gas Cost Increase (#3323) * Fusaka-devnet-0/EIP-7883: ModExp Gas Cost Increase * Add tests --- PrecompileTests.md | 5 +- execution_chain/evm/precompiles.nim | 24 +++- .../PrecompileTests/modexp_eip7883.json | 126 ++++++++++++++++++ tools/common/helpers.nim | 2 + tools/common/types.nim | 1 + 5 files changed, 151 insertions(+), 7 deletions(-) create mode 100644 tests/fixtures/PrecompileTests/modexp_eip7883.json diff --git a/PrecompileTests.md b/PrecompileTests.md index 81205033cf..1c27644cb8 100644 --- a/PrecompileTests.md +++ b/PrecompileTests.md @@ -18,12 +18,13 @@ PrecompileTests + identity.json OK + modexp.json OK + modexp_eip2565.json OK ++ modexp_eip7883.json OK + pairing.json OK + pairing_istanbul.json OK + ripemd160.json OK + sha256.json OK ``` -OK: 20/20 Fail: 0/20 Skip: 0/20 +OK: 21/21 Fail: 0/21 Skip: 0/21 ## eest ```diff + add_G1_bls.json OK @@ -48,4 +49,4 @@ OK: 20/20 Fail: 0/20 Skip: 0/20 OK: 18/18 Fail: 0/18 Skip: 0/18 ---TOTAL--- -OK: 38/38 Fail: 0/38 Skip: 0/38 +OK: 39/39 Fail: 0/39 Skip: 0/39 diff --git a/execution_chain/evm/precompiles.nim b/execution_chain/evm/precompiles.nim index 3ddb2561ac..faed87e03a 100644 --- a/execution_chain/evm/precompiles.nim +++ b/execution_chain/evm/precompiles.nim @@ -204,6 +204,14 @@ func modExpFee(c: Computation, result = result div 8 result = result * result + func mulComplexityEIP7883(maxLen: UInt256): UInt256 = + # gas = ceil(x div 8) ^ 2 + result = maxLen + 7 + result = result div 8 + result = result * result + if maxLen > 32.u256: + result = result * 2 + let adjExpLen = block: let baseL = baseLen.safeInt @@ -217,10 +225,12 @@ 
func modExpFee(c: Computation, if first32.isZero(): 0.u256 else: first32.log2.u256 # highest-bit in exponent else: + let expMul = if fork >= FkOsaka: 16.u256 + else: 8.u256 if not first32.isZero: - 8.u256 * (expLen - 32.u256) + first32.log2.u256 + expMul * (expLen - 32.u256) + first32.log2.u256 else: - 8.u256 * (expLen - 32.u256) + expMul * (expLen - 32.u256) template gasCalc(comp, divisor: untyped): untyped = ( @@ -229,16 +239,20 @@ func modExpFee(c: Computation, ) div divisor # EIP2565: modExp gas cost - let gasFee = if fork >= FkBerlin: gasCalc(mulComplexityEIP2565, GasQuadDivisorEIP2565) + let gasFee = if fork >= FkOsaka: gasCalc(mulComplexityEIP7883, GasQuadDivisorEIP2565) + elif fork >= FkBerlin: gasCalc(mulComplexityEIP2565, GasQuadDivisorEIP2565) else: gasCalc(mulComplexity, GasQuadDivisor) if gasFee > high(GasInt).u256: return err(gasErr(OutOfGas)) + let minPrice = if fork >= FkOsaka: 500.GasInt + else: 200.GasInt + var res = gasFee.truncate(GasInt) # EIP2565: modExp gas cost - if fork >= FkBerlin and res < 200.GasInt: - res = 200.GasInt + if fork >= FkBerlin and res < minPrice: + res = minPrice ok(res) func modExp(c: Computation, fork: EVMFork = FkByzantium): EvmResultVoid = diff --git a/tests/fixtures/PrecompileTests/modexp_eip7883.json b/tests/fixtures/PrecompileTests/modexp_eip7883.json new file mode 100644 index 0000000000..61bca3bdcb --- /dev/null +++ b/tests/fixtures/PrecompileTests/modexp_eip7883.json @@ -0,0 +1,126 @@ +{ + "func": "modexp", + "fork": "osaka", + "data": + [ + { + "Input": "00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000002003fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2efffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Name": "eip_example1", + "Gas": 1360, + "NoBenchmark": 
false + }, + { + "Input": "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000020fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2efffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f", + "Expected": "0000000000000000000000000000000000000000000000000000000000000000", + "Name": "eip_example2", + "Gas": 1360, + "NoBenchmark": false + }, + { + "Input": "000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000040e09ad9675465c53a109fac66a445c91b292d2bb2c5268addb30cd82f80fcb0033ff97c80a5fc6f39193ae969c6ede6710a6b7ac27078a06d90ef1c72e5c85fb502fc9e1f6beb81516545975218075ec2af118cd8798df6e08a147c60fd6095ac2bb02c2908cf4dd7c81f11c289e4bce98f3553768f392a80ce22bf5c4f4a248c6b", + "Expected": "60008f1614cc01dcfb6bfb09c625cf90b47d4468db81b5f8b7a39d42f332eab9b2da8f2d95311648a8f243f4bb13cfb3d8f7f2a3c014122ebb3ed41b02783adc", + "Name": "nagydani-1-square", + "Gas": 500, + "NoBenchmark": false + }, + { + "Input": "000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000040e09ad9675465c53a109fac66a445c91b292d2bb2c5268addb30cd82f80fcb0033ff97c80a5fc6f39193ae969c6ede6710a6b7ac27078a06d90ef1c72e5c85fb503fc9e1f6beb81516545975218075ec2af118cd8798df6e08a147c60fd6095ac2bb02c2908cf4dd7c81f11c289e4bce98f3553768f392a80ce22bf5c4f4a248c6b", + "Expected": "4834a46ba565db27903b1c720c9d593e84e4cbd6ad2e64b31885d944f68cd801f92225a8961c952ddf2797fa4701b330c85c4b363798100b921a1a22a46a7fec", + "Name": "nagydani-1-qube", + "Gas": 500, + "NoBenchmark": false + }, + { + "Input": 
"000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000040e09ad9675465c53a109fac66a445c91b292d2bb2c5268addb30cd82f80fcb0033ff97c80a5fc6f39193ae969c6ede6710a6b7ac27078a06d90ef1c72e5c85fb5010001fc9e1f6beb81516545975218075ec2af118cd8798df6e08a147c60fd6095ac2bb02c2908cf4dd7c81f11c289e4bce98f3553768f392a80ce22bf5c4f4a248c6b", + "Expected": "c36d804180c35d4426b57b50c5bfcca5c01856d104564cd513b461d3c8b8409128a5573e416d0ebe38f5f736766d9dc27143e4da981dfa4d67f7dc474cbee6d2", + "Name": "nagydani-1-pow0x10001", + "Gas": 682, + "NoBenchmark": false + }, + { + "Input": "000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000080cad7d991a00047dd54d3399b6b0b937c718abddef7917c75b6681f40cc15e2be0003657d8d4c34167b2f0bbbca0ccaa407c2a6a07d50f1517a8f22979ce12a81dcaf707cc0cebfc0ce2ee84ee7f77c38b9281b9822a8d3de62784c089c9b18dcb9a2a5eecbede90ea788a862a9ddd9d609c2c52972d63e289e28f6a590ffbf5102e6d893b80aeed5e6e9ce9afa8a5d5675c93a32ac05554cb20e9951b2c140e3ef4e433068cf0fb73bc9f33af1853f64aa27a0028cbf570d7ac9048eae5dc7b28c87c31e5810f1e7fa2cda6adf9f1076dbc1ec1238560071e7efc4e9565c49be9e7656951985860a558a754594115830bcdb421f741408346dd5997bb01c287087", + "Expected": "981dd99c3b113fae3e3eaa9435c0dc96779a23c12a53d1084b4f67b0b053a27560f627b873e3f16ad78f28c94f14b6392def26e4d8896c5e3c984e50fa0b3aa44f1da78b913187c6128baa9340b1e9c9a0fd02cb78885e72576da4a8f7e5a113e173a7a2889fde9d407bd9f06eb05bc8fc7b4229377a32941a02bf4edcc06d70", + "Name": "nagydani-2-square", + "Gas": 500, + "NoBenchmark": false + }, + { + "Input": 
"000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000080cad7d991a00047dd54d3399b6b0b937c718abddef7917c75b6681f40cc15e2be0003657d8d4c34167b2f0bbbca0ccaa407c2a6a07d50f1517a8f22979ce12a81dcaf707cc0cebfc0ce2ee84ee7f77c38b9281b9822a8d3de62784c089c9b18dcb9a2a5eecbede90ea788a862a9ddd9d609c2c52972d63e289e28f6a590ffbf5103e6d893b80aeed5e6e9ce9afa8a5d5675c93a32ac05554cb20e9951b2c140e3ef4e433068cf0fb73bc9f33af1853f64aa27a0028cbf570d7ac9048eae5dc7b28c87c31e5810f1e7fa2cda6adf9f1076dbc1ec1238560071e7efc4e9565c49be9e7656951985860a558a754594115830bcdb421f741408346dd5997bb01c287087", + "Expected": "d89ceb68c32da4f6364978d62aaa40d7b09b59ec61eb3c0159c87ec3a91037f7dc6967594e530a69d049b64adfa39c8fa208ea970cfe4b7bcd359d345744405afe1cbf761647e32b3184c7fbe87cee8c6c7ff3b378faba6c68b83b6889cb40f1603ee68c56b4c03d48c595c826c041112dc941878f8c5be828154afd4a16311f", + "Name": "nagydani-2-qube", + "Gas": 500, + "NoBenchmark": false + }, + { + "Input": "000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000080cad7d991a00047dd54d3399b6b0b937c718abddef7917c75b6681f40cc15e2be0003657d8d4c34167b2f0bbbca0ccaa407c2a6a07d50f1517a8f22979ce12a81dcaf707cc0cebfc0ce2ee84ee7f77c38b9281b9822a8d3de62784c089c9b18dcb9a2a5eecbede90ea788a862a9ddd9d609c2c52972d63e289e28f6a590ffbf51010001e6d893b80aeed5e6e9ce9afa8a5d5675c93a32ac05554cb20e9951b2c140e3ef4e433068cf0fb73bc9f33af1853f64aa27a0028cbf570d7ac9048eae5dc7b28c87c31e5810f1e7fa2cda6adf9f1076dbc1ec1238560071e7efc4e9565c49be9e7656951985860a558a754594115830bcdb421f741408346dd5997bb01c287087", + "Expected": 
"ad85e8ef13fd1dd46eae44af8b91ad1ccae5b7a1c92944f92a19f21b0b658139e0cabe9c1f679507c2de354bf2c91ebd965d1e633978a830d517d2f6f8dd5fd58065d58559de7e2334a878f8ec6992d9b9e77430d4764e863d77c0f87beede8f2f7f2ab2e7222f85cc9d98b8467f4bb72e87ef2882423ebdb6daf02dddac6db2", + "Name": "nagydani-2-pow0x10001", + "Gas": 2730, + "NoBenchmark": false + }, + { + "Input": "000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000100c9130579f243e12451760976261416413742bd7c91d39ae087f46794062b8c239f2a74abf3918605a0e046a7890e049475ba7fbb78f5de6490bd22a710cc04d30088179a919d86c2da62cf37f59d8f258d2310d94c24891be2d7eeafaa32a8cb4b0cfe5f475ed778f45907dc8916a73f03635f233f7a77a00a3ec9ca6761a5bbd558a2318ecd0caa1c5016691523e7e1fa267dd35e70c66e84380bdcf7c0582f540174e572c41f81e93da0b757dff0b0fe23eb03aa19af0bdec3afb474216febaacb8d0381e631802683182b0fe72c28392539850650b70509f54980241dc175191a35d967288b532a7a8223ce2440d010615f70df269501944d4ec16fe4a3cb02d7a85909174757835187cb52e71934e6c07ef43b4c46fc30bbcd0bc72913068267c54a4aabebb493922492820babdeb7dc9b1558fcf7bd82c37c82d3147e455b623ab0efa752fe0b3a67ca6e4d126639e645a0bf417568adbb2a6a4eef62fa1fa29b2a5a43bebea1f82193a7dd98eb483d09bb595af1fa9c97c7f41f5649d976aee3e5e59e2329b43b13bea228d4a93f16ba139ccb511de521ffe747aa2eca664f7c9e33da59075cc335afcd2bf3ae09765f01ab5a7c3e3938ec168b74724b5074247d200d9970382f683d6059b94dbc336603d1dfee714e4b447ac2fa1d99ecb4961da2854e03795ed758220312d101e1e3d87d5313a6d052aebde75110363d", + "Expected": 
"affc7507ea6d84751ec6b3f0d7b99dbcc263f33330e450d1b3ff0bc3d0874320bf4edd57debd587306988157958cb3cfd369cc0c9c198706f635c9e0f15d047df5cb44d03e2727f26b083c4ad8485080e1293f171c1ed52aef5993a5815c35108e848c951cf1e334490b4a539a139e57b68f44fee583306f5b85ffa57206b3ee5660458858534e5386b9584af3c7f67806e84c189d695e5eb96e1272d06ec2df5dc5fabc6e94b793718c60c36be0a4d031fc84cd658aa72294b2e16fc240aef70cb9e591248e38bd49c5a554d1afa01f38dab72733092f7555334bbef6c8c430119840492380aa95fa025dcf699f0a39669d812b0c6946b6091e6e235337b6f8", + "Name": "nagydani-3-square", + "Gas": 682, + "NoBenchmark": false + }, + { + "Input": "000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000100c9130579f243e12451760976261416413742bd7c91d39ae087f46794062b8c239f2a74abf3918605a0e046a7890e049475ba7fbb78f5de6490bd22a710cc04d30088179a919d86c2da62cf37f59d8f258d2310d94c24891be2d7eeafaa32a8cb4b0cfe5f475ed778f45907dc8916a73f03635f233f7a77a00a3ec9ca6761a5bbd558a2318ecd0caa1c5016691523e7e1fa267dd35e70c66e84380bdcf7c0582f540174e572c41f81e93da0b757dff0b0fe23eb03aa19af0bdec3afb474216febaacb8d0381e631802683182b0fe72c28392539850650b70509f54980241dc175191a35d967288b532a7a8223ce2440d010615f70df269501944d4ec16fe4a3cb03d7a85909174757835187cb52e71934e6c07ef43b4c46fc30bbcd0bc72913068267c54a4aabebb493922492820babdeb7dc9b1558fcf7bd82c37c82d3147e455b623ab0efa752fe0b3a67ca6e4d126639e645a0bf417568adbb2a6a4eef62fa1fa29b2a5a43bebea1f82193a7dd98eb483d09bb595af1fa9c97c7f41f5649d976aee3e5e59e2329b43b13bea228d4a93f16ba139ccb511de521ffe747aa2eca664f7c9e33da59075cc335afcd2bf3ae09765f01ab5a7c3e3938ec168b74724b5074247d200d9970382f683d6059b94dbc336603d1dfee714e4b447ac2fa1d99ecb4961da2854e03795ed758220312d101e1e3d87d5313a6d052aebde75110363d", + "Expected": 
"1b280ecd6a6bf906b806d527c2a831e23b238f89da48449003a88ac3ac7150d6a5e9e6b3be4054c7da11dd1e470ec29a606f5115801b5bf53bc1900271d7c3ff3cd5ed790d1c219a9800437a689f2388ba1a11d68f6a8e5b74e9a3b1fac6ee85fc6afbac599f93c391f5dc82a759e3c6c0ab45ce3f5d25d9b0c1bf94cf701ea6466fc9a478dacc5754e593172b5111eeba88557048bceae401337cd4c1182ad9f700852bc8c99933a193f0b94cf1aedbefc48be3bc93ef5cb276d7c2d5462ac8bb0c8fe8923a1db2afe1c6b90d59c534994a6a633f0ead1d638fdc293486bb634ff2c8ec9e7297c04241a61c37e3ae95b11d53343d4ba2b4cc33d2cfa7eb705e", + "Name": "nagydani-3-qube", + "Gas": 682, + "NoBenchmark": false + }, + { + "Input": "000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000100c9130579f243e12451760976261416413742bd7c91d39ae087f46794062b8c239f2a74abf3918605a0e046a7890e049475ba7fbb78f5de6490bd22a710cc04d30088179a919d86c2da62cf37f59d8f258d2310d94c24891be2d7eeafaa32a8cb4b0cfe5f475ed778f45907dc8916a73f03635f233f7a77a00a3ec9ca6761a5bbd558a2318ecd0caa1c5016691523e7e1fa267dd35e70c66e84380bdcf7c0582f540174e572c41f81e93da0b757dff0b0fe23eb03aa19af0bdec3afb474216febaacb8d0381e631802683182b0fe72c28392539850650b70509f54980241dc175191a35d967288b532a7a8223ce2440d010615f70df269501944d4ec16fe4a3cb010001d7a85909174757835187cb52e71934e6c07ef43b4c46fc30bbcd0bc72913068267c54a4aabebb493922492820babdeb7dc9b1558fcf7bd82c37c82d3147e455b623ab0efa752fe0b3a67ca6e4d126639e645a0bf417568adbb2a6a4eef62fa1fa29b2a5a43bebea1f82193a7dd98eb483d09bb595af1fa9c97c7f41f5649d976aee3e5e59e2329b43b13bea228d4a93f16ba139ccb511de521ffe747aa2eca664f7c9e33da59075cc335afcd2bf3ae09765f01ab5a7c3e3938ec168b74724b5074247d200d9970382f683d6059b94dbc336603d1dfee714e4b447ac2fa1d99ecb4961da2854e03795ed758220312d101e1e3d87d5313a6d052aebde75110363d", + "Expected": 
"37843d7c67920b5f177372fa56e2a09117df585f81df8b300fba245b1175f488c99476019857198ed459ed8d9799c377330e49f4180c4bf8e8f66240c64f65ede93d601f957b95b83efdee1e1bfde74169ff77002eaf078c71815a9220c80b2e3b3ff22c2f358111d816ebf83c2999026b6de50bfc711ff68705d2f40b753424aefc9f70f08d908b5a20276ad613b4ab4309a3ea72f0c17ea9df6b3367d44fb3acab11c333909e02e81ea2ed404a712d3ea96bba87461720e2d98723e7acd0520ac1a5212dbedcd8dc0c1abf61d4719e319ff4758a774790b8d463cdfe131d1b2dcfee52d002694e98e720cb6ae7ccea353bc503269ba35f0f63bf8d7b672a76", + "Name": "nagydani-3-pow0x10001", + "Gas": 10922, + "NoBenchmark": false + }, + { + "Input": "000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000200db34d0e438249c0ed685c949cc28776a05094e1c48691dc3f2dca5fc3356d2a0663bd376e4712839917eb9a19c670407e2c377a2de385a3ff3b52104f7f1f4e0c7bf7717fb913896693dc5edbb65b760ef1b00e42e9d8f9af17352385e1cd742c9b006c0f669995cb0bb21d28c0aced2892267637b6470d8cee0ab27fc5d42658f6e88240c31d6774aa60a7ebd25cd48b56d0da11209f1928e61005c6eb709f3e8e0aaf8d9b10f7d7e296d772264dc76897ccdddadc91efa91c1903b7232a9e4c3b941917b99a3bc0c26497dedc897c25750af60237aa67934a26a2bc491db3dcc677491944bc1f51d3e5d76b8d846a62db03dedd61ff508f91a56d71028125035c3a44cbb041497c83bf3e4ae2a9613a401cc721c547a2afa3b16a2969933d3626ed6d8a7428648f74122fd3f2a02a20758f7f693892c8fd798b39abac01d18506c45e71432639e9f9505719ee822f62ccbf47f6850f096ff77b5afaf4be7d772025791717dbe5abf9b3f40cff7d7aab6f67e38f62faf510747276e20a42127e7500c444f9ed92baf65ade9e836845e39c4316d9dce5f8e2c8083e2c0acbb95296e05e51aab13b6b8f53f06c9c4276e12b0671133218cc3ea907da3bd9a367096d9202128d14846cc2e20d56fc8473ecb07cecbfb8086919f3971926e7045b853d85a69d026195c70f9f7a823536e2a8f4b3e12e94d9b53a934353451094b8102df3143a0057457d75e8c708b6337a6f5a4fd1a06727acf9fb93e2993c62f3378b37d56c85e7b1e00f0145ebf8e4095bd723166293c60b6ac1252291ef65823c9e040ddad14969b3b340a4ef714db093a587c37766d6
8b8d6b5016e741587e7e6bf7e763b44f0247e64bae30f994d248bfd20541a333e5b225ef6a61199e301738b1e688f70ec1d7fb892c183c95dc543c3e12adf8a5e8b9ca9d04f9445cced3ab256f29e998e69efaa633a7b60e1db5a867924ccab0a171d9d6e1098dfa15acde9553de599eaa56490c8f411e4985111f3d40bddfc5e301edb01547b01a886550a61158f7e2033c59707789bf7c854181d0c2e2a42a93cf09209747d7082e147eb8544de25c3eb14f2e35559ea0c0f5877f2f3fc92132c0ae9da4e45b2f6c866a224ea6d1f28c05320e287750fbc647368d41116e528014cc1852e5531d53e4af938374daba6cee4baa821ed07117253bb3601ddd00d59a3d7fb2ef1f5a2fbba7c429f0cf9a5b3462410fd833a69118f8be9c559b1000cc608fd877fb43f8e65c2d1302622b944462579056874b387208d90623fcdaf93920ca7a9e4ba64ea208758222ad868501cc2c345e2d3a5ea2a17e5069248138c8a79c0251185d29ee73e5afab5354769142d2bf0cb6712727aa6bf84a6245fcdae66e4938d84d1b9dd09a884818622080ff5f98942fb20acd7e0c916c2d5ea7ce6f7e173315384518f", + "Expected": "8a5aea5f50dcc03dc7a7a272b5aeebc040554dbc1ffe36753c4fc75f7ed5f6c2cc0de3a922bf96c78bf0643a73025ad21f45a4a5cadd717612c511ab2bff1190fe5f1ae05ba9f8fe3624de1de2a817da6072ddcdb933b50216811dbe6a9ca79d3a3c6b3a476b079fd0d05f04fb154e2dd3e5cb83b148a006f2bcbf0042efb2ae7b916ea81b27aac25c3bf9a8b6d35440062ad8eae34a83f3ffa2cc7b40346b62174a4422584f72f95316f6b2bee9ff232ba9739301c97c99a9ded26c45d72676eb856ad6ecc81d36a6de36d7f9dafafee11baa43a4b0d5e4ecffa7b9b7dcefd58c397dd373e6db4acd2b2c02717712e6289bed7c813b670c4a0c6735aa7f3b0f1ce556eae9fcc94b501b2c8781ba50a8c6220e8246371c3c7359fe4ef9da786ca7d98256754ca4e496be0a9174bedbecb384bdf470779186d6a833f068d2838a88d90ef3ad48ff963b67c39cc5a3ee123baf7bf3125f64e77af7f30e105d72c4b9b5b237ed251e4c122c6d8c1405e736299c3afd6db16a28c6a9cfa68241e53de4cd388271fe534a6a9b0dbea6171d170db1b89858468885d08fecbd54c8e471c3e25d48e97ba450b96d0d87e00ac732aaa0d3ce4309c1064bd8a4c0808a97e0143e43a24cfa847635125cd41c13e0574487963e9d725c01375db99c31da67b4cf65eff555f0c0ac416c727ff8d438ad7c42030551d68c2e7adda0abb1ca7c10", + "Name": "nagydani-4-square", + "Gas": 2730, + "NoBenchmark": false + }, + { + "Input": 
"000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000200db34d0e438249c0ed685c949cc28776a05094e1c48691dc3f2dca5fc3356d2a0663bd376e4712839917eb9a19c670407e2c377a2de385a3ff3b52104f7f1f4e0c7bf7717fb913896693dc5edbb65b760ef1b00e42e9d8f9af17352385e1cd742c9b006c0f669995cb0bb21d28c0aced2892267637b6470d8cee0ab27fc5d42658f6e88240c31d6774aa60a7ebd25cd48b56d0da11209f1928e61005c6eb709f3e8e0aaf8d9b10f7d7e296d772264dc76897ccdddadc91efa91c1903b7232a9e4c3b941917b99a3bc0c26497dedc897c25750af60237aa67934a26a2bc491db3dcc677491944bc1f51d3e5d76b8d846a62db03dedd61ff508f91a56d71028125035c3a44cbb041497c83bf3e4ae2a9613a401cc721c547a2afa3b16a2969933d3626ed6d8a7428648f74122fd3f2a02a20758f7f693892c8fd798b39abac01d18506c45e71432639e9f9505719ee822f62ccbf47f6850f096ff77b5afaf4be7d772025791717dbe5abf9b3f40cff7d7aab6f67e38f62faf510747276e20a42127e7500c444f9ed92baf65ade9e836845e39c4316d9dce5f8e2c8083e2c0acbb95296e05e51aab13b6b8f53f06c9c4276e12b0671133218cc3ea907da3bd9a367096d9202128d14846cc2e20d56fc8473ecb07cecbfb8086919f3971926e7045b853d85a69d026195c70f9f7a823536e2a8f4b3e12e94d9b53a934353451094b8103df3143a0057457d75e8c708b6337a6f5a4fd1a06727acf9fb93e2993c62f3378b37d56c85e7b1e00f0145ebf8e4095bd723166293c60b6ac1252291ef65823c9e040ddad14969b3b340a4ef714db093a587c37766d68b8d6b5016e741587e7e6bf7e763b44f0247e64bae30f994d248bfd20541a333e5b225ef6a61199e301738b1e688f70ec1d7fb892c183c95dc543c3e12adf8a5e8b9ca9d04f9445cced3ab256f29e998e69efaa633a7b60e1db5a867924ccab0a171d9d6e1098dfa15acde9553de599eaa56490c8f411e4985111f3d40bddfc5e301edb01547b01a886550a61158f7e2033c59707789bf7c854181d0c2e2a42a93cf09209747d7082e147eb8544de25c3eb14f2e35559ea0c0f5877f2f3fc92132c0ae9da4e45b2f6c866a224ea6d1f28c05320e287750fbc647368d41116e528014cc1852e5531d53e4af938374daba6cee4baa821ed07117253bb3601ddd00d59a3d7fb2ef1f5a2fbba7c429f0cf9a5b3462410fd833a69118f8be9c559b1000cc608fd877fb43f8e65c2d1302622b94
4462579056874b387208d90623fcdaf93920ca7a9e4ba64ea208758222ad868501cc2c345e2d3a5ea2a17e5069248138c8a79c0251185d29ee73e5afab5354769142d2bf0cb6712727aa6bf84a6245fcdae66e4938d84d1b9dd09a884818622080ff5f98942fb20acd7e0c916c2d5ea7ce6f7e173315384518f", + "Expected": "5a2664252aba2d6e19d9600da582cdd1f09d7a890ac48e6b8da15ae7c6ff1856fc67a841ac2314d283ffa3ca81a0ecf7c27d89ef91a5a893297928f5da0245c99645676b481b7e20a566ee6a4f2481942bee191deec5544600bb2441fd0fb19e2ee7d801ad8911c6b7750affec367a4b29a22942c0f5f4744a4e77a8b654da2a82571037099e9c6d930794efe5cdca73c7b6c0844e386bdca8ea01b3d7807146bb81365e2cdc6475f8c23e0ff84463126189dc9789f72bbce2e3d2d114d728a272f1345122de23df54c922ec7a16e5c2a8f84da8871482bd258c20a7c09bbcd64c7a96a51029bbfe848736a6ba7bf9d931a9b7de0bcaf3635034d4958b20ae9ab3a95a147b0421dd5f7ebff46c971010ebfc4adbbe0ad94d5498c853e7142c450d8c71de4b2f84edbf8acd2e16d00c8115b150b1c30e553dbb82635e781379fe2a56360420ff7e9f70cc64c00aba7e26ed13c7c19622865ae07248daced36416080f35f8cc157a857ed70ea4f347f17d1bee80fa038abd6e39b1ba06b97264388b21364f7c56e192d4b62d9b161405f32ab1e2594e86243e56fcf2cb30d21adef15b9940f91af681da24328c883d892670c6aa47940867a81830a82b82716895db810df1b834640abefb7db2092dd92912cb9a735175bc447be40a503cf22dfe565b4ed7a3293ca0dfd63a507430b323ee248ec82e843b673c97ad730728cebc", + "Name": "nagydani-4-qube", + "Gas": 2730, + "NoBenchmark": false + }, + { + "Input": 
"000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000200db34d0e438249c0ed685c949cc28776a05094e1c48691dc3f2dca5fc3356d2a0663bd376e4712839917eb9a19c670407e2c377a2de385a3ff3b52104f7f1f4e0c7bf7717fb913896693dc5edbb65b760ef1b00e42e9d8f9af17352385e1cd742c9b006c0f669995cb0bb21d28c0aced2892267637b6470d8cee0ab27fc5d42658f6e88240c31d6774aa60a7ebd25cd48b56d0da11209f1928e61005c6eb709f3e8e0aaf8d9b10f7d7e296d772264dc76897ccdddadc91efa91c1903b7232a9e4c3b941917b99a3bc0c26497dedc897c25750af60237aa67934a26a2bc491db3dcc677491944bc1f51d3e5d76b8d846a62db03dedd61ff508f91a56d71028125035c3a44cbb041497c83bf3e4ae2a9613a401cc721c547a2afa3b16a2969933d3626ed6d8a7428648f74122fd3f2a02a20758f7f693892c8fd798b39abac01d18506c45e71432639e9f9505719ee822f62ccbf47f6850f096ff77b5afaf4be7d772025791717dbe5abf9b3f40cff7d7aab6f67e38f62faf510747276e20a42127e7500c444f9ed92baf65ade9e836845e39c4316d9dce5f8e2c8083e2c0acbb95296e05e51aab13b6b8f53f06c9c4276e12b0671133218cc3ea907da3bd9a367096d9202128d14846cc2e20d56fc8473ecb07cecbfb8086919f3971926e7045b853d85a69d026195c70f9f7a823536e2a8f4b3e12e94d9b53a934353451094b81010001df3143a0057457d75e8c708b6337a6f5a4fd1a06727acf9fb93e2993c62f3378b37d56c85e7b1e00f0145ebf8e4095bd723166293c60b6ac1252291ef65823c9e040ddad14969b3b340a4ef714db093a587c37766d68b8d6b5016e741587e7e6bf7e763b44f0247e64bae30f994d248bfd20541a333e5b225ef6a61199e301738b1e688f70ec1d7fb892c183c95dc543c3e12adf8a5e8b9ca9d04f9445cced3ab256f29e998e69efaa633a7b60e1db5a867924ccab0a171d9d6e1098dfa15acde9553de599eaa56490c8f411e4985111f3d40bddfc5e301edb01547b01a886550a61158f7e2033c59707789bf7c854181d0c2e2a42a93cf09209747d7082e147eb8544de25c3eb14f2e35559ea0c0f5877f2f3fc92132c0ae9da4e45b2f6c866a224ea6d1f28c05320e287750fbc647368d41116e528014cc1852e5531d53e4af938374daba6cee4baa821ed07117253bb3601ddd00d59a3d7fb2ef1f5a2fbba7c429f0cf9a5b3462410fd833a69118f8be9c559b1000cc608fd877fb43f8e65c2d130262
2b944462579056874b387208d90623fcdaf93920ca7a9e4ba64ea208758222ad868501cc2c345e2d3a5ea2a17e5069248138c8a79c0251185d29ee73e5afab5354769142d2bf0cb6712727aa6bf84a6245fcdae66e4938d84d1b9dd09a884818622080ff5f98942fb20acd7e0c916c2d5ea7ce6f7e173315384518f", + "Expected": "bed8b970c4a34849fc6926b08e40e20b21c15ed68d18f228904878d4370b56322d0da5789da0318768a374758e6375bfe4641fca5285ec7171828922160f48f5ca7efbfee4d5148612c38ad683ae4e3c3a053d2b7c098cf2b34f2cb19146eadd53c86b2d7ccf3d83b2c370bfb840913ee3879b1057a6b4e07e110b6bcd5e958bc71a14798c91d518cc70abee264b0d25a4110962a764b364ac0b0dd1ee8abc8426d775ec0f22b7e47b32576afaf1b5a48f64573ed1c5c29f50ab412188d9685307323d990802b81dacc06c6e05a1e901830ba9fcc67688dc29c5e27bde0a6e845ca925f5454b6fb3747edfaa2a5820838fb759eadf57f7cb5cec57fc213ddd8a4298fa079c3c0f472b07fb15aa6a7f0a3780bd296ff6a62e58ef443870b02260bd4fd2bbc98255674b8e1f1f9f8d33c7170b0ebbea4523b695911abbf26e41885344823bd0587115fdd83b721a4e8457a31c9a84b3d3520a07e0e35df7f48e5a9d534d0ec7feef1ff74de6a11e7f93eab95175b6ce22c68d78a642ad642837897ec11349205d8593ac19300207572c38d29ca5dfa03bc14cdbc32153c80e5cc3e739403d34c75915e49beb43094cc6dcafb3665b305ddec9286934ae66ec6b777ca528728c851318eb0f207b39f1caaf96db6eeead6b55ed08f451939314577d42bcc9f97c0b52d0234f88fd07e4c1d7780fdebc025cfffcb572cb27a8c33963", + "Name": "nagydani-4-pow0x10001", + "Gas": 43690, + "NoBenchmark": false + }, + { + "Input": 
"000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000400c5a1611f8be90071a43db23cc2fe01871cc4c0e8ab5743f6378e4fef77f7f6db0095c0727e20225beb665645403453e325ad5f9aeb9ba99bf3c148f63f9c07cf4fe8847ad5242d6b7d4499f93bd47056ddab8f7dee878fc2314f344dbee2a7c41a5d3db91eff372c730c2fdd3a141a4b61999e36d549b9870cf2f4e632c4d5df5f024f81c028000073a0ed8847cfb0593d36a47142f578f05ccbe28c0c06aeb1b1da027794c48db880278f79ba78ae64eedfea3c07d10e0562668d839749dc95f40467d15cf65b9cfc52c7c4bcef1cda3596dd52631aac942f146c7cebd46065131699ce8385b0db1874336747ee020a5698a3d1a1082665721e769567f579830f9d259cec1a836845109c21cf6b25da572512bf3c42fd4b96e43895589042ab60dd41f497db96aec102087fe784165bb45f942859268fd2ff6c012d9d00c02ba83eace047cc5f7b2c392c2955c58a49f0338d6fc58749c9db2155522ac17914ec216ad87f12e0ee95574613942fa615898c4d9e8a3be68cd6afa4e7a003dedbdf8edfee31162b174f965b20ae752ad89c967b3068b6f722c16b354456ba8e280f987c08e0a52d40a2e8f3a59b94d590aeef01879eb7a90b3ee7d772c839c85519cbeaddc0c193ec4874a463b53fcaea3271d80ebfb39b33489365fc039ae549a17a9ff898eea2f4cb27b8dbee4c17b998438575b2b8d107e4a0d66ba7fca85b41a58a8d51f191a35c856dfbe8aef2b00048a694bbccff832d23c8ca7a7ff0b6c0b3011d00b97c86c0628444d267c951d9e4fb8f83e154b8f74fb51aa16535e498235c5597dac9606ed0be3173a3836baa4e7d756ffe1e2879b415d3846bccd538c05b847785699aefde3e305decb600cd8fb0e7d8de5efc26971a6ad4e6d7a2d91474f1023a0ac4b78dc937da0ce607a45974d2cac1c33a2631ff7fe6144a3b2e5cf98b531a9627dea92c1dc82204d09db0439b6a11dd64b484e1263aa45fd9539b6020b55e3baece3986a8bffc1003406348f5c61265099ed43a766ee4f93f5f9c5abbc32a0fd3ac2b35b87f9ec26037d88275bd7dd0a54474995ee34ed3727f3f97c48db544b1980193a4b76a8a3ddab3591ce527f16d91882e67f0103b5cda53f7da54d489fc4ac08b6ab358a5a04aa9daa16219d50bd672a7cb804ed769d218807544e5993f1c27427104b349906a0b654df0bf69328afd3013fbe430155339c39f236df5557bf92f1ded7ff609a8502f49064ec3d1dbfb6c15d3a4c11a4f8acd12278cbf6
8acd5709463d12e3338a6eddb8c112f199645e23154a8e60879d2a654e3ed9296aa28f134168619691cd2c6b9e2eba4438381676173fc63c2588a3c5910dc149cf3760f0aa9fa9c3f5faa9162b0bf1aac9dd32b706a60ef53cbdb394b6b40222b5bc80eea82ba8958386672564cae3794f977871ab62337cf02e30049201ec12937e7ce79d0f55d9c810e20acf52212aca1d3888949e0e4830aad88d804161230eb89d4d329cc83570fe257217d2119134048dd2ed167646975fc7d77136919a049ea74cf08ddd2b896890bb24a0ba18094a22baa351bf29ad96c66bbb1a598f2ca391749620e62d61c3561a7d3653ccc8892c7b99baaf76bf836e2991cb06d6bc0514568ff0d1ec8bb4b3d6984f5eaefb17d3ea2893722375d3ddb8e389a8eef7d7d198f8e687d6a513983df906099f9a2d23f4f9dec6f8ef2f11fc0a21fac45353b94e00486f5e17d386af42502d09db33cf0cf28310e049c07e88682aeeb00cb833c5174266e62407a57583f1f88b304b7c6e0c84bbe1c0fd423072d37a5bd0aacf764229e5c7cd02473460ba3645cd8e8ae144065bf02d0dd238593d8e230354f67e0b2f23012c23274f80e3ee31e35e2606a4a3f31d94ab755e6d163cff52cbb36b6d0cc67ffc512aeed1dce4d7a0d70ce82f2baba12e8d514dc92a056f994adfb17b5b9712bd5186f27a2fda1f7039c5df2c8587fdc62f5627580c13234b55be4df3056050e2d1ef3218f0dd66cb05265fe1acfb0989d8213f2c19d1735a7cf3fa65d88dad5af52dc2bba22b7abf46c3bc77b5091baab9e8f0ddc4d5e581037de91a9f8dcbc69309be29cc815cf19a20a7585b8b3073edf51fc9baeb3e509b97fa4ecfd621e0fd57bd61cac1b895c03248ff12bdbc57509250df3517e8a3fe1d776836b34ab352b973d932ef708b14f7418f9eceb1d87667e61e3e758649cb083f01b133d37ab2f5afa96d6c84bcacf4efc3851ad308c1e7d9113624fce29fab460ab9d2a48d92cdb281103a5250ad44cb2ff6e67ac670c02fdafb3e0f1353953d6d7d5646ca1568dea55275a050ec501b7c6250444f7219f1ba7521ba3b93d089727ca5f3bbe0d6c1300b423377004954c5628fdb65770b18ced5c9b23a4a5a6d6ef25fe01b4ce278de0bcc4ed86e28a0a68818ffa40970128cf2c38740e80037984428c1bd5113f40ff47512ee6f4e4d8f9b8e8e1b3040d2928d003bd1c1329dc885302fbce9fa81c23b4dc49c7c82d29b52957847898676c89aa5d32b5b0e1c0d5a2b79a19d67562f407f19425687971a957375879d90c5f57c857136c17106c9ab1b99d80e69c8c954ed386493368884b55c939b8d64d26f643e800c56f90c01079d7c534e3b2b7ae352cefd3016da55f6a85eb803b85e2304915fd2001f77c74e2874
6293c46e4f5f0fd49cf988aafd0026b8e7a3bab2da5cdce1ea26c2e29ec03f4807fac432662b2d6c060be1c7be0e5489de69d0a6e03a4b9117f9244b34a0f1ecba89884f781c6320412413a00c4980287409a2a78c2cd7e65cecebbe4ec1c28cac4dd95f6998e78fc6f1392384331c9436aa10e10e2bf8ad2c4eafbcf276aa7bae64b74428911b3269c749338b0fc5075ad", + "Expected": "d61fe4e3f32ac260915b5b03b78a86d11bfc41d973fce5b0cc59035cf8289a8a2e3878ea15fa46565b0d806e2f85b53873ea20ed653869b688adf83f3ef444535bf91598ff7e80f334fb782539b92f39f55310cc4b35349ab7b278346eda9bc37c0d8acd3557fae38197f412f8d9e57ce6a76b7205c23564cab06e5615be7c6f05c3d05ec690cba91da5e89d55b152ff8dd2157dc5458190025cf94b1ad98f7cbe64e9482faba95e6b33844afc640892872b44a9932096508f4a782a4805323808f23e54b6ff9b841dbfa87db3505ae4f687972c18ea0f0d0af89d36c1c2a5b14560c153c3fee406f5cf15cfd1c0bb45d767426d465f2f14c158495069d0c5955a00150707862ecaae30624ebacdd8ac33e4e6aab3ff90b6ba445a84689386b9e945d01823a65874444316e83767290fcff630d2477f49d5d8ffdd200e08ee1274270f86ed14c687895f6caf5ce528bd970c20d2408a9ba66216324c6a011ac4999098362dbd98a038129a2d40c8da6ab88318aa3046cb660327cc44236d9e5d2163bd0959062195c51ed93d0088b6f92051fc99050ece2538749165976233697ab4b610385366e5ce0b02ad6b61c168ecfbedcdf74278a38de340fd7a5fead8e588e294795f9b011e2e60377a89e25c90e145397cdeabc60fd32444a6b7642a611a83c464d8b8976666351b4865c37b02e6dc21dbcdf5f930341707b618cc0f03c3122646b3385c9df9f2ec730eec9d49e7dfc9153b6e6289da8c4f0ebea9ccc1b751948e3bb7171c9e4d57423b0eeeb79095c030cb52677b3f7e0b45c30f645391f3f9c957afa549c4e0b2465b03c67993cd200b1af01035962edbc4c9e89b31c82ac121987d6529dafdeef67a132dc04b6dc68e77f22862040b75e2ceb9ff16da0fca534e6db7bd12fa7b7f51b6c08c1e23dfcdb7acbd2da0b51c87ffbced065a612e9b1c8bba9b7e2d8d7a2f04fcc4aaf355b60d764879a76b5e16762d5f2f55d585d0c8e82df6940960cddfb72c91dfa71f6b4e1c6ca25dfc39a878e998a663c04fe29d5e83b9586d047b4d7ff70a9f0d44f127e7d741685ca75f11629128d916a0ffef4be586a30c4b70389cc746e84ebf177c01ee8a4511cfbb9d1ecf7f7b33c7dd8177896e10bbc82f838dcd6db7ac67de62bf46b6a640fb580c5d1d2708f3862e3d2b645d0d18e49ef
088053e3a220adc0e033c2afcfe61c90e32151152eb3caaf746c5e377d541cafc6cbb0cc0fa48b5caf1728f2e1957f5addfc234f1a9d89e40d49356c9172d0561a695fce6dab1d412321bbf407f63766ffd7b6b3d79bcfa07991c5a9709849c1008689e3b47c50d613980bec239fb64185249d055b30375ccb4354d71fe4d05648fbf6c80634dfc3575f2f24abb714c1e4c95e8896763bf4316e954c7ad19e5780ab7a040ca6fb9271f90a8b22ae738daf6cb", + "Name": "nagydani-5-square", + "Gas": 10922, + "NoBenchmark": false + }, + { + "Input": "000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000400c5a1611f8be90071a43db23cc2fe01871cc4c0e8ab5743f6378e4fef77f7f6db0095c0727e20225beb665645403453e325ad5f9aeb9ba99bf3c148f63f9c07cf4fe8847ad5242d6b7d4499f93bd47056ddab8f7dee878fc2314f344dbee2a7c41a5d3db91eff372c730c2fdd3a141a4b61999e36d549b9870cf2f4e632c4d5df5f024f81c028000073a0ed8847cfb0593d36a47142f578f05ccbe28c0c06aeb1b1da027794c48db880278f79ba78ae64eedfea3c07d10e0562668d839749dc95f40467d15cf65b9cfc52c7c4bcef1cda3596dd52631aac942f146c7cebd46065131699ce8385b0db1874336747ee020a5698a3d1a1082665721e769567f579830f9d259cec1a836845109c21cf6b25da572512bf3c42fd4b96e43895589042ab60dd41f497db96aec102087fe784165bb45f942859268fd2ff6c012d9d00c02ba83eace047cc5f7b2c392c2955c58a49f0338d6fc58749c9db2155522ac17914ec216ad87f12e0ee95574613942fa615898c4d9e8a3be68cd6afa4e7a003dedbdf8edfee31162b174f965b20ae752ad89c967b3068b6f722c16b354456ba8e280f987c08e0a52d40a2e8f3a59b94d590aeef01879eb7a90b3ee7d772c839c85519cbeaddc0c193ec4874a463b53fcaea3271d80ebfb39b33489365fc039ae549a17a9ff898eea2f4cb27b8dbee4c17b998438575b2b8d107e4a0d66ba7fca85b41a58a8d51f191a35c856dfbe8aef2b00048a694bbccff832d23c8ca7a7ff0b6c0b3011d00b97c86c0628444d267c951d9e4fb8f83e154b8f74fb51aa16535e498235c5597dac9606ed0be3173a3836baa4e7d756ffe1e2879b415d3846bccd538c05b847785699aefde3e305decb600cd8fb0e7d8de5efc26971a6ad4e6d7a2d91474f1023a0ac4b78dc937da0ce607a45974d2cac1c33a2631ff7fe6144a3b2e5cf98b531a9
627dea92c1dc82204d09db0439b6a11dd64b484e1263aa45fd9539b6020b55e3baece3986a8bffc1003406348f5c61265099ed43a766ee4f93f5f9c5abbc32a0fd3ac2b35b87f9ec26037d88275bd7dd0a54474995ee34ed3727f3f97c48db544b1980193a4b76a8a3ddab3591ce527f16d91882e67f0103b5cda53f7da54d489fc4ac08b6ab358a5a04aa9daa16219d50bd672a7cb804ed769d218807544e5993f1c27427104b349906a0b654df0bf69328afd3013fbe430155339c39f236df5557bf92f1ded7ff609a8502f49064ec3d1dbfb6c15d3a4c11a4f8acd12278cbf68acd5709463d12e3338a6eddb8c112f199645e23154a8e60879d2a654e3ed9296aa28f134168619691cd2c6b9e2eba4438381676173fc63c2588a3c5910dc149cf3760f0aa9fa9c3f5faa9162b0bf1aac9dd32b706a60ef53cbdb394b6b40222b5bc80eea82ba8958386672564cae3794f977871ab62337cf03e30049201ec12937e7ce79d0f55d9c810e20acf52212aca1d3888949e0e4830aad88d804161230eb89d4d329cc83570fe257217d2119134048dd2ed167646975fc7d77136919a049ea74cf08ddd2b896890bb24a0ba18094a22baa351bf29ad96c66bbb1a598f2ca391749620e62d61c3561a7d3653ccc8892c7b99baaf76bf836e2991cb06d6bc0514568ff0d1ec8bb4b3d6984f5eaefb17d3ea2893722375d3ddb8e389a8eef7d7d198f8e687d6a513983df906099f9a2d23f4f9dec6f8ef2f11fc0a21fac45353b94e00486f5e17d386af42502d09db33cf0cf28310e049c07e88682aeeb00cb833c5174266e62407a57583f1f88b304b7c6e0c84bbe1c0fd423072d37a5bd0aacf764229e5c7cd02473460ba3645cd8e8ae144065bf02d0dd238593d8e230354f67e0b2f23012c23274f80e3ee31e35e2606a4a3f31d94ab755e6d163cff52cbb36b6d0cc67ffc512aeed1dce4d7a0d70ce82f2baba12e8d514dc92a056f994adfb17b5b9712bd5186f27a2fda1f7039c5df2c8587fdc62f5627580c13234b55be4df3056050e2d1ef3218f0dd66cb05265fe1acfb0989d8213f2c19d1735a7cf3fa65d88dad5af52dc2bba22b7abf46c3bc77b5091baab9e8f0ddc4d5e581037de91a9f8dcbc69309be29cc815cf19a20a7585b8b3073edf51fc9baeb3e509b97fa4ecfd621e0fd57bd61cac1b895c03248ff12bdbc57509250df3517e8a3fe1d776836b34ab352b973d932ef708b14f7418f9eceb1d87667e61e3e758649cb083f01b133d37ab2f5afa96d6c84bcacf4efc3851ad308c1e7d9113624fce29fab460ab9d2a48d92cdb281103a5250ad44cb2ff6e67ac670c02fdafb3e0f1353953d6d7d5646ca1568dea55275a050ec501b7c6250444f7219f1ba7521ba3b93d089727ca5
f3bbe0d6c1300b423377004954c5628fdb65770b18ced5c9b23a4a5a6d6ef25fe01b4ce278de0bcc4ed86e28a0a68818ffa40970128cf2c38740e80037984428c1bd5113f40ff47512ee6f4e4d8f9b8e8e1b3040d2928d003bd1c1329dc885302fbce9fa81c23b4dc49c7c82d29b52957847898676c89aa5d32b5b0e1c0d5a2b79a19d67562f407f19425687971a957375879d90c5f57c857136c17106c9ab1b99d80e69c8c954ed386493368884b55c939b8d64d26f643e800c56f90c01079d7c534e3b2b7ae352cefd3016da55f6a85eb803b85e2304915fd2001f77c74e28746293c46e4f5f0fd49cf988aafd0026b8e7a3bab2da5cdce1ea26c2e29ec03f4807fac432662b2d6c060be1c7be0e5489de69d0a6e03a4b9117f9244b34a0f1ecba89884f781c6320412413a00c4980287409a2a78c2cd7e65cecebbe4ec1c28cac4dd95f6998e78fc6f1392384331c9436aa10e10e2bf8ad2c4eafbcf276aa7bae64b74428911b3269c749338b0fc5075ad", + "Expected": "5f9c70ec884926a89461056ad20ac4c30155e817f807e4d3f5bb743d789c83386762435c3627773fa77da5144451f2a8aad8adba88e0b669f5377c5e9bad70e45c86fe952b613f015a9953b8a5de5eaee4566acf98d41e327d93a35bd5cef4607d025e58951167957df4ff9b1627649d3943805472e5e293d3efb687cfd1e503faafeb2840a3e3b3f85d016051a58e1c9498aab72e63b748d834b31eb05d85dcde65e27834e266b85c75cc4ec0135135e0601cb93eeeb6e0010c8ceb65c4c319623c5e573a2c8c9fbbf7df68a930beb412d3f4dfd146175484f45d7afaa0d2e60684af9b34730f7c8438465ad3e1d0c3237336722f2aa51095bd5759f4b8ab4dda111b684aa3dac62a761722e7ae43495b7709933512c81c4e3c9133a51f7ce9f2b51fcec064f65779666960b4e45df3900f54311f5613e8012dd1b8efd359eda31a778264c72aa8bb419d862734d769076bce2810011989a45374e5c5d8729fec21427f0bf397eacbb4220f603cf463a4b0c94efd858ffd9768cd60d6ce68d755e0fbad007ce5c2223d70c7018345a102e4ab3c60a13a9e7794303156d4c2063e919f2153c13961fb324c80b240742f47773a7a8e25b3e3fb19b00ce839346c6eb3c732fbc6b888df0b1fe0a3d07b053a2e9402c267b2d62f794d8a2840526e3ade15ce2264496ccd7519571dfde47f7a4bb16292241c20b2be59f3f8fb4f6383f232d838c5a22d8c95b6834d9d2ca493f5a505ebe8899503b0e8f9b19e6e2dd81c1628b80016d02097e0134de51054c4e7674824d4d758760fc52377d2cad145e259aa2ffaf54139e1a66b1e0c1c191e32ac59474c6b526f5b3ba07d3e5ec286eddf531fcd5292869be58c9
f22ef91026159f7cf9d05ef66b4299f4da48cc1635bf2243051d342d378a22c83390553e873713c0454ce5f3234397111ac3fe3207b86f0ed9fc025c81903e1748103692074f83824fda6341be4f95ff00b0a9a208c267e12fa01825054cc0513629bf3dbb56dc5b90d4316f87654a8be18227978ea0a8a522760cad620d0d14fd38920fb7321314062914275a5f99f677145a6979b156bd82ecd36f23f8e1273cc2759ecc0b2c69d94dad5211d1bed939dd87ed9e07b91d49713a6e16ade0a98aea789f04994e318e4ff2c8a188cd8d43aeb52c6daa3bc29b4af50ea82a247c5cd67b573b34cbadcc0a376d3bbd530d50367b42705d870f2e27a8197ef46070528bfe408360faa2ebb8bf76e9f388572842bcb119f4d84ee34ae31f5cc594f23705a49197b181fb78ed1ec99499c690f843a4d0cf2e226d118e9372271054fbabdcc5c92ae9fefaef0589cd0e722eaf30c1703ec4289c7fd81beaa8a455ccee5298e31e2080c10c366a6fcf56f7d13582ad0bcad037c612b710fc595b70fbefaaca23623b60c6c39b11beb8e5843b6b3dac60f", + "Name": "nagydani-5-qube", + "Gas": 10922, + "NoBenchmark": false + }, + { + "Input": "000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000400c5a1611f8be90071a43db23cc2fe01871cc4c0e8ab5743f6378e4fef77f7f6db0095c0727e20225beb665645403453e325ad5f9aeb9ba99bf3c148f63f9c07cf4fe8847ad5242d6b7d4499f93bd47056ddab8f7dee878fc2314f344dbee2a7c41a5d3db91eff372c730c2fdd3a141a4b61999e36d549b9870cf2f4e632c4d5df5f024f81c028000073a0ed8847cfb0593d36a47142f578f05ccbe28c0c06aeb1b1da027794c48db880278f79ba78ae64eedfea3c07d10e0562668d839749dc95f40467d15cf65b9cfc52c7c4bcef1cda3596dd52631aac942f146c7cebd46065131699ce8385b0db1874336747ee020a5698a3d1a1082665721e769567f579830f9d259cec1a836845109c21cf6b25da572512bf3c42fd4b96e43895589042ab60dd41f497db96aec102087fe784165bb45f942859268fd2ff6c012d9d00c02ba83eace047cc5f7b2c392c2955c58a49f0338d6fc58749c9db2155522ac17914ec216ad87f12e0ee95574613942fa615898c4d9e8a3be68cd6afa4e7a003dedbdf8edfee31162b174f965b20ae752ad89c967b3068b6f722c16b354456ba8e280f987c08e0a52d40a2e8f3a59b94d590aeef01879eb7a90b3ee7d772c839c85519cbeaddc0c19
3ec4874a463b53fcaea3271d80ebfb39b33489365fc039ae549a17a9ff898eea2f4cb27b8dbee4c17b998438575b2b8d107e4a0d66ba7fca85b41a58a8d51f191a35c856dfbe8aef2b00048a694bbccff832d23c8ca7a7ff0b6c0b3011d00b97c86c0628444d267c951d9e4fb8f83e154b8f74fb51aa16535e498235c5597dac9606ed0be3173a3836baa4e7d756ffe1e2879b415d3846bccd538c05b847785699aefde3e305decb600cd8fb0e7d8de5efc26971a6ad4e6d7a2d91474f1023a0ac4b78dc937da0ce607a45974d2cac1c33a2631ff7fe6144a3b2e5cf98b531a9627dea92c1dc82204d09db0439b6a11dd64b484e1263aa45fd9539b6020b55e3baece3986a8bffc1003406348f5c61265099ed43a766ee4f93f5f9c5abbc32a0fd3ac2b35b87f9ec26037d88275bd7dd0a54474995ee34ed3727f3f97c48db544b1980193a4b76a8a3ddab3591ce527f16d91882e67f0103b5cda53f7da54d489fc4ac08b6ab358a5a04aa9daa16219d50bd672a7cb804ed769d218807544e5993f1c27427104b349906a0b654df0bf69328afd3013fbe430155339c39f236df5557bf92f1ded7ff609a8502f49064ec3d1dbfb6c15d3a4c11a4f8acd12278cbf68acd5709463d12e3338a6eddb8c112f199645e23154a8e60879d2a654e3ed9296aa28f134168619691cd2c6b9e2eba4438381676173fc63c2588a3c5910dc149cf3760f0aa9fa9c3f5faa9162b0bf1aac9dd32b706a60ef53cbdb394b6b40222b5bc80eea82ba8958386672564cae3794f977871ab62337cf010001e30049201ec12937e7ce79d0f55d9c810e20acf52212aca1d3888949e0e4830aad88d804161230eb89d4d329cc83570fe257217d2119134048dd2ed167646975fc7d77136919a049ea74cf08ddd2b896890bb24a0ba18094a22baa351bf29ad96c66bbb1a598f2ca391749620e62d61c3561a7d3653ccc8892c7b99baaf76bf836e2991cb06d6bc0514568ff0d1ec8bb4b3d6984f5eaefb17d3ea2893722375d3ddb8e389a8eef7d7d198f8e687d6a513983df906099f9a2d23f4f9dec6f8ef2f11fc0a21fac45353b94e00486f5e17d386af42502d09db33cf0cf28310e049c07e88682aeeb00cb833c5174266e62407a57583f1f88b304b7c6e0c84bbe1c0fd423072d37a5bd0aacf764229e5c7cd02473460ba3645cd8e8ae144065bf02d0dd238593d8e230354f67e0b2f23012c23274f80e3ee31e35e2606a4a3f31d94ab755e6d163cff52cbb36b6d0cc67ffc512aeed1dce4d7a0d70ce82f2baba12e8d514dc92a056f994adfb17b5b9712bd5186f27a2fda1f7039c5df2c8587fdc62f5627580c13234b55be4df3056050e2d1ef3218f0dd66cb05265fe1acfb0989d8213f2c19d1735a7cf3fa65
d88dad5af52dc2bba22b7abf46c3bc77b5091baab9e8f0ddc4d5e581037de91a9f8dcbc69309be29cc815cf19a20a7585b8b3073edf51fc9baeb3e509b97fa4ecfd621e0fd57bd61cac1b895c03248ff12bdbc57509250df3517e8a3fe1d776836b34ab352b973d932ef708b14f7418f9eceb1d87667e61e3e758649cb083f01b133d37ab2f5afa96d6c84bcacf4efc3851ad308c1e7d9113624fce29fab460ab9d2a48d92cdb281103a5250ad44cb2ff6e67ac670c02fdafb3e0f1353953d6d7d5646ca1568dea55275a050ec501b7c6250444f7219f1ba7521ba3b93d089727ca5f3bbe0d6c1300b423377004954c5628fdb65770b18ced5c9b23a4a5a6d6ef25fe01b4ce278de0bcc4ed86e28a0a68818ffa40970128cf2c38740e80037984428c1bd5113f40ff47512ee6f4e4d8f9b8e8e1b3040d2928d003bd1c1329dc885302fbce9fa81c23b4dc49c7c82d29b52957847898676c89aa5d32b5b0e1c0d5a2b79a19d67562f407f19425687971a957375879d90c5f57c857136c17106c9ab1b99d80e69c8c954ed386493368884b55c939b8d64d26f643e800c56f90c01079d7c534e3b2b7ae352cefd3016da55f6a85eb803b85e2304915fd2001f77c74e28746293c46e4f5f0fd49cf988aafd0026b8e7a3bab2da5cdce1ea26c2e29ec03f4807fac432662b2d6c060be1c7be0e5489de69d0a6e03a4b9117f9244b34a0f1ecba89884f781c6320412413a00c4980287409a2a78c2cd7e65cecebbe4ec1c28cac4dd95f6998e78fc6f1392384331c9436aa10e10e2bf8ad2c4eafbcf276aa7bae64b74428911b3269c749338b0fc5075ad", + "Expected": 
"5a0eb2bdf0ac1cae8e586689fa16cd4b07dfdedaec8a110ea1fdb059dd5253231b6132987598dfc6e11f86780428982d50cf68f67ae452622c3b336b537ef3298ca645e8f89ee39a26758206a5a3f6409afc709582f95274b57b71fae5c6b74619ae6f089a5393c5b79235d9caf699d23d88fb873f78379690ad8405e34c19f5257d596580c7a6a7206a3712825afe630c76b31cdb4a23e7f0632e10f14f4e282c81a66451a26f8df2a352b5b9f607a7198449d1b926e27036810368e691a74b91c61afa73d9d3b99453e7c8b50fd4f09c039a2f2feb5c419206694c31b92df1d9586140cb3417b38d0c503c7b508cc2ed12e813a1c795e9829eb39ee78eeaf360a169b491a1d4e419574e712402de9d48d54c1ae5e03739b7156615e8267e1fb0a897f067afd11fb33f6e24182d7aaaaa18fe5bc1982f20d6b871e5a398f0f6f718181d31ec225cfa9a0a70124ed9a70031bdf0c1c7829f708b6e17d50419ef361cf77d99c85f44607186c8d683106b8bd38a49b5d0fb503b397a83388c5678dcfcc737499d84512690701ed621a6f0172aecf037184ddf0f2453e4053024018e5ab2e30d6d5363b56e8b41509317c99042f517247474ab3abc848e00a07f69c254f46f2a05cf6ed84e5cc906a518fdcfdf2c61ce731f24c5264f1a25fc04934dc28aec112134dd523f70115074ca34e3807aa4cb925147f3a0ce152d323bd8c675ace446d0fd1ae30c4b57f0eb2c23884bc18f0964c0114796c5b6d080c3d89175665fbf63a6381a6a9da39ad070b645c8bb1779506da14439a9f5b5d481954764ea114fac688930bc68534d403cff4210673b6a6ff7ae416b7cd41404c3d3f282fcd193b86d0f54d0006c2a503b40d5c3930da980565b8f9630e9493a79d1c03e74e5f93ac8e4dc1a901ec5e3b3e57049124c7b72ea345aa359e782285d9e6a5c144a378111dd02c40855ff9c2be9b48425cb0b2fd62dc8678fd151121cf26a65e917d65d8e0dacfae108eb5508b601fb8ffa370be1f9a8b749a2d12eeab81f41079de87e2d777994fa4d28188c579ad327f9957fb7bdecec5c680844dd43cb57cf87aeb763c003e65011f73f8c63442df39a92b946a6bd968a1c1e4d5fa7d88476a68bd8e20e5b70a99259c7d3f85fb1b65cd2e93972e6264e74ebf289b8b6979b9b68a85cd5b360c1987f87235c3c845d62489e33acf85d53fa3561fe3a3aee18924588d9c6eba4edb7a4d106b31173e42929f6f0c48c80ce6a72d54eca7c0fe870068b7a7c89c63cdda593f5b32d3cb4ea8a32c39f00ab449155757172d66763ed9527019d6de6c9f2416aa6203f4d11c9ebee1e1d3845099e55504446448027212616167eb36035726daa7698b075286f5379cd3e93cb3e0cf4f9cb8d017facbb5550ed3
2d5ec5400ae57e47e2bf78d1eaeff9480cc765ceff39db500", + "Name": "nagydani-5-pow0x10001", + "Gas": 174762, + "NoBenchmark": false + } + ] +} diff --git a/tools/common/helpers.nim b/tools/common/helpers.nim index ca51d89a8d..28f5be689d 100644 --- a/tools/common/helpers.nim +++ b/tools/common/helpers.nim @@ -131,6 +131,8 @@ func getChainConfig*(network: string, c: ChainConfig) = c.assignTime(HardFork.Prague, TimeZero) of $TestFork.CancunToPragueAtTime15k: c.assignTime(HardFork.Prague, EthTime(15000)) + of $TestFork.Osaka: + c.assignTime(HardFork.Osaka, TimeZero) else: raise newException(ValueError, "unsupported network " & network) diff --git a/tools/common/types.nim b/tools/common/types.nim index 933588d0df..2231083113 100644 --- a/tools/common/types.nim +++ b/tools/common/types.nim @@ -39,6 +39,7 @@ type ShanghaiToCancunAtTime15k Prague CancunToPragueAtTime15k + Osaka LogLevel* = enum Silent From 2bb6ff03d5fbe684e95daac0578b69b37d14ebd4 Mon Sep 17 00:00:00 2001 From: andri lim Date: Fri, 23 May 2025 10:42:35 +0700 Subject: [PATCH 034/138] Fix outdated txparse tools (#3324) --- tests/test_tools_build.nim | 4 +++- tools/txparse/config.nims | 12 ++++++++++++ tools/txparse/txparse.nim | 17 +++++++++++++---- tools/txparse/txparse_test.nim | 5 +++-- 4 files changed, 31 insertions(+), 7 deletions(-) create mode 100644 tools/txparse/config.nims diff --git a/tests/test_tools_build.nim b/tests/test_tools_build.nim index 96a171ba01..9560078ac3 100644 --- a/tests/test_tools_build.nim +++ b/tests/test_tools_build.nim @@ -20,4 +20,6 @@ import ../tools/t8n/t8n_test, ../tools/t8n/t8n_debug, ../tools/evmstate/evmstate, - ../tools/evmstate/evmstate_test + ../tools/evmstate/evmstate_test, + ../tools/txparse/txparse, + ../tools/txparse/txparse_test diff --git a/tools/txparse/config.nims b/tools/txparse/config.nims new file mode 100644 index 0000000000..0941fe22d9 --- /dev/null +++ b/tools/txparse/config.nims @@ -0,0 +1,12 @@ +# Nimbus +# Copyright (c) 2022-2025 Status Research & 
Development GmbH +# Licensed under either of +# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or +# http://www.apache.org/licenses/LICENSE-2.0) +# * MIT license ([LICENSE-MIT](LICENSE-MIT) or +# http://opensource.org/licenses/MIT) +# at your option. This file may not be copied, modified, or distributed except +# according to those terms. + +# chronicles log output will intefere with txparse ouput +switch("define", "chronicles_enabled=off") diff --git a/tools/txparse/txparse.nim b/tools/txparse/txparse.nim index 05f523cc33..e8732680f5 100644 --- a/tools/txparse/txparse.nim +++ b/tools/txparse/txparse.nim @@ -12,17 +12,21 @@ import eth/[common, rlp], stew/byteutils, eth/common/transaction_utils, + ../common/helpers, ../../execution_chain/transaction, - ../../execution_chain/common/evmforks + ../../execution_chain/core/validate, + ../../execution_chain/common/evmforks, + ../../execution_chain/common/common -proc parseTx(hexLine: string) = +proc parseTx(com: CommonRef, hexLine: string) = try: let bytes = hexToSeqByte(hexLine) tx = decodeTx(bytes) address = tx.recoverSender().expect("valid signature") - tx.validate(FkLondon) + validateTxBasic(com, tx, FkPrague).isOkOr: + echo "err: ", error # everything ok echo "0x", address.toHex @@ -37,7 +41,12 @@ proc parseTx(hexLine: string) = echo "err: malformed rlp" proc main() = + let + memDB = newCoreDbRef DefaultDbMemory + config = getChainConfig("Prague") + com = CommonRef.new(memDB, nil, config) + for hexLine in stdin.lines: - parseTx(hexLine) + com.parseTx(hexLine) main() diff --git a/tools/txparse/txparse_test.nim b/tools/txparse/txparse_test.nim index cefd55fe82..6ba9b31c13 100644 --- a/tools/txparse/txparse_test.nim +++ b/tools/txparse/txparse_test.nim @@ -1,5 +1,5 @@ # Nimbus -# Copyright (c) 2022-2023 Status Research & Development GmbH +# Copyright (c) 2022-2025 Status Research & Development GmbH # Licensed under either of # * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or # 
http://www.apache.org/licenses/LICENSE-2.0) @@ -74,7 +74,7 @@ proc runTest(fileName: string): bool = proc collectFileNames(inputPath: string, fileNames: var seq[TestFile]) = for filename in walkDirRec(inputPath): - if not fileName.endsWith(".json"): + if not filename.endsWith(".json"): continue fileNames.add TestFile( @@ -88,6 +88,7 @@ proc main() = collectFileNames(testData, filenames) for input in filenames: + let input = input # Workaround compiler lent issue test input.dispName: let res = runTest(input.fullPath) check true == res From 60e4838b478a2e462f46ea3ffa00fa1ade24c292 Mon Sep 17 00:00:00 2001 From: Jordan Hrycaj Date: Fri, 23 May 2025 09:23:21 +0000 Subject: [PATCH 035/138] Beacon sync rewrote state handler fixed some assert after suspending (#3325) * Code cosmetics, docu/comment and logging updates, etc. details + rename `worker_config` -> `worker_const` + move syncer state from `worker_desc` to `worker_const` + update ticker (for debugging only) + rename `headers` and `bodies` to `*_fetch` * Rename syncer state symbols and provide unified state display details For logging, provide tools to display as a single text symbol + control/state (Running, Zombie, etc.) + syncer state (headers/block processing, cancelled, etc.) + header cache mode + etc. * Re-write syncer state machine why The code became horrible after several changes and extensions. So maintenance became a no-no. There will me more extension and correction so a code update was badly needed. * Remove crufty reorg counter detail The reorg counter is not used anymore. It was useful a year ago for debugging, only. * Redesign blocks sync cancel procedure (and update header cancel proc) note This patch fixes issue #3321 when the syncer state becomes idle (ready for suspending the syncer) while there are still some `FC` import pseudo-parallel processes running. This might lead to some unexpected late bookkeeping entries causing an assertion exception when the *books* are checked. 
why Cancelling block processing was wrongly implemented by setting the scheduler flag `poolMode` directly. This worked but is hard to debug an not really extensible. Now the cancel procedure from the block processor works by sending a request to the state machine which in turn runs a clean shutdown of the block download. Also, any cancel and finish state commits only after all the system pseudo-parallel tasks were synchronised using `poolMode`. * Fix copyright year --- execution_chain/sync/beacon/worker.nim | 2 +- .../sync/beacon/worker/blocks_staged.nim | 61 +--- .../{bodies.nim => bodies_fetch.nim} | 8 +- .../worker/blocks_staged/staged_blocks.nim | 24 +- .../worker/blocks_staged/staged_queue.nim | 5 +- .../sync/beacon/worker/headers_staged.nim | 32 +-- .../{headers.nim => headers_fetch.nim} | 10 +- .../worker/headers_staged/staged_collect.nim | 27 +- .../worker/headers_staged/staged_queue.nim | 5 +- .../sync/beacon/worker/headers_unproc.nim | 1 - .../sync/beacon/worker/helpers.nim | 16 +- .../sync/beacon/worker/start_stop.nim | 2 +- execution_chain/sync/beacon/worker/update.nim | 270 +++++++++--------- .../sync/beacon/worker/update/ticker.nim | 41 ++- .../{worker_config.nim => worker_const.nim} | 9 + execution_chain/sync/beacon/worker_desc.nim | 33 ++- 16 files changed, 272 insertions(+), 274 deletions(-) rename execution_chain/sync/beacon/worker/blocks_staged/{bodies.nim => bodies_fetch.nim} (94%) rename execution_chain/sync/beacon/worker/headers_staged/{headers.nim => headers_fetch.nim} (94%) rename execution_chain/sync/beacon/{worker_config.nim => worker_const.nim} (91%) diff --git a/execution_chain/sync/beacon/worker.nim b/execution_chain/sync/beacon/worker.nim index dddbf2441a..42028b1257 100644 --- a/execution_chain/sync/beacon/worker.nim +++ b/execution_chain/sync/beacon/worker.nim @@ -79,7 +79,7 @@ proc start*(buddy: BeaconBuddyRef; info: static[string]): bool = proc stop*(buddy: BeaconBuddyRef; info: static[string]) = ## Clean up this peer if not 
buddy.ctx.hibernate: debug info & ": release peer", peer=buddy.peer, - ctrl=buddy.ctrl.state, nSyncPeers=(buddy.ctx.pool.nBuddies-1), + nSyncPeers=(buddy.ctx.pool.nBuddies-1), syncState=($buddy.syncState), nLaps=buddy.only.nMultiLoop, lastIdleGap=buddy.only.multiRunIdle.toStr buddy.stopBuddy() diff --git a/execution_chain/sync/beacon/worker/blocks_staged.nim b/execution_chain/sync/beacon/worker/blocks_staged.nim index fee39f6c5e..1a8850f685 100644 --- a/execution_chain/sync/beacon/worker/blocks_staged.nim +++ b/execution_chain/sync/beacon/worker/blocks_staged.nim @@ -16,8 +16,8 @@ import pkg/stew/[interval_set, sorted_set], ../../../networking/p2p, ../worker_desc, - ./blocks_staged/[bodies, staged_blocks], - ./[blocks_unproc, helpers] + ./blocks_staged/[bodies_fetch, staged_blocks], + ./blocks_unproc # ------------------------------------------------------------------------------ # Private function(s) @@ -36,23 +36,13 @@ proc blocksStagedProcessImpl( ## between the top of the `topImported` and the least queue block number. ## if ctx.blk.staged.len == 0: - trace info & ": blocksStagedProcess empty queue", peer=($maybePeer), - topImported=ctx.blk.topImported.bnStr, nStagedQ=ctx.blk.staged.len, - poolMode=ctx.poolMode, syncState=ctx.pool.lastState, - nSyncPeers=ctx.pool.nBuddies return false # switch peer var nImported = 0u64 # statistics switchPeer = false # for return code - trace info & ": blocksStagedProcess start", peer=($maybePeer), - topImported=ctx.blk.topImported.bnStr, nStagedQ=ctx.blk.staged.len, - poolMode=ctx.poolMode, syncState=ctx.pool.lastState, - nSyncPeers=ctx.pool.nBuddies - - var minNum = BlockNumber(0) - while ctx.pool.lastState == processingBlocks: + while ctx.pool.lastState == SyncState.blocks: # Fetch list with the least block numbers let qItem = ctx.blk.staged.ge(0).valueOr: @@ -60,7 +50,7 @@ proc blocksStagedProcessImpl( # Make sure that the lowest block is available, already. 
Or the other way # round: no unprocessed block number range precedes the least staged block. - minNum = qItem.data.blocks[0].header.number + let minNum = qItem.data.blocks[0].header.number if ctx.blk.topImported + 1 < minNum: trace info & ": block queue not ready yet", peer=($maybePeer), topImported=ctx.blk.topImported.bnStr, qItem=qItem.data.blocks.bnStr, @@ -92,10 +82,6 @@ proc blocksStagedProcessImpl( topImported=ctx.blk.topImported.bnStr, nStagedQ=ctx.blk.staged.len, nSyncPeers=ctx.pool.nBuddies - trace info & ": blocksStagedProcess end", peer=($maybePeer), - topImported=ctx.blk.topImported.bnStr, nImported, minNum, - nStagedQ=ctx.blk.staged.len, nSyncPeers=ctx.pool.nBuddies, switchPeer - return not switchPeer # ------------------------------------------------------------------------------ @@ -173,11 +159,6 @@ proc blocksStagedCollect*( if bottom < ctx.blk.topImported: discard ctx.blocksUnprocFetch(ctx.blk.topImported - bottom).expect("iv") - trace info & ": blocksStagedCollect direct loop", peer, - ctrl=buddy.ctrl.state, poolMode=ctx.poolMode, - syncState=ctx.pool.lastState, topImported=ctx.blk.topImported.bnStr, - bottom=bottom.bnStr - # Fetch blocks and verify result let blocks = (await buddy.blocksFetch(nFetchBodiesRequest, info)).valueOr: break fetchBlocksBody # done, exit this function @@ -236,9 +217,9 @@ proc blocksStagedCollect*( # block chain or similar.) ctx.pool.failedPeers.incl buddy.peerID - debug info & ": no blocks yet", peer, ctrl=buddy.ctrl.state, - poolMode=ctx.poolMode, syncState=ctx.pool.lastState, - failedPeers=ctx.pool.failedPeers.len, bdyErrors=buddy.bdyErrors + debug info & ": no blocks yet (failed peer)", peer, + failedPeers=ctx.pool.failedPeers.len, + syncState=($buddy.syncState), bdyErrors=buddy.bdyErrors return info "Queued/staged or imported blocks", @@ -265,29 +246,13 @@ template blocksStagedProcess*( proc blocksStagedReorg*(ctx: BeaconCtxRef; info: static[string]) = ## Some pool mode intervention. 
## - ## One scenario is that some blocks do not have a matching header available. - ## The main reson might be that the queue of block lists had a gap so that - ## some blocks could not be imported. This in turn can happen when the `FC` - ## module was reset (e.g. by `CL` via RPC.) - ## - ## A reset by `CL` via RPC would mostly happen if the syncer is near the - ## top of the block chain anyway. So the savest way to re-org is to flush - ## the block queues as there won't be mant data cached, then. - ## - if ctx.blk.staged.len == 0 and - ctx.blocksUnprocIsEmpty(): - # nothing to do - return - - # Update counter - ctx.pool.nReorg.inc - - # Reset block queues - debug info & ": Flushing block queues", nUnproc=ctx.blocksUnprocTotal(), - nStagedQ=ctx.blk.staged.len, nReorg=ctx.pool.nReorg + if ctx.pool.lastState in {blocksCancel,blocksFinish}: + trace info & ": Flushing block queues", + nUnproc=ctx.blocksUnprocTotal(), nStagedQ=ctx.blk.staged.len - ctx.blocksUnprocClear() - ctx.blk.staged.clear() + ctx.blocksUnprocClear() + ctx.blk.staged.clear() + ctx.blk.cancelRequest = false # ------------------------------------------------------------------------------ # End diff --git a/execution_chain/sync/beacon/worker/blocks_staged/bodies.nim b/execution_chain/sync/beacon/worker/blocks_staged/bodies_fetch.nim similarity index 94% rename from execution_chain/sync/beacon/worker/blocks_staged/bodies.nim rename to execution_chain/sync/beacon/worker/blocks_staged/bodies_fetch.nim index c4a81da9fe..fa52c94789 100644 --- a/execution_chain/sync/beacon/worker/blocks_staged/bodies.nim +++ b/execution_chain/sync/beacon/worker/blocks_staged/bodies_fetch.nim @@ -78,14 +78,15 @@ proc bodiesFetch*( if resp.isNone or buddy.ctrl.stopped: buddy.fetchRegisterError() trace trEthRecvReceivedBlockBodies, peer, nReq, nResp=0, - elapsed=elapsed.toStr, ctrl=buddy.ctrl.state, bdyErrors=buddy.bdyErrors + elapsed=elapsed.toStr, syncState=($buddy.syncState), + bdyErrors=buddy.bdyErrors return err() let b: 
seq[BlockBody] = resp.get.bodies if b.len == 0 or nReq < b.len: buddy.fetchRegisterError() trace trEthRecvReceivedBlockBodies, peer, nReq, nResp=b.len, - elapsed=elapsed.toStr, ctrl=buddy.ctrl.state, + elapsed=elapsed.toStr, syncState=($buddy.syncState), nRespErrors=buddy.only.nBdyRespErrors return err() @@ -99,7 +100,8 @@ proc bodiesFetch*( buddy.ctx.pool.blkLastSlowPeer = Opt.none(Hash) # not last one or not error trace trEthRecvReceivedBlockBodies, peer, nReq, nResp=b.len, - elapsed=elapsed.toStr, ctrl=buddy.ctrl.state, bdyErrors=buddy.bdyErrors + elapsed=elapsed.toStr, syncState=($buddy.syncState), + bdyErrors=buddy.bdyErrors return ok(b) diff --git a/execution_chain/sync/beacon/worker/blocks_staged/staged_blocks.nim b/execution_chain/sync/beacon/worker/blocks_staged/staged_blocks.nim index eb4097aa70..a5b86f78b0 100644 --- a/execution_chain/sync/beacon/worker/blocks_staged/staged_blocks.nim +++ b/execution_chain/sync/beacon/worker/blocks_staged/staged_blocks.nim @@ -17,16 +17,13 @@ import ../../../../networking/p2p, ../../../wire_protocol/types, ../../worker_desc, - ../[blocks_unproc, helpers, update], - ./bodies + ../[blocks_unproc, update], + ./bodies_fetch # ------------------------------------------------------------------------------ # Private helpers # ------------------------------------------------------------------------------ -formatIt(Hash32): - it.short - proc getNthHash(ctx: BeaconCtxRef; blocks: seq[EthBlock]; n: int): Hash32 = ctx.hdrCache.getHash(blocks[n].header.number).valueOr: return zeroHash32 @@ -62,7 +59,7 @@ proc blocksFetchCheckImpl( # There is nothing one can do here info "Block header missing (reorg triggered)", peer, iv, n, nth=(iv.minPt + n).bnStr - ctx.poolMode = true # So require reorg + ctx.blk.cancelRequest = true # So require reorg return Opt.none(seq[EthBlock]) request.blockHashes[n - 1] = header.parentHash blocks[n].header = header @@ -70,7 +67,7 @@ proc blocksFetchCheckImpl( # There is nothing one can do here info "Block 
header missing (reorg triggered)", peer, iv, n=0, nth=iv.minPt.bnStr - ctx.poolMode = true # So require reorg + ctx.blk.cancelRequest = true # So require reorg return Opt.none(seq[EthBlock]) request.blockHashes[^1] = blocks[^1].header.computeBlockHash @@ -119,7 +116,7 @@ func blocksModeStopped*(ctx: BeaconCtxRef): bool = ## Helper, checks whether there is a general stop conditions based on ## state settings (not on sync peer ctrl as `buddy.ctrl.running`.) ctx.poolMode or - ctx.pool.lastState != processingBlocks + ctx.pool.lastState != blocks proc blocksFetch*( @@ -163,7 +160,7 @@ proc blocksImport*( trace info & ": Start importing blocks", peer=($maybePeer), iv, nBlocks=iv.len, base=ctx.chain.baseNumber.bnStr, - head=ctx.chain.latestNumber.bnStr, target=ctx.head.bnStr + head=ctx.chain.latestNumber.bnStr block loop: for n in 0 ..< blocks.len: @@ -171,7 +168,7 @@ proc blocksImport*( if nBn <= ctx.chain.baseNumber: trace info & ": ignoring block less eq. base", n, iv, nBlocks=iv.len, - nthBn=nBn.bnStr, nthHash=ctx.getNthHash(blocks, n), + nthBn=nBn.bnStr, nthHash=ctx.getNthHash(blocks, n).short, B=ctx.chain.baseNumber.bnStr, L=ctx.chain.latestNumber.bnStr ctx.blk.topImported = nBn # well, not really imported @@ -183,9 +180,10 @@ proc blocksImport*( # point, the `FC` module data area might have been moved to a new # canonical branch. 
# - ctx.poolMode = true + ctx.blk.cancelRequest = true # So require reorg warn info & ": import block error (reorg triggered)", n, iv, - nBlocks=iv.len, nthBn=nBn.bnStr, nthHash=ctx.getNthHash(blocks, n), + nBlocks=iv.len, nthBn=nBn.bnStr, + nthHash=ctx.getNthHash(blocks, n).short, B=ctx.chain.baseNumber.bnStr, L=ctx.chain.latestNumber.bnStr, `error`=error break loop @@ -204,7 +202,7 @@ proc blocksImport*( nBlocks=(ctx.blk.topImported - iv.minPt + 1), nFailed=(iv.maxPt - ctx.blk.topImported), base=ctx.chain.baseNumber.bnStr, head=ctx.chain.latestNumber.bnStr, - target=ctx.head.bnStr + target=ctx.head.bnStr, targetHash=ctx.headHash.short # ------------------------------------------------------------------------------ # End diff --git a/execution_chain/sync/beacon/worker/blocks_staged/staged_queue.nim b/execution_chain/sync/beacon/worker/blocks_staged/staged_queue.nim index a7809189fc..84e88b0fbf 100644 --- a/execution_chain/sync/beacon/worker/blocks_staged/staged_queue.nim +++ b/execution_chain/sync/beacon/worker/blocks_staged/staged_queue.nim @@ -1,5 +1,5 @@ # Nimbus -# Copyright (c) 2023-2024 Status Research & Development GmbH +# Copyright (c) 2023-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at # https://opensource.org/licenses/MIT). 
@@ -33,7 +33,8 @@ func blocksStagedQueueIsEmpty*(ctx: BeaconCtxRef): bool = func blocksStagedQueueClear*(ctx: BeaconCtxRef) = ## Clear queue - ctx.blk.staged.clear + ctx.blk.staged.clear() + ctx.blk.reserveStaged = 0 func blocksStagedQueueInit*(ctx: BeaconCtxRef) = ## Constructor diff --git a/execution_chain/sync/beacon/worker/headers_staged.nim b/execution_chain/sync/beacon/worker/headers_staged.nim index 8d2ab20e7f..fc1b761aed 100644 --- a/execution_chain/sync/beacon/worker/headers_staged.nim +++ b/execution_chain/sync/beacon/worker/headers_staged.nim @@ -15,7 +15,7 @@ import pkg/eth/common, pkg/stew/[interval_set, sorted_set], ../worker_desc, - ./headers_staged/[headers, staged_collect], + ./headers_staged/[headers_fetch, staged_collect], ./headers_unproc # ------------------------------------------------------------------------------ @@ -114,8 +114,7 @@ proc headersStagedCollect*( # might have been reset and prepared for the next stage. if ctx.collectModeStopped(): trace info & ": stopped fetching/storing headers", peer, iv, - bottom=bottom.bnStr, nStored, ctrl=buddy.ctrl.state, - syncState=ctx.pool.lastState, cacheMode=ctx.hdrCache.state + bottom=bottom.bnStr, nStored, syncState=($buddy.syncState) break fetchHeadersBody # done, exit this function # Commit partially processed block numbers @@ -127,7 +126,7 @@ proc headersStagedCollect*( debug info & ": fetched headers count", peer, unprocTop=ctx.headersUnprocAvailTop.bnStr, D=ctx.dangling.bnStr, - nStored, nStagedQ=ctx.hdr.staged.len, ctrl=buddy.ctrl.state + nStored, nStagedQ=ctx.hdr.staged.len, syncState=($buddy.syncState) # Buddy might have been cancelled while downloading headers. if buddy.ctrl.stopped: @@ -161,8 +160,7 @@ proc headersStagedCollect*( # might have been reset and prepared for the next stage. 
if ctx.collectModeStopped(): trace info & ": stopped fetching/staging headers", peer, iv, - bottom=bottom.bnStr, nStored, ctrl=buddy.ctrl.state, - syncState=ctx.pool.lastState, cacheMode=ctx.hdrCache.state + bottom=bottom.bnStr, nStored, syncState=($buddy.syncState) break fetchHeadersBody # done, exit this function # Store `lhc` chain on the `staged` queue if there is any @@ -189,9 +187,9 @@ proc headersStagedCollect*( # block chain or similar.) ctx.pool.failedPeers.incl buddy.peerID - debug info & ": no headers yet", peer, ctrl=buddy.ctrl.state, - cacheMode=ctx.hdrCache.state, syncState=ctx.pool.lastState, - failedPeers=ctx.pool.failedPeers.len, hdrErrors=buddy.hdrErrors + debug info & ": no headers yet (failed peer)", peer, + failedPeers=ctx.pool.failedPeers.len, + syncState=($buddy.syncState), hdrErrors=buddy.hdrErrors return info "Queued/staged or DB/stored headers", @@ -270,19 +268,11 @@ proc headersStagedProcess*(buddy: BeaconBuddyRef; info: static[string]): bool = proc headersStagedReorg*(ctx: BeaconCtxRef; info: static[string]) = - ## Some pool mode intervention. The effect is that all concurrent peers - ## finish up their current work and run this function here (which might - ## do nothing.) Pool mode is used to sync peers, e.g. for a forced state - ## change. + ## Some pool mode intervention. 
## - # Check for cancel request - if ctx.pool.lastState == cancelHeaders: - # Update counter - ctx.pool.nReorg.inc - - # Reset header queues - debug info & ": Flushing header queues", nUnproc=ctx.headersUnprocTotal(), - nStagedQ=ctx.hdr.staged.len, nReorg=ctx.pool.nReorg + if ctx.pool.lastState in {headersCancel,headersFinish}: + trace info & ": Flushing header queues", + nUnproc=ctx.headersUnprocTotal(), nStagedQ=ctx.hdr.staged.len ctx.headersUnprocClear() # clears `unprocessed` and `borrowed` list ctx.hdr.staged.clear() diff --git a/execution_chain/sync/beacon/worker/headers_staged/headers.nim b/execution_chain/sync/beacon/worker/headers_staged/headers_fetch.nim similarity index 94% rename from execution_chain/sync/beacon/worker/headers_staged/headers.nim rename to execution_chain/sync/beacon/worker/headers_staged/headers_fetch.nim index a8eeb4ab12..5775f94469 100644 --- a/execution_chain/sync/beacon/worker/headers_staged/headers.nim +++ b/execution_chain/sync/beacon/worker/headers_staged/headers_fetch.nim @@ -106,8 +106,8 @@ proc headersFetchReversed*( if resp.isNone or buddy.ctrl.stopped: buddy.registerError() trace trEthRecvReceivedBlockHeaders, peer, nReq=req.maxResults, - hash=topHash.toStr, nResp=0, elapsed=elapsed.toStr, ctrl=buddy.ctrl.state, - hdrErrors=buddy.hdrErrors + hash=topHash.toStr, nResp=0, elapsed=elapsed.toStr, + syncState=($buddy.syncState), hdrErrors=buddy.hdrErrors return err() let h: seq[Header] = resp.get.headers @@ -115,7 +115,7 @@ proc headersFetchReversed*( buddy.registerError() trace trEthRecvReceivedBlockHeaders, peer, nReq=req.maxResults, hash=topHash.toStr, nResp=h.len, elapsed=elapsed.toStr, - ctrl=buddy.ctrl.state, hdrErrors=buddy.hdrErrors + syncState=($buddy.syncState), hdrErrors=buddy.hdrErrors return err() # Verify that first block number matches @@ -124,7 +124,7 @@ proc headersFetchReversed*( trace trEthRecvReceivedBlockHeaders, peer, nReq=req.maxResults, hash=topHash.toStr, ivReqMinPt=ivReq.minPt.bnStr, 
ivRespMinPt=h[^1].bnStr, nResp=h.len, elapsed=elapsed.toStr, - ctrl=buddy.ctrl.state, hdrErrors=buddy.hdrErrors + syncState=($buddy.syncState), hdrErrors=buddy.hdrErrors return err() # Ban an overly slow peer for a while when seen in a row. Also there is a @@ -137,7 +137,7 @@ proc headersFetchReversed*( trace trEthRecvReceivedBlockHeaders, peer, nReq=req.maxResults, hash=topHash.toStr, ivResp=BnRange.new(h[^1].number,h[0].number), - nResp=h.len, elapsed=elapsed.toStr, ctrl=buddy.ctrl.state, + nResp=h.len, elapsed=elapsed.toStr, syncState=($buddy.syncState), hdrErrors=buddy.hdrErrors return ok(h) diff --git a/execution_chain/sync/beacon/worker/headers_staged/staged_collect.nim b/execution_chain/sync/beacon/worker/headers_staged/staged_collect.nim index 9399f50fc8..e36d5eb9f1 100644 --- a/execution_chain/sync/beacon/worker/headers_staged/staged_collect.nim +++ b/execution_chain/sync/beacon/worker/headers_staged/staged_collect.nim @@ -15,7 +15,7 @@ import pkg/eth/common, pkg/stew/interval_set, ../../worker_desc, - ./headers + ./headers_fetch # ------------------------------------------------------------------------------ # Private logging helpers @@ -56,7 +56,7 @@ proc fetchRev( var rev = (await buddy.headersFetchReversed(ivReq, parent, info)).valueOr: buddy.updateBuddyErrorState() debug info & ": header fetch error", peer=buddy.peer, ivReq, - nReq=ivReq.len, parent=parent.toStr, ctrl=buddy.ctrl.state, + nReq=ivReq.len, parent=parent.toStr, syncState=($buddy.syncState), hdrErrors=buddy.hdrErrors return err() ok(rev) @@ -85,7 +85,7 @@ func collectModeStopped*(ctx: BeaconCtxRef): bool = ## Helper, checks whether there is a general stop conditions based on ## state settings (not on sync peer ctrl as `buddy.ctrl.running`.) 
ctx.poolMode or - ctx.pool.lastState != collectingHeaders or + ctx.pool.lastState != headers or ctx.hdrCache.state != collecting @@ -129,7 +129,7 @@ proc collectAndStashOnDiskCache*( ctx.hdrCache.put(rev).isOkOr: buddy.updateBuddyProcError() debug info & ": header stash error", peer, iv, ivReq, - ctrl=buddy.ctrl.state, hdrErrors=buddy.hdrErrors, `error`=error + syncState=($buddy.syncState), hdrErrors=buddy.hdrErrors, `error`=error break fetchHeadersBody # error => exit block # Note that `put()` might not have used all of the `rev[]` items for @@ -155,8 +155,9 @@ proc collectAndStashOnDiskCache*( parent = rev[^1].parentHash # parent hash for next fetch request # End loop - trace info & ": fetched and stored headers", peer, iv, - nHeaders=iv.len, ctrl=buddy.ctrl.state, hdrErrors=buddy.hdrErrors + trace info & ": fetched and stored headers", peer, iv, nHeaders=iv.len, + D=ctx.dangling.bnStr, syncState=($buddy.syncState), + hdrErrors=buddy.hdrErrors # Reset header process errors (not too many consecutive failures this time) buddy.nHdrProcErrors = 0 # all OK, reset error count @@ -166,7 +167,8 @@ proc collectAndStashOnDiskCache*( trace info & ": partially fetched/stored headers", peer, iv=(if ivTop < iv.maxPt: BnRange.new(ivTop+1,iv.maxPt).bnStr else: "n/a"), - nHeaders=(iv.maxPt-ivTop), ctrl=buddy.ctrl.state, hdrErrors=buddy.hdrErrors + nHeaders=(iv.maxPt-ivTop), D=ctx.dangling.bnStr, + syncState=($buddy.syncState), hdrErrors=buddy.hdrErrors return ivTop # there is some left over range @@ -216,8 +218,8 @@ proc collectAndStageOnMemQueue*( if rev[0].number != ivTop or rev[^1].number != ivBottom: buddy.updateBuddyProcError() debug info & ": header queue error", peer, iv, ivReq, - receivedHeaders=rev.bnStr, expected=(ivBottom,ivTop).bnStr, - ctrl=buddy.ctrl.state, hdrErrors=buddy.hdrErrors + receivedHeaders=rev.bnStr, expected=(ivBottom,ivTop).bnStr, + syncState=($buddy.syncState), hdrErrors=buddy.hdrErrors break fetchHeadersBody # error => exit block # Check/update hashes 
@@ -243,7 +245,8 @@ proc collectAndStageOnMemQueue*( # End loop trace info & ": fetched and staged all headers", peer, iv, - nHeaders=iv.len, ctrl=buddy.ctrl.state, hdrErrors=buddy.hdrErrors + D=ctx.dangling.bnStr, nHeaders=iv.len, syncState=($buddy.syncState), + hdrErrors=buddy.hdrErrors # Reset header process errors (not too many consecutive failures this time) buddy.nHdrProcErrors = 0 # all OK, reset error count @@ -254,8 +257,8 @@ proc collectAndStageOnMemQueue*( # Start processing some error or an incomplete fetch/stage result trace info & ": partially fetched and staged headers", peer, iv, - staged=lhc.bnStr, nHeaders=lhc.revHdrs.len, ctrl=buddy.ctrl.state, - hdrErrors=buddy.hdrErrors + D=ctx.dangling.bnStr, stagedHeaders=lhc.bnStr, nHeaders=lhc.revHdrs.len, + syncState=($buddy.syncState), hdrErrors=buddy.hdrErrors return ivTop # there is some left over range diff --git a/execution_chain/sync/beacon/worker/headers_staged/staged_queue.nim b/execution_chain/sync/beacon/worker/headers_staged/staged_queue.nim index 6048063445..3bc48c0b85 100644 --- a/execution_chain/sync/beacon/worker/headers_staged/staged_queue.nim +++ b/execution_chain/sync/beacon/worker/headers_staged/staged_queue.nim @@ -1,5 +1,5 @@ # Nimbus -# Copyright (c) 2023-2024 Status Research & Development GmbH +# Copyright (c) 2023-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at # https://opensource.org/licenses/MIT). 
@@ -33,7 +33,8 @@ func headersStagedQueueIsEmpty*(ctx: BeaconCtxRef): bool = func headersStagedQueueClear*(ctx: BeaconCtxRef) = ## Clear queue - ctx.hdr.staged.clear + ctx.hdr.staged.clear() + ctx.hdr.reserveStaged = 0 func headersStagedQueueInit*(ctx: BeaconCtxRef) = ## Constructor diff --git a/execution_chain/sync/beacon/worker/headers_unproc.nim b/execution_chain/sync/beacon/worker/headers_unproc.nim index e677433657..9f958f85df 100644 --- a/execution_chain/sync/beacon/worker/headers_unproc.nim +++ b/execution_chain/sync/beacon/worker/headers_unproc.nim @@ -52,7 +52,6 @@ proc headersUnprocFetch*( doAssert ctx.hdr.borrowed.merge(iv) == iv.len ok(iv) - proc headersUnprocCommit*(ctx: BeaconCtxRef; iv: BnRange) = ## Commit back all processed range, i.e. remove it from the borrowed set. doAssert ctx.hdr.borrowed.reduce(iv) == iv.len diff --git a/execution_chain/sync/beacon/worker/helpers.nim b/execution_chain/sync/beacon/worker/helpers.nim index b9f811c7ea..ea4fb57201 100644 --- a/execution_chain/sync/beacon/worker/helpers.nim +++ b/execution_chain/sync/beacon/worker/helpers.nim @@ -13,12 +13,12 @@ ## Extracted helpers from `worker_desc` (avoids circular import) import - pkg/[chronos, results], - pkg/eth/common, - pkg/stew/interval_set, + pkg/[chronos, eth/common, results, stew/interval_set], + ../../../core/chain, ../../../networking/p2p, - ../../../utils/prettify, - ../../../utils/utils + ../../../utils/[prettify, utils], + ../../sync_desc, + ../worker_const export prettify, short @@ -61,4 +61,10 @@ func `$`*(w: Interval[BlockNumber,uint64]): string = func `$`*(w: Opt[Peer]): string = if w.isSome: $w.value else: "n/a" +func `$`*(w: (SyncState,HeaderChainMode,bool)): string = + $w[0] & "." 
& $w[1] & (if w[2]: ":" & "poolMode" else: "") + +func `$`*(w: (BuddyRunState,SyncState,HeaderChainMode,bool)): string = + $w[0] & ":" & $(w[1],w[2],w[3]) + # End diff --git a/execution_chain/sync/beacon/worker/start_stop.nim b/execution_chain/sync/beacon/worker/start_stop.nim index 37276997ef..dc225359ef 100644 --- a/execution_chain/sync/beacon/worker/start_stop.nim +++ b/execution_chain/sync/beacon/worker/start_stop.nim @@ -29,7 +29,7 @@ type proc querySyncProgress(ctx: BeaconCtxRef): SyncStateData = ## Syncer status query function (for call back closure) - if collectingHeaders <= ctx.pool.lastState: + if headers <= ctx.pool.lastState: return (ctx.chain.baseNumber, ctx.dangling.number, ctx.head.number) # (0,0,0) diff --git a/execution_chain/sync/beacon/worker/update.nim b/execution_chain/sync/beacon/worker/update.nim index 0512e716d4..6458910f17 100644 --- a/execution_chain/sync/beacon/worker/update.nim +++ b/execution_chain/sync/beacon/worker/update.nim @@ -17,81 +17,45 @@ import ../worker_desc, ./blocks_staged/staged_queue, ./headers_staged/staged_queue, - ./[blocks_unproc, headers_unproc, helpers] + ./[blocks_unproc, headers_unproc] # ------------------------------------------------------------------------------ -# Private helpers -# ------------------------------------------------------------------------------ - -func statePair(a, b: SyncLayoutState): int = - ## Represent state pair `(a,b)` as a single entity - a.ord * 100 + b.ord - -func statePair(a: SyncLayoutState): int = - a.statePair a - -# ------------------------------------------------------------------------------ -# Private functions, state handlers +# Private functions, state handler helpers # ------------------------------------------------------------------------------ proc startHibernating(ctx: BeaconCtxRef; info: static[string]) = ## Clean up sync scrum target buckets and await a new request from `CL`. 
## - ctx.pool.lastState.reset - ctx.pool.clReq.reset - ctx.headersUnprocClear() - ctx.blocksUnprocClear() - ctx.headersStagedQueueClear() - ctx.blocksStagedQueueClear() + doAssert ctx.blocksUnprocIsEmpty() + doAssert ctx.blocksStagedQueueIsEmpty() + doAssert ctx.headersUnprocIsEmpty() + doAssert ctx.headersStagedQueueIsEmpty() + + ctx.hdrCache.clear() + ctx.pool.clReq.reset ctx.pool.failedPeers.clear() ctx.pool.seenData = false - ctx.hdrCache.clear() - ctx.hibernate = true info "Suspending syncer", base=ctx.chain.baseNumber.bnStr, - head=ctx.chain.latestNumber.bnStr, - nSyncPeers=ctx.pool.nBuddies + head=ctx.chain.latestNumber.bnStr, nSyncPeers=ctx.pool.nBuddies proc commitCollectHeaders(ctx: BeaconCtxRef; info: static[string]): bool = ## Link header chain into `FC` module. Gets ready for block import. - let - b = ctx.chain.baseNumber() - l = ctx.chain.latestNumber() - h = ctx.head.number # This function does the job linking into `FC` module proper ctx.hdrCache.commit().isOkOr: - trace info & ": cannot commit header chain", B=b.bnStr, L=l.bnStr, - D=ctx.dangling.bnStr, H=h.bnStr, `error`=error + trace info & ": cannot finalise header chain", + B=ctx.chain.baseNumber.bnStr, L=ctx.chain.latestNumber.bnStr, + D=ctx.dangling.bnStr, H=ctx.head.bnStr, `error`=error return false - trace info & ": header chain linked into FC", B=b.bnStr, L=l.bnStr, - D=ctx.dangling.bnStr, H=h.bnStr - true -proc setupFinishedHeaders(ctx: BeaconCtxRef; info: static[string]) = - ## Trivial state transition handler - ctx.headersUnprocClear() - ctx.headersStagedQueueClear() - ctx.pool.lastState = finishedHeaders - -proc setupCancelHeaders(ctx: BeaconCtxRef; info: static[string]) = - ## Trivial state transition handler - ctx.pool.lastState = cancelHeaders - ctx.poolMode = true # reorg, clear header queues - -proc setupCancelBlocks(ctx: BeaconCtxRef; info: static[string]) = - ## Trivial state transition handler - ctx.pool.lastState = cancelBlocks - ctx.poolMode = true # reorg, clear block queues 
- - proc setupProcessingBlocks(ctx: BeaconCtxRef; info: static[string]) = doAssert ctx.blocksUnprocIsEmpty() doAssert ctx.blocksStagedQueueIsEmpty() @@ -109,109 +73,145 @@ proc setupProcessingBlocks(ctx: BeaconCtxRef; info: static[string]) = ctx.blocksUnprocSet(d, h) ctx.blk.topImported = d - 1 - # State transition - ctx.pool.lastState = processingBlocks - - trace info & ": collecting block bodies", iv=BnRange.new(d+1, h) - # ------------------------------------------------------------------------------ -# Public functions +# Private state transition handlers # ------------------------------------------------------------------------------ -proc updateSyncState*(ctx: BeaconCtxRef; info: static[string]) = - ## Update internal state when needed +func idleNext(ctx: BeaconCtxRef; info: static[string]): SyncState = + ## State transition handler + if ctx.hdrCache.state == collecting: + return SyncState.headers + idle - # Calculate the pair `(prevState,thisState)` in order to check for a state - # change. - let prevState = ctx.pool.lastState # previous state - var thisState = # figure out current state - if prevState in {cancelHeaders,cancelBlocks}: - prevState # no need to change state here - else: - case ctx.hdrCache.state: # currently observed state, the std way - of collecting: collectingHeaders - of ready: finishedHeaders - of locked: processingBlocks - else: idleSyncState - - # Handle same state cases first (i.e. no state change.) Depending on the - # context, a new state might be forced so that there will be a state change. 
- case statePair(prevState, thisState) - of statePair(collectingHeaders): - if ctx.pool.seenData or # checks for cul-de-sac syncing - ctx.pool.failedPeers.len <= fetchHeadersFailedInitialFailPeersHwm: - return +proc headersNext(ctx: BeaconCtxRef; info: static[string]): SyncState = + ## State transition handler + if not ctx.pool.seenData and # checks for cul-de-sac syncing + fetchHeadersFailedInitialFailPeersHwm < ctx.pool.failedPeers.len: debug info & ": too many failed header peers", failedPeers=ctx.pool.failedPeers.len, limit=fetchHeadersFailedInitialFailPeersHwm - thisState = cancelHeaders - # proceed + return headersCancel - of statePair(cancelHeaders): # was not assigned by `syncState()` - if not ctx.headersBorrowedIsEmpty(): # wait for peers to reorg in `poolMode` - return - thisState = idleSyncState # will continue hibernating - # proceed - - of statePair(finishedHeaders): - if ctx.commitCollectHeaders(info): # commit downloading headers - thisState = processingBlocks - else: - thisState = idleSyncState # will continue hibernating - # proceed - - of statePair(processingBlocks): - if not ctx.pool.seenData and # checks for cul-de-sac syncing - fetchBodiesFailedInitialFailPeersHwm < ctx.pool.failedPeers.len: - debug info & ": too many failed block peers", - failedPeers=ctx.pool.failedPeers.len, - limit=fetchBodiesFailedInitialFailPeersHwm - thisState = cancelBlocks - # proceed - elif ctx.blocksStagedQueueIsEmpty() and - ctx.blocksUnprocIsEmpty(): - thisState = idleSyncState # will continue hibernating - # proceed - else: - return + if ctx.hdrCache.state == collecting: + return SyncState.headers - of statePair(cancelBlocks): - if not ctx.blocksBorrowedIsEmpty(): # wait for peers to reorg in `poolMode` - return - thisState = idleSyncState # will continue hibernating - # proceed + if ctx.hdrCache.state == ready: + return headersFinish - elif prevState == thisState: - return + headersCancel - # Process state transition - case statePair(prevState, thisState) - of 
statePair(collectingHeaders, cancelHeaders): - ctx.setupCancelHeaders info # cancel header download - thisState = ctx.pool.lastState # assign result from state handler +func headersCancelNext(ctx: BeaconCtxRef; info: static[string]): SyncState = + ## State transition handler + if ctx.poolMode: # wait for peers to sync in `poolMode` + return headersCancel + idle # will continue hibernating - of statePair(finishedHeaders, processingBlocks): - ctx.setupProcessingBlocks info # start downloading block bodies - thisState = ctx.pool.lastState # assign result from state handler +proc headersFinishNext(ctx: BeaconCtxRef; info: static[string]): SyncState = + ## State transition handler + if ctx.poolMode: # wait for peers to sync in `poolMode` + return headersFinish - of statePair(processingBlocks, cancelBlocks): - ctx.setupCancelBlocks info # cancel blocks download - thisState = ctx.pool.lastState # assign result from state handler + if ctx.hdrCache.state == ready: + if ctx.commitCollectHeaders info: # commit downloading headers + ctx.setupProcessingBlocks info # initialise blocks processing + return SyncState.blocks # transition to blocks processing - of statePair(collectingHeaders, finishedHeaders): - ctx.setupFinishedHeaders info # call state handler - thisState = ctx.pool.lastState # assign result from state handler + idle # will continue hibernating - else: - # Use transition as is (no handler) - discard +proc blocksNext(ctx: BeaconCtxRef; info: static[string]): SyncState = + ## State transition handler + if not ctx.pool.seenData and # checks for cul-de-sac syncing + fetchBodiesFailedInitialFailPeersHwm < ctx.pool.failedPeers.len: + debug info & ": too many failed block peers", + failedPeers=ctx.pool.failedPeers.len, + limit=fetchBodiesFailedInitialFailPeersHwm + return blocksCancel + + if ctx.blk.cancelRequest: + return blocksCancel + + if ctx.blocksStagedQueueIsEmpty() and + ctx.blocksUnprocIsEmpty(): + return blocksFinish + + SyncState.blocks + +func 
blocksCancelNext(ctx: BeaconCtxRef; info: static[string]): SyncState = + ## State transition handler + if ctx.poolMode: # wait for peers to sync in `poolMode` + return blocksCancel + idle # will continue hibernating - info "Sync state changed", prevState, thisState, - base=ctx.chain.baseNumber.bnStr, head=ctx.chain.latestNumber.bnStr, - target=ctx.head.bnStr, targetHash=ctx.headHash.short +func blocksFinishNext(ctx: BeaconCtxRef; info: static[string]): SyncState = + ## State transition handler + if ctx.poolMode: # wait for peers to sync in `poolMode` + return blocksCancel + idle # will continue hibernating + +# ------------------------------------------------------------------------------ +# Public functions +# ------------------------------------------------------------------------------ + +proc updateSyncState*(ctx: BeaconCtxRef; info: static[string]) = + ## Update internal state when needed + # + # State machine + # :: + # idle <---------------+---+---+---. + # | ^ ^ ^ | + # v | | | | + # headers -> headersCancel | | | + # | | | | + # v | | | + # headersFinish -----------' | | + # | | | + # v | | + # blocks -> blocksCancel ------' | + # | | + # v | + # blocksFinish --------------------' + # + let newState = + case ctx.pool.lastState: + of idle: + ctx.idleNext info + + of SyncState.headers: + ctx.headersNext info + + of headersCancel: + ctx.headersCancelNext info + + of headersFinish: + ctx.headersFinishNext info + + of SyncState.blocks: + ctx.blocksNext info + + of blocksCancel: + ctx.blocksCancelNext info + + of blocksFinish: + ctx.blocksFinishNext info + + if ctx.pool.lastState == newState: + return + + let prevState = ctx.pool.lastState + ctx.pool.lastState = newState + + # Most states require synchronisation via `poolMode` + if newState notin {idle, SyncState.headers, SyncState.blocks}: + ctx.poolMode = true + info "State change, waiting for sync", prevState, newState, + nSyncPeers=ctx.pool.nBuddies + else: + info "State changed", prevState, newState, + 
base=ctx.chain.baseNumber.bnStr, head=ctx.chain.latestNumber.bnStr, + target=ctx.head.bnStr, targetHash=ctx.headHash.short # Final sync scrum layout reached or inconsistent/impossible state - if thisState == idleSyncState: + if newState == idle: ctx.startHibernating info @@ -226,7 +226,7 @@ proc updateFromHibernateSetTarget*( # Exclude the case of a single header chain which would be `T` only if b+1 < t: - ctx.pool.lastState = collectingHeaders # state transition + ctx.pool.lastState = SyncState.headers # state transition ctx.hibernate = false # wake up # Update range diff --git a/execution_chain/sync/beacon/worker/update/ticker.nim b/execution_chain/sync/beacon/worker/update/ticker.nim index 748b93faa7..e9c7fb6dd0 100644 --- a/execution_chain/sync/beacon/worker/update/ticker.nim +++ b/execution_chain/sync/beacon/worker/update/ticker.nim @@ -21,7 +21,6 @@ when enableTicker: pkg/[stint, stew/interval_set], ../headers_staged/staged_queue, ../blocks_staged/staged_queue, - ../helpers, ../[blocks_unproc, headers_unproc] logScope: @@ -49,9 +48,9 @@ type nBlkUnprocFragm: int nBlkStaged: int blkStagedBottom: BlockNumber + blkTopImported: BlockNumber - state: SyncLayoutState - reorg: int + state: SyncState nBuddies: int TickerRef* = ref object of RootRef @@ -78,7 +77,7 @@ when enableTicker: dangling: ctx.dangling.number, head: ctx.head.number, target: ctx.consHeadNumber, - activeOk: ctx.pool.lastState != idleSyncState, + activeOk: ctx.pool.lastState != idle, nHdrStaged: ctx.headersStagedQueueLen(), hdrStagedTop: ctx.headersStagedQueueTopKey(), @@ -91,9 +90,9 @@ when enableTicker: blkUnprocBottom: ctx.blocksUnprocTotalBottom(), nBlkUnprocessed: ctx.blocksUnprocTotal(), nBlkUnprocFragm: ctx.blk.unprocessed.chunks(), + blkTopImported: ctx.blk.topImported, state: ctx.pool.lastState, - reorg: ctx.pool.nReorg, nBuddies: ctx.pool.nBuddies) proc tickerLogger(t: TickerRef; ctx: BeaconCtxRef) = @@ -109,6 +108,7 @@ when enableTicker: let B = if data.base == data.latest: "L" else: 
data.base.bnStr L = if data.latest == data.coupler: "C" else: data.latest.bnStr + I = if data.blkTopImported == 0: "n/a" else : data.blkTopImported.bnStr C = if data.coupler == data.dangling: "D" elif data.coupler < high(int64).uint64: data.coupler.bnStr else: "n/a" @@ -143,13 +143,14 @@ when enableTicker: else: bS & "<-" & bU st = case data.state - of idleSyncState: "0" - of collectingHeaders: "h" - of cancelHeaders: "x" - of finishedHeaders: "f" - of processingBlocks: "b" - of cancelBlocks: "z" - rrg = data.reorg + of idle: "0" + of headers: "h" + of headersCancel: "x" + of headersFinish: "f" + of blocks: "b" + of blocksCancel: "x" + of blocksFinish: "f" + nP = data.nBuddies # With `int64`, there are more than 29*10^10 years range for seconds @@ -159,7 +160,21 @@ when enableTicker: t.lastStats = data t.visited = now - debug "Sync state", up, nP, st, B, L, C, D, H, T, hQ, bQ, rrg, mem + case data.state + of idle: + debug "Sync state idle", up, nP, B, L, + D, H, T, hQ, bQ, + mem + + of headers, headersCancel, headersFinish: + debug "Sync state headers", up, nP, st, B, L, + C, D, H, T, hQ, + mem + + of blocks, blocksCancel, blocksFinish: + debug "Sync state blocks", up, nP, st, B, L, + D, I, H, T, bQ, + mem # ------------------------------------------------------------------------------ # Public function diff --git a/execution_chain/sync/beacon/worker_config.nim b/execution_chain/sync/beacon/worker_const.nim similarity index 91% rename from execution_chain/sync/beacon/worker_config.nim rename to execution_chain/sync/beacon/worker_const.nim index 36b8527e10..802abe9bcf 100644 --- a/execution_chain/sync/beacon/worker_config.nim +++ b/execution_chain/sync/beacon/worker_const.nim @@ -13,6 +13,15 @@ import pkg/chronos +type SyncState* = enum + idle = 0 ## see clause *(8)*, *(12)* of `README.md` + headers ## see clauses *(5)*, *(9)* of `README.md` + headersCancel ## stop this scrum + headersFinish ## see clause *(10)* of `README.md` + blocks ## see clause *(11)* of 
`README.md` + blocksCancel ## stop this syncer scrum + blocksFinish ## get ready for `idle` + const enableTicker* = false ## Log regular status updates similar to metrics. Great for debugging. diff --git a/execution_chain/sync/beacon/worker_desc.nim b/execution_chain/sync/beacon/worker_desc.nim index afc7c15764..27fcec56ef 100644 --- a/execution_chain/sync/beacon/worker_desc.nim +++ b/execution_chain/sync/beacon/worker_desc.nim @@ -17,10 +17,10 @@ import ../../core/chain, ../sync_desc, ./worker/helpers, - ./worker_config + ./worker_const export - helpers, sync_desc, worker_config, chain + helpers, sync_desc, worker_const, chain type BnRangeSet* = IntervalSetRef[BlockNumber,uint64] @@ -51,14 +51,6 @@ type # ------------------- - SyncLayoutState* = enum - idleSyncState = 0 ## see clause *(8)*, *(12)* of `README.md` - collectingHeaders ## see clauses *(5)*, *(9)* of `README.md` - cancelHeaders ## stop this scrum - finishedHeaders ## see clause *(10)* of `README.md` - processingBlocks ## see clause *(11)* of `README.md` - cancelBlocks ## stop this scrum - SyncClMesg* = object ## Beacon state message used for manual first target set up consHead*: Header ## Consensus head @@ -80,6 +72,7 @@ type topImported*: BlockNumber ## For syncronising opportunistic import staged*: StagedBlocksQueue ## Blocks ready for import reserveStaged*: int ## Pre-book staged slot temporarily + cancelRequest*: bool ## Cancel block sync via state machine # ------------------- @@ -98,7 +91,7 @@ type ## Globally shared data extension nBuddies*: int ## Number of active workers clReq*: SyncClMesg ## Manual first target set up - lastState*: SyncLayoutState ## Last known layout state + lastState*: SyncState ## Last known layout state hdrSync*: HeaderFetchSync ## Syncing by linked header chains blkSync*: BlocksFetchSync ## For importing/executing blocks nextMetricsUpdate*: Moment ## For updating metrics @@ -108,7 +101,6 @@ type hdrCache*: HeaderChainRef ## Currently in tandem with `chain` # Info, 
debugging, and error handling stuff - nReorg*: int ## Number of reorg invocations (info only) hdrProcError*: Table[Hash,uint8] ## Some globally accessible header errors blkLastSlowPeer*: Opt[Hash] ## Register slow peer when last one failedPeers*: HashSet[Hash] ## Detect dead end sync by collecting peers @@ -172,6 +164,23 @@ func db*(ctx: BeaconCtxRef): CoreDbRef = # ----- +func syncState*( + ctx: BeaconCtxRef; + ): (SyncState,HeaderChainMode,bool) = + ## Getter, triple of relevant run-time states + (ctx.pool.lastState, + ctx.hdrCache.state, + ctx.poolMode) + +func syncState*( + buddy: BeaconBuddyRef; + ): (BuddyRunState,SyncState,HeaderChainMode,bool) = + ## Getter, also includes buddy state + (buddy.ctrl.state, + buddy.ctx.pool.lastState, + buddy.ctx.hdrCache.state, + buddy.ctx.poolMode) + func hibernate*(ctx: BeaconCtxRef): bool = ## Getter, re-interpretation of the daemon flag for reduced service mode # No need for running the daemon with reduced service mode. So it is From 13c6924600165d3351ae93eaa35fafb2736b7869 Mon Sep 17 00:00:00 2001 From: kdeme <7857583+kdeme@users.noreply.github.com> Date: Fri, 23 May 2025 22:30:43 +0200 Subject: [PATCH 036/138] Portal: allow beacon lc to try each individual update in an offer (#3326) --- portal/network/beacon/beacon_network.nim | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/portal/network/beacon/beacon_network.nim b/portal/network/beacon/beacon_network.nim index cc6785b662..c934620bb9 100644 --- a/portal/network/beacon/beacon_network.nim +++ b/portal/network/beacon/beacon_network.nim @@ -315,15 +315,18 @@ proc validateContent( # This means that no backfill is possible, for that we need updates that # get provided with a proof against historical_summaries, see also: # https://github.com/ethereum/portal-network-specs/issues/305 - # It is however a little more tricky, even updates that we do not have - # applied yet may fail here if the list of updates does not 
contain first - # the next update that is required currently for the sync. + # TODO: The light client will try to apply each update in dumb fashion. + # Could check the status of the sync and the content key to start from the + # exact right position. + var unverifiedUpdate = false for update in updates: - let res = await n.processor.updateVerifier(update) - if res.isErr(): - return err("Error verifying LC updates: " & $res.error) + (await n.processor.updateVerifier(update)).isOkOr: + unverifiedUpdate = true - ok() + if unverifiedUpdate: + err("Error verifying LC updates: could not verify all updates") + else: + ok() of lightClientFinalityUpdate: let update = decodeLightClientFinalityUpdateForked(n.forkDigests, content).valueOr: return err("Error decoding content: " & error) From 20cd0c92ea59d1535e57df2d96417e037a6a3437 Mon Sep 17 00:00:00 2001 From: tersec Date: Mon, 26 May 2025 08:42:00 +0000 Subject: [PATCH 037/138] rm useless windeps.dll downloading; make syncer batch size --debug (#3328) * rm useless windeps.dll downloading; make syncer batch size --debug * add arm64 platform detection The reasons for removing windeps.zip downloading are primarily twofold: it's a useless/unused network fetch which could fail; and given that, that it contains an EOL OpenSSL 1.1 DLL makes it potential attack surface to leave in the CI path. There's no point (at least initially for sure, but probably ever) in supporting x86 macOS. It's rapidly fading. If nothing else, the defaults should be switched to ARM macOS, and if x86 really needs to be re-added, that can be looked at later. But right now the CI should be focusing on testing ARM. --persist-batch-size looks very much like a debugging option we probably won't want to ship/document as an end-user supported command-line flag, so signal that appropriately. 
As with x86 macOS, this can be reviewed later, but as we approach an initial alpha release, it's better to err on the side of not having command-line options which are/were largely debugging artifacts enshrined in the supported set. --- .github/workflows/ci.yml | 27 --------------------- .github/workflows/nimbus_verified_proxy.yml | 23 +++--------------- .github/workflows/portal.yml | 23 +++--------------- .github/workflows/simulators.yml | 20 +-------------- config.nims | 2 ++ execution_chain/config.nim | 4 +-- 6 files changed, 11 insertions(+), 88 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6f6e456579..e64ebdbec8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -106,12 +106,6 @@ jobs: [[ -z "$ncpu" || $ncpu -le 0 ]] && ncpu=1 echo "ncpu=${ncpu}" >> $GITHUB_ENV - if [[ '${{ matrix.target.evmc }}' == 'evmc' ]]; then - echo "ENABLE_EVMC=1" >> $GITHUB_ENV - else - echo "ENABLE_EVMC=0" >> $GITHUB_ENV - fi - - name: Install build dependencies (Macos) # Some home brew modules were reported missing if: runner.os == 'Macos' @@ -127,16 +121,6 @@ jobs: path: external/mingw-${{ matrix.target.cpu }} key: 'mingw-llvm-17-${{ matrix.target.cpu }}' - - name: Restore Nim DLLs dependencies (Windows) from cache - if: runner.os == 'Windows' - id: windows-dlls-cache - uses: actions/cache@v4 - with: - path: external/dlls-${{ matrix.target.cpu }} - # according to docu, idle caches are kept for up to 7 days - # so change dlls# to force new cache contents (for some number #) - key: dlls1-${{ matrix.target.cpu }} - - name: Install llvm-mingw dependency (Windows) if: > steps.windows-mingw-cache.outputs.cache-hit != 'true' && @@ -155,22 +139,11 @@ jobs: 7z x -y "external/mingw-${{ matrix.target.cpu }}.zip" -oexternal/mingw-${{ matrix.target.cpu }}/ mv external/mingw-${{ matrix.target.cpu }}/**/* ./external/mingw-${{ matrix.target.cpu }} - - name: Install DLLs dependencies (Windows) - if: > - 
steps.windows-dlls-cache.outputs.cache-hit != 'true' && - runner.os == 'Windows' - run: | - DLLPATH=external/dlls-${{ matrix.target.cpu }} - mkdir -p external - curl -L "https://nim-lang.org/download/windeps.zip" -o external/windeps.zip - 7z x -y external/windeps.zip -o"$DLLPATH" - - name: Path to cached dependencies (Windows) if: > runner.os == 'Windows' run: | echo '${{ github.workspace }}'"/external/mingw-${{ matrix.target.cpu }}/bin" >> $GITHUB_PATH - echo '${{ github.workspace }}'"/external/dlls-${{ matrix.target.cpu }}" >> $GITHUB_PATH - name: Get latest nimbus-build-system commit hash id: versions diff --git a/.github/workflows/nimbus_verified_proxy.yml b/.github/workflows/nimbus_verified_proxy.yml index 3ffc68fa4d..aff2b8f122 100644 --- a/.github/workflows/nimbus_verified_proxy.yml +++ b/.github/workflows/nimbus_verified_proxy.yml @@ -44,7 +44,7 @@ jobs: # - os: linux # cpu: i386 - os: macos - cpu: amd64 + cpu: arm64 - os: windows cpu: amd64 include: @@ -72,6 +72,8 @@ jobs: run: | if [[ '${{ matrix.target.cpu }}' == 'amd64' ]]; then PLATFORM=x64 + elif [[ '${{ matrix.target.cpu }}' == 'arm64' ]]; then + PLATFORM=arm64 else PLATFORM=x86 fi @@ -133,14 +135,6 @@ jobs: path: external/mingw-${{ matrix.target.cpu }} key: 'mingw-llvm-17-${{ matrix.target.cpu }}' - - name: Restore Nim DLLs dependencies (Windows) from cache - if: runner.os == 'Windows' - id: windows-dlls-cache - uses: actions/cache@v4 - with: - path: external/dlls-${{ matrix.target.cpu }} - key: 'dlls-${{ matrix.target.cpu }}-verified-proxy' - - name: Install llvm-mingw dependency (Windows) if: > steps.windows-mingw-cache.outputs.cache-hit != 'true' && @@ -159,22 +153,11 @@ jobs: 7z x -y "external/mingw-${{ matrix.target.cpu }}.zip" -oexternal/mingw-${{ matrix.target.cpu }}/ mv external/mingw-${{ matrix.target.cpu }}/**/* ./external/mingw-${{ matrix.target.cpu }} - - name: Install DLLs dependencies (Windows) - if: > - steps.windows-dlls-cache.outputs.cache-hit != 'true' && - runner.os == 'Windows' - 
run: | - DLLPATH=external/dlls-${{ matrix.target.cpu }} - mkdir -p external - curl -L "https://nim-lang.org/download/windeps.zip" -o external/windeps.zip - 7z x -y external/windeps.zip -o"$DLLPATH" - - name: Path to cached dependencies (Windows) if: > runner.os == 'Windows' run: | echo '${{ github.workspace }}'"/external/mingw-${{ matrix.target.cpu }}/bin" >> $GITHUB_PATH - echo '${{ github.workspace }}'"/external/dlls-${{ matrix.target.cpu }}" >> $GITHUB_PATH - name: Get latest nimbus-build-system commit hash id: versions diff --git a/.github/workflows/portal.yml b/.github/workflows/portal.yml index 7a3de17fdd..6939465ca2 100644 --- a/.github/workflows/portal.yml +++ b/.github/workflows/portal.yml @@ -113,7 +113,7 @@ jobs: - os: linux cpu: amd64 - os: macos - cpu: amd64 + cpu: arm64 - os: windows cpu: amd64 include: @@ -141,6 +141,8 @@ jobs: run: | if [[ '${{ matrix.target.cpu }}' == 'amd64' ]]; then PLATFORM=x64 + elif [[ '${{ matrix.target.cpu }}' == 'arm64' ]]; then + PLATFORM=arm64 else PLATFORM=x86 fi @@ -201,14 +203,6 @@ jobs: path: external/mingw-${{ matrix.target.cpu }} key: 'mingw-llvm-17-${{ matrix.target.cpu }}' - - name: Restore Nim DLLs dependencies (Windows) from cache - if: runner.os == 'Windows' - id: windows-dlls-cache - uses: actions/cache@v4 - with: - path: external/dlls-${{ matrix.target.cpu }} - key: 'dlls-${{ matrix.target.cpu }}-portal' - - name: Install llvm-mingw dependency (Windows) if: > steps.windows-mingw-cache.outputs.cache-hit != 'true' && @@ -227,22 +221,11 @@ jobs: 7z x -y "external/mingw-${{ matrix.target.cpu }}.zip" -oexternal/mingw-${{ matrix.target.cpu }}/ mv external/mingw-${{ matrix.target.cpu }}/**/* ./external/mingw-${{ matrix.target.cpu }} - - name: Install DLLs dependencies (Windows) - if: > - steps.windows-dlls-cache.outputs.cache-hit != 'true' && - runner.os == 'Windows' - run: | - DLLPATH=external/dlls-${{ matrix.target.cpu }} - mkdir -p external - curl -L "https://nim-lang.org/download/windeps.zip" -o 
external/windeps.zip - 7z x -y external/windeps.zip -o"$DLLPATH" - - name: Path to cached dependencies (Windows) if: > runner.os == 'Windows' run: | echo '${{ github.workspace }}'"/external/mingw-${{ matrix.target.cpu }}/bin" >> $GITHUB_PATH - echo '${{ github.workspace }}'"/external/dlls-${{ matrix.target.cpu }}" >> $GITHUB_PATH - name: Get latest nimbus-build-system commit hash id: versions diff --git a/.github/workflows/simulators.yml b/.github/workflows/simulators.yml index 3cd8e96a89..ffd08c901f 100644 --- a/.github/workflows/simulators.yml +++ b/.github/workflows/simulators.yml @@ -1,5 +1,5 @@ # Nimbus -# Copyright (c) 2021-2024 Status Research & Development GmbH +# Copyright (c) 2021-2025 Status Research & Development GmbH # Licensed under either of # * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or # http://www.apache.org/licenses/LICENSE-2.0) @@ -109,23 +109,6 @@ jobs: - name: Checkout code uses: actions/checkout@v4 - - name: Restore Nim DLLs dependencies from cache - id: windows-dlls-cache - uses: actions/cache@v4 - with: - path: external/dlls - # according to docu, idle caches are kept for up to 7 days - # so change dlls# to force new cache contents (for some number #) - key: dlls1 - - - name: Install DLLs dependencies - if: steps.windows-dlls-cache.outputs.cache-hit != 'true' - run: | - DLLPATH=external/dlls - mkdir -p external - curl -L "https://nim-lang.org/download/windeps.zip" -o external/windeps.zip - 7z x -y external/windeps.zip -o"$DLLPATH" - - name: Restore llvm-mingw from cache id: windows-mingw-cache uses: actions/cache@v4 @@ -146,7 +129,6 @@ jobs: - name: Path to cached dependencies run: | echo '${{ github.workspace }}'"/external/mingw-amd64/bin" >> $GITHUB_PATH - echo '${{ github.workspace }}'"/external/dlls" >> $GITHUB_PATH - name: Get latest nimbus-build-system commit hash id: versions diff --git a/config.nims b/config.nims index 52549f8d0f..7a67d1f935 100644 --- a/config.nims +++ b/config.nims @@ -164,6 +164,8 @@ if 
canEnableDebuggingSymbols: --define:nimOldCaseObjects # https://github.com/status-im/nim-confutils/issues/9 +switch("warningAsError", "BareExcept:on") + # `switch("warning[CaseTransition]", "off")` fails with "Error: invalid command line option: '--warning[CaseTransition]'" switch("warning", "CaseTransition:off") diff --git a/execution_chain/config.nim b/execution_chain/config.nim index be59d1817b..6f31de76e8 100644 --- a/execution_chain/config.nim +++ b/execution_chain/config.nim @@ -340,9 +340,9 @@ type name: "num-threads" .}: int persistBatchSize* {. - desc: "" + hidden defaultValue: 32'u64 - name: "persist-batch-size" .}: uint64 + name: "debug-persist-batch-size" .}: uint64 beaconSyncTargetFile* {. hidden From a44c1d5b9199396c302533232ab221a6f2e92068 Mon Sep 17 00:00:00 2001 From: bhartnett <51288821+bhartnett@users.noreply.github.com> Date: Mon, 26 May 2025 21:37:28 +0800 Subject: [PATCH 038/138] Portal Client: Add accept codes chart to Granfana monitoring dashboard (#3330) --- .../grafana/portal_grafana_dashboard.json | 113 +++++++++++++++++- 1 file changed, 112 insertions(+), 1 deletion(-) diff --git a/portal/metrics/grafana/portal_grafana_dashboard.json b/portal/metrics/grafana/portal_grafana_dashboard.json index 85d487ffb6..118ee82fc5 100644 --- a/portal/metrics/grafana/portal_grafana_dashboard.json +++ b/portal/metrics/grafana/portal_grafana_dashboard.json @@ -2059,6 +2059,117 @@ "title": "Neighborhood gossip content offers", "type": "timeseries" }, + { + "datasource": { + "type": "prometheus", + "uid": "P6693426190CB2316" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + 
"insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 44 + }, + "id": 44, + "options": { + "alertThreshold": true, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "hideZeros": false, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.5.2", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "P6693426190CB2316" + }, + "exemplar": false, + "expr": "rate(portal_offer_accept_codes_total{instance=\"${instance}\",container=\"${container}\"}[$__rate_interval])", + "interval": "", + "legendFormat": "portal_offer_accept_codes[protocol_id={{protocol_id}, accept_code={{accept_code}}]", + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "P6693426190CB2316" + }, + "exemplar": false, + "expr": "rate(portal_handle_offer_accept_codes_total{instance=\"${instance}\",container=\"${container}\"}[$__rate_interval])", + "hide": false, + "interval": "", + "legendFormat": "portal_handle_offer_accept_codes[protocol_id={{protocol_id}, accept_code={{accept_code}}]", + "refId": "B" + } + ], + "title": "", + "type": "timeseries" + }, { "datasource": { "type": "prometheus", @@ -3156,4 +3267,4 @@ "uid": "iWQQPuPnkadsf", "version": 4, "weekStart": "" -} \ No newline at end of file +} From 61d7742ee39d72dd9a5ea68e01b28e3f53fe5e5f Mon Sep 17 00:00:00 2001 From: bhartnett <51288821+bhartnett@users.noreply.github.com> Date: Mon, 26 May 2025 21:48:57 +0800 Subject: [PATCH 039/138] 
Portal Client: Improve node status log (#3327) --- portal/network/portal_node.nim | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/portal/network/portal_node.nim b/portal/network/portal_node.nim index b0bb6c9ae4..18c0d0917d 100644 --- a/portal/network/portal_node.nim +++ b/portal/network/portal_node.nim @@ -21,6 +21,8 @@ import ./history/[history_network, history_content], ./state/[state_network, state_content] +from eth/p2p/discoveryv5/routing_table import logDistance + export beacon_light_client, history_network, state_network, portal_protocol_config, forks @@ -203,12 +205,16 @@ proc statusLogLoop(n: PortalNode) {.async: (raises: []).} = # drop a lot when using the logbase2 scale, namely `/ 2` per 1 logaritmic # radius drop. # TODO: Get some float precision calculus? - let radiusPercentage = n.contentDB.dataRadius div (UInt256.high() div u256(100)) + let + radius = n.contentDB.dataRadius + radiusPercentage = radius div (UInt256.high() div u256(100)) + logRadius = logDistance(radius, u256(0)) info "Portal node status", + dbSize = $(n.contentDB.size() div 1_000_000) & "mb", radiusPercentage = radiusPercentage.toString(10) & "%", - radius = n.contentDB.dataRadius.toHex(), - dbSize = $(n.contentDB.size() div 1000) & "kb" + radius = radius.toHex(), + logRadius await sleepAsync(60.seconds) except CancelledError: From b7c89e07644845e51c3e8ce6fecdc9ff153de150 Mon Sep 17 00:00:00 2001 From: kdeme <7857583+kdeme@users.noreply.github.com> Date: Mon, 26 May 2025 15:52:32 +0200 Subject: [PATCH 040/138] Portal bridge: fixes and refactor beacon bridge (#3329) - Rework bridge result error handling fixing also an invalid res.error access - Fix bug where List of LC updates would have the wrong fork digest if it would be updates from different forks - Fix order of injecting LC updates (backfill) to be more likely accepted --- portal/bridge/beacon/portal_beacon_bridge.nim | 174 +++++++++--------- .../network/beacon/content/content_values.nim | 15 +- 
.../test_beacon_content.nim | 22 +-- .../test_beacon_network.nim | 5 +- .../eth_data_exporter/cl_data_exporter.nim | 4 +- 5 files changed, 112 insertions(+), 108 deletions(-) diff --git a/portal/bridge/beacon/portal_beacon_bridge.nim b/portal/bridge/beacon/portal_beacon_bridge.nim index b23f85c5ae..d4ca624825 100644 --- a/portal/bridge/beacon/portal_beacon_bridge.nim +++ b/portal/bridge/beacon/portal_beacon_bridge.nim @@ -96,36 +96,32 @@ proc gossipLCUpdates( return err("Unable to download LC updates: " & exc.msg) if updates.len() > 0: - withForkyObject(updates[0]): + let period = withForkyObject(updates[0]): when lcDataFork > LightClientDataFork.None: - let - slot = forkyObject.attested_header.beacon.slot - period = slot.sync_committee_period - contentKey = encode(updateContentKey(period.uint64, count)) - forkDigest = forkDigestAtEpoch(forkDigests[], epoch(slot), cfg) - - content = encodeLightClientUpdatesForked(forkDigest, updates) - - proc GossipRpcAndClose(): Future[Result[void, string]] {.async.} = - try: - let - contentKeyHex = contentKey.asSeq().toHex() - peers = await portalRpcClient.portal_beaconRandomGossip( - contentKeyHex, content.toHex() - ) - info "Beacon LC update gossiped", - peers, contentKey = contentKeyHex, period, count - return ok() - except CatchableError as e: - return err("JSON-RPC error: " & $e.msg) - - let res = await GossipRpcAndClose() - if res.isOk(): - return ok() - else: - return err(res.error) + let slot = forkyObject.attested_header.beacon.slot + slot.sync_committee_period else: return err("No LC updates pre Altair") + + let contentKey = encode(updateContentKey(period.uint64, count)) + let contentItem = encodeLightClientUpdatesForked( + ForkedLightClientUpdateList.init(updates), forkDigests[], cfg + ) + + proc GossipRpcAndClose(): Future[Result[void, string]] {.async.} = + try: + let + contentKeyHex = contentKey.asSeq().toHex() + peers = await portalRpcClient.portal_beaconRandomGossip( + contentKeyHex, contentItem.toHex() + ) + info 
"Beacon LC update gossiped", + peers, contentKey = contentKeyHex, period, count + return ok() + except CatchableError as e: + return err("JSON-RPC error: " & $e.msg) + + await GossipRpcAndClose() else: # TODO: # currently only error if no updates at all found. This might be due @@ -310,12 +306,12 @@ proc runBeacon*(config: PortalBridgeConf) {.raises: [CatchableError].} = # Bootstrap backfill, currently just one bootstrap selected by # trusted-block-root, could become a selected list, or some other way. if trustedBlockRoot.isSome(): - let res = await gossipLCBootstrapUpdate( - beaconRestClient, portalRpcClient, trustedBlockRoot.get(), cfg, forkDigests - ) - - if res.isErr(): - warn "Error gossiping LC bootstrap", error = res.error + ( + await gossipLCBootstrapUpdate( + beaconRestClient, portalRpcClient, trustedBlockRoot.get(), cfg, forkDigests + ) + ).isOkOr: + warn "Error gossiping LC bootstrap", error await portalRpcClient.close() @@ -337,32 +333,35 @@ proc runBeacon*(config: PortalBridgeConf) {.raises: [CatchableError].} = leftOver = backfillAmount mod updatesPerRequest for i in 0 ..< requestAmount: - let res = await gossipLCUpdates( - beaconRestClient, - portalRpcClient, - currentPeriod - updatesPerRequest * (i + 1) + 1, - updatesPerRequest, - cfg, - forkDigests, - ) - - if res.isErr(): - warn "Error gossiping LC updates", error = res.error + ( + await gossipLCUpdates( + beaconRestClient, + portalRpcClient, + (currentPeriod - backfillAmount) + i * updatesPerRequest + 1, + updatesPerRequest, + cfg, + forkDigests, + ) + ).isOkOr: + warn "Error gossiping LC updates", error await portalRpcClient.close() + # Give time to the nodes to process the data + await sleepAsync(3.seconds) + if leftOver > 0: - let res = await gossipLCUpdates( - beaconRestClient, - portalRpcClient, - currentPeriod - updatesPerRequest * requestAmount - leftOver + 1, - leftOver, - cfg, - forkDigests, - ) - - if res.isErr(): - warn "Error gossiping LC updates", error = res.error + ( + await 
gossipLCUpdates( + beaconRestClient, + portalRpcClient, + (currentPeriod - backfillAmount) + requestAmount * updatesPerRequest + 1, + leftOver, + cfg, + forkDigests, + ) + ).isOkOr: + warn "Error gossiping LC updates", error await portalRpcClient.close() @@ -399,37 +398,36 @@ proc runBeacon*(config: PortalBridgeConf) {.raises: [CatchableError].} = # Or basically `lightClientOptimisticUpdateSlotOffset` await sleepAsync((SECONDS_PER_SLOT div INTERVALS_PER_SLOT).int.seconds) - let res = + let lastOptimisticUpdateSlot = ( await gossipLCOptimisticUpdate(restClient, portalRpcClient, cfg, forkDigests) - - if res.isErr(): - warn "Error gossiping LC optimistic update", error = res.error - else: - if wallEpoch > lastFinalityUpdateEpoch + 2 and wallSlot > start_slot(wallEpoch): - let res = - await gossipLCFinalityUpdate(restClient, portalRpcClient, cfg, forkDigests) - - if res.isErr(): - warn "Error gossiping LC finality update", error = res.error - else: - let (slot, blockRoot) = res.value() - lastFinalityUpdateEpoch = epoch(slot) - let res = await gossipLCBootstrapUpdate( - restClient, portalRpcClient, blockRoot, cfg, forkDigests - ) - - if res.isErr(): - warn "Error gossiping LC bootstrap", error = res.error - - let res2 = await gossipHistoricalSummaries( - restClient, portalRpcClient, cfg, forkDigests + ).valueOr: + warn "Error gossiping LC optimistic update", error + return + + if wallEpoch > lastFinalityUpdateEpoch + 2 and wallSlot > start_slot(wallEpoch): + let (slot, blockRoot) = ( + await gossipLCFinalityUpdate(restClient, portalRpcClient, cfg, forkDigests) + ).valueOr: + warn "Error gossiping LC finality update", error + return + + lastFinalityUpdateEpoch = epoch(slot) + + ( + await gossipLCBootstrapUpdate( + restClient, portalRpcClient, blockRoot, cfg, forkDigests ) - if res2.isErr(): - warn "Error gossiping historical summaries", error = res.error + ).isOkOr: + warn "Error gossiping LC bootstrap", error + return - if wallPeriod > lastUpdatePeriod and wallSlot > 
start_slot(wallEpoch): - # TODO: Need to delay timing here also with one slot? - let res = await gossipLCUpdates( + (await gossipHistoricalSummaries(restClient, portalRpcClient, cfg, forkDigests)).isOkOr: + warn "Error gossiping historical summaries", error + return + + if wallPeriod > lastUpdatePeriod and wallSlot > start_slot(wallEpoch): + ( + await gossipLCUpdates( restClient, portalRpcClient, sync_committee_period(wallSlot).uint64, @@ -437,13 +435,11 @@ proc runBeacon*(config: PortalBridgeConf) {.raises: [CatchableError].} = cfg, forkDigests, ) + ).isOkOr: + warn "Error gossiping LC update", error + return - if res.isErr(): - warn "Error gossiping LC update", error = res.error - else: - lastUpdatePeriod = wallPeriod - - lastOptimisticUpdateSlot = res.get() + lastUpdatePeriod = wallPeriod proc runOnSlotLoop() {.async.} = var diff --git a/portal/network/beacon/content/content_values.nim b/portal/network/beacon/content/content_values.nim index 96f57e26f6..3e81640133 100644 --- a/portal/network/beacon/content/content_values.nim +++ b/portal/network/beacon/content/content_values.nim @@ -83,13 +83,20 @@ func encodeOptimisticUpdateForked*( encodeForkedLightClientObject(optimisticUpdate, forkDigest) func encodeLightClientUpdatesForked*( - forkDigest: ForkDigest, updates: openArray[ForkedLightClientUpdate] + updates: ForkedLightClientUpdateList, forkDigests: ForkDigests, cfg: RuntimeConfig ): seq[byte] = var list: ForkedLightClientUpdateBytesList for update in updates: - discard list.add( - ForkedLightClientUpdateBytes(encodeForkedLightClientObject(update, forkDigest)) - ) + withForkyObject(update): + when lcDataFork > LightClientDataFork.None: + let slot = forkyObject.attested_header.beacon.slot + let forkDigest = forkDigestAtEpoch(forkDigests, epoch(slot), cfg) + + discard list.add( + ForkedLightClientUpdateBytes( + encodeForkedLightClientObject(update, forkDigest) + ) + ) SSZ.encode(list) diff --git a/portal/tests/beacon_network_tests/test_beacon_content.nim 
b/portal/tests/beacon_network_tests/test_beacon_content.nim index 83e6a71f48..3c1815cef2 100644 --- a/portal/tests/beacon_network_tests/test_beacon_content.nim +++ b/portal/tests/beacon_network_tests/test_beacon_content.nim @@ -16,6 +16,7 @@ import beacon_chain/networking/network_metadata, beacon_chain/spec/forks, ../../network/beacon/beacon_content, + ../../network/beacon/beacon_init_loader, ../../eth_data/yaml_utils, "."/light_client_test_data @@ -111,17 +112,11 @@ suite "Beacon Content Keys and Values - Test Vectors": (SLOTS_PER_EPOCH * EPOCHS_PER_SYNC_COMMITTEE_PERIOD) == key.lightClientUpdateKey.startPeriod + uint64(i) - let forkDigest = forkDigestAtEpoch( - forkDigests[], - epoch(forkyObject.attested_header.beacon.slot), - metadata.cfg, - ) + # re-encode content and content key + let encoded = encodeLightClientUpdatesForked(updates, forkDigests[], metadata.cfg) - # re-encode content and content key - let encoded = encodeLightClientUpdatesForked(forkDigest, updates.asSeq()) - - check encoded.toHex() == contentValueEncoded.toHex() - check encode(key).asSeq() == contentKeyEncoded + check encoded.toHex() == contentValueEncoded.toHex() + check encode(key).asSeq() == contentKeyEncoded test "LightClientFinalityUpdate": let @@ -202,6 +197,8 @@ suite "Beacon Content Keys and Values": bellatrix: ForkDigest([0'u8, 0, 0, 3]), capella: ForkDigest([0'u8, 0, 0, 4]), deneb: ForkDigest([0'u8, 0, 0, 5]), + electra: ForkDigest([0'u8, 0, 0, 6]), + fulu: ForkDigest([0'u8, 0, 0, 7]), ) test "LightClientBootstrap": @@ -240,9 +237,10 @@ suite "Beacon Content Keys and Values": update = ForkedLightClientUpdate( kind: LightClientDataFork.Altair, altairData: altairData ) - updateList = @[update, update] + updateList = ForkedLightClientUpdateList.init(@[update, update]) + cfg = loadNetworkData("mainnet").metadata.cfg - encoded = encodeLightClientUpdatesForked(forkDigests.altair, updateList) + encoded = encodeLightClientUpdatesForked(updateList, forkDigests, cfg) decoded = 
decodeLightClientUpdatesByRange(forkDigests, encoded) check: diff --git a/portal/tests/beacon_network_tests/test_beacon_network.nim b/portal/tests/beacon_network_tests/test_beacon_network.nim index 75ad6cbceb..36ea0ad320 100644 --- a/portal/tests/beacon_network_tests/test_beacon_network.nim +++ b/portal/tests/beacon_network_tests/test_beacon_network.nim @@ -159,8 +159,9 @@ procSuite "Beacon Network": update2 = ForkedLightClientUpdate( kind: LightClientDataFork.Altair, altairData: altairData2 ) - updates = @[update1, update2] - content = encodeLightClientUpdatesForked(forkDigests.altair, updates) + updates = ForkedLightClientUpdateList.init(@[update1, update2]) + content = + encodeLightClientUpdatesForked(updates, forkDigests, networkData.metadata.cfg) startPeriod = altairData1.attested_header.beacon.slot.sync_committee_period contentKey = ContentKey( contentType: lightClientUpdate, diff --git a/portal/tools/eth_data_exporter/cl_data_exporter.nim b/portal/tools/eth_data_exporter/cl_data_exporter.nim index 20b9f0088c..4265639a4e 100644 --- a/portal/tools/eth_data_exporter/cl_data_exporter.nim +++ b/portal/tools/eth_data_exporter/cl_data_exporter.nim @@ -142,7 +142,9 @@ proc exportLCUpdates*( forkDigests[], epoch(forkyObject.attested_header.beacon.slot), cfg ) - content = encodeLightClientUpdatesForked(forkDigest, updates) + content = encodeLightClientUpdatesForked( + ForkedLightClientUpdateList.init(updates), forkDigests[], cfg + ) file = dataDir / fileName writePortalContentToYaml(file, contentKey.asSeq().to0xHex(), content.to0xHex()) From aff4b93a50679e4304f7e397884164f7eb8aa0e1 Mon Sep 17 00:00:00 2001 From: kdeme <7857583+kdeme@users.noreply.github.com> Date: Mon, 26 May 2025 19:24:28 +0200 Subject: [PATCH 041/138] Portal beacon: reset LC store on setting new trusted block root (#3331) --- portal/client/nimbus_portal_client.nim | 3 ++- portal/network/beacon/beacon_light_client.nim | 27 +++++++++++++++++-- portal/rpc/rpc_portal_nimbus_beacon_api.nim | 6 ++--- 3 
files changed, 30 insertions(+), 6 deletions(-) diff --git a/portal/client/nimbus_portal_client.nim b/portal/client/nimbus_portal_client.nim index 9a87f4dc28..e7e0a3e790 100644 --- a/portal/client/nimbus_portal_client.nim +++ b/portal/client/nimbus_portal_client.nim @@ -328,7 +328,8 @@ proc run(portalClient: PortalClient, config: PortalConf) {.raises: [CatchableErr rpcServer.installPortalBeaconApiHandlers( node.beaconNetwork.value.portalProtocol ) - rpcServer.installPortalNimbusBeaconApiHandlers(node.beaconNetwork.value) + if node.beaconLightClient.isSome(): + rpcServer.installPortalNimbusBeaconApiHandlers(node.beaconLightClient.value) if node.stateNetwork.isSome(): rpcServer.installPortalCommonApiHandlers( node.stateNetwork.value.portalProtocol, PortalSubnetwork.state diff --git a/portal/network/beacon/beacon_light_client.nim b/portal/network/beacon/beacon_light_client.nim index cd4fcee7bd..4ccf65bf05 100644 --- a/portal/network/beacon/beacon_light_client.nim +++ b/portal/network/beacon/beacon_light_client.nim @@ -1,5 +1,5 @@ # Nimbus - Portal Network -# Copyright (c) 2022-2024 Status Research & Development GmbH +# Copyright (c) 2022-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). 
@@ -193,6 +193,29 @@ proc stop*(lightClient: LightClient) {.async: (raises: []).} = proc resetToFinalizedHeader*( lightClient: LightClient, header: ForkedLightClientHeader, - current_sync_committee: altair.SyncCommittee, + current_sync_committee: SyncCommittee, ) = lightClient.processor[].resetToFinalizedHeader(header, current_sync_committee) + +proc resetToTrustedBlockRoot*( + lightClient: LightClient, trustedBlockRoot: Digest +) {.async: (raises: [CancelledError]).} = + lightClient.network.trustedBlockRoot = Opt.some(trustedBlockRoot) + + let bootstrap = (await lightClient.network.getLightClientBootstrap(trustedBlockRoot)).valueOr: + warn "Could not get bootstrap, wait for offer" + # Empty, this will reset the LC store. + # Then it will continue requesting or can receive through an offer + lightClient.processor[].resetToFinalizedHeader( + ForkedLightClientHeader(), SyncCommittee() + ) + return + + withForkyBootstrap(bootstrap): + when lcDataFork > LightClientDataFork.None: + let forkedHeader = ForkedLightClientHeader.init(forkyBootstrap.header) + lightClient.resetToFinalizedHeader( + forkedHeader, forkyBootstrap.current_sync_committee + ) + else: + warn "Could not reset to trusted block root: no light client header pre Altair" diff --git a/portal/rpc/rpc_portal_nimbus_beacon_api.nim b/portal/rpc/rpc_portal_nimbus_beacon_api.nim index 10365eb4eb..cd80b44f16 100644 --- a/portal/rpc/rpc_portal_nimbus_beacon_api.nim +++ b/portal/rpc/rpc_portal_nimbus_beacon_api.nim @@ -7,13 +7,13 @@ {.push raises: [].} -import json_rpc/rpcserver, ../network/beacon/beacon_network +import json_rpc/rpcserver, ../network/beacon/beacon_light_client export rpcserver # nimbus portal specific RPC methods for the Portal beacon network. 
-proc installPortalNimbusBeaconApiHandlers*(rpcServer: RpcServer, n: BeaconNetwork) = +proc installPortalNimbusBeaconApiHandlers*(rpcServer: RpcServer, lc: LightClient) = rpcServer.rpc("portal_nimbus_beaconSetTrustedBlockRoot") do(blockRoot: string) -> bool: let root = Digest.fromHex(blockRoot) - n.trustedBlockRoot = Opt.some(root) + await lc.resetToTrustedBlockRoot(root) true From d40f487c07c781c548833f392d7205d4aefbe098 Mon Sep 17 00:00:00 2001 From: bhartnett <51288821+bhartnett@users.noreply.github.com> Date: Tue, 27 May 2025 15:07:53 +0800 Subject: [PATCH 042/138] Portal client: Drop remaining offer content after first validation failure in state network (#3334) --- .../grafana/portal_grafana_dashboard.json | 6 +++--- portal/network/beacon/beacon_network.nim | 5 ++--- portal/network/history/history_network.nim | 2 +- portal/network/state/state_network.nim | 4 ++++ portal/network/wire/ping_extensions.nim | 2 ++ portal/network/wire/portal_protocol.nim | 21 ++----------------- .../test_portal_wire_protocol.nim | 2 +- 7 files changed, 15 insertions(+), 27 deletions(-) diff --git a/portal/metrics/grafana/portal_grafana_dashboard.json b/portal/metrics/grafana/portal_grafana_dashboard.json index 118ee82fc5..f748d38be7 100644 --- a/portal/metrics/grafana/portal_grafana_dashboard.json +++ b/portal/metrics/grafana/portal_grafana_dashboard.json @@ -2151,7 +2151,7 @@ "exemplar": false, "expr": "rate(portal_offer_accept_codes_total{instance=\"${instance}\",container=\"${container}\"}[$__rate_interval])", "interval": "", - "legendFormat": "portal_offer_accept_codes[protocol_id={{protocol_id}, accept_code={{accept_code}}]", + "legendFormat": "portal_offer_accept_codes[protocol_id={{protocol_id}}, accept_code={{accept_code}}]", "refId": "A" }, { @@ -2163,11 +2163,11 @@ "expr": "rate(portal_handle_offer_accept_codes_total{instance=\"${instance}\",container=\"${container}\"}[$__rate_interval])", "hide": false, "interval": "", - "legendFormat": 
"portal_handle_offer_accept_codes[protocol_id={{protocol_id}, accept_code={{accept_code}}]", + "legendFormat": "portal_handle_offer_accept_codes[protocol_id={{protocol_id}}, accept_code={{accept_code}}]", "refId": "B" } ], - "title": "", + "title": "Neighborhood gossip content offer accept codes", "type": "timeseries" }, { diff --git a/portal/network/beacon/beacon_network.nim b/portal/network/beacon/beacon_network.nim index c934620bb9..96af5ad11d 100644 --- a/portal/network/beacon/beacon_network.nim +++ b/portal/network/beacon/beacon_network.nim @@ -453,9 +453,8 @@ proc contentQueueWorker(n: BeaconNetwork) {.async: (raises: []).} = # TODO: Differentiate between failures due to invalid data and failures # due to missing network data for validation. if await n.validateContent(srcNodeId, contentKeys, contentItems): - await n.portalProtocol.randomGossipDiscardPeers( - srcNodeId, contentKeys, contentItems - ) + discard + await n.portalProtocol.randomGossip(srcNodeId, contentKeys, contentItems) except CancelledError: trace "contentQueueWorker canceled" diff --git a/portal/network/history/history_network.nim b/portal/network/history/history_network.nim index a4cc8e49fe..d9ca1e03a1 100644 --- a/portal/network/history/history_network.nim +++ b/portal/network/history/history_network.nim @@ -428,7 +428,7 @@ proc contentQueueWorker(n: HistoryNetwork) {.async: (raises: []).} = # TODO: Differentiate between failures due to invalid data and failures # due to missing network data for validation. 
if await n.validateContent(srcNodeId, contentKeys, contentItems): - await n.portalProtocol.neighborhoodGossipDiscardPeers( + discard await n.portalProtocol.neighborhoodGossip( srcNodeId, contentKeys, contentItems ) except CancelledError: diff --git a/portal/network/state/state_network.nim b/portal/network/state/state_network.nim index a300e00a5d..25a023946f 100644 --- a/portal/network/state/state_network.nim +++ b/portal/network/state/state_network.nim @@ -262,6 +262,10 @@ proc contentQueueWorker(n: StateNetwork) {.async: (raises: []).} = state_network_offers_failed.inc(labelValues = [$n.portalProtocol.protocolId]) error "Received offered content failed validation", srcNodeId, contentKeyBytes, error = offerRes.error() + + # The content validation failed so drop the remaining content (if any) from + # this offer, because the remainly content is also likely to fail validation. + break except CancelledError: trace "contentQueueWorker canceled" diff --git a/portal/network/wire/ping_extensions.nim b/portal/network/wire/ping_extensions.nim index 9e4dc278da..837c7f17e8 100644 --- a/portal/network/wire/ping_extensions.nim +++ b/portal/network/wire/ping_extensions.nim @@ -21,6 +21,8 @@ const MAX_CAPABILITIES_LENGTH* = 400 MAX_ERROR_BYTE_LENGTH* = 300 + NIMBUS_PORTAL_CLIENT_INFO* = ByteList[MAX_CLIENT_INFO_BYTE_LENGTH].init(@[]) + # Different ping extension payloads, TODO: could be moved to each their own file? 
type CapabilitiesPayload* = object diff --git a/portal/network/wire/portal_protocol.nim b/portal/network/wire/portal_protocol.nim index ef35795afc..a541388af6 100644 --- a/portal/network/wire/portal_protocol.nim +++ b/portal/network/wire/portal_protocol.nim @@ -440,7 +440,7 @@ proc handlePingExtension( payloadType, encodePayload( CapabilitiesPayload( - client_info: ByteList[MAX_CLIENT_INFO_BYTE_LENGTH].init(@[]), + client_info: NIMBUS_PORTAL_CLIENT_INFO, data_radius: p.dataRadius(), capabilities: List[uint16, MAX_CAPABILITIES_LENGTH].init( p.pingExtensionCapabilities.toSeq() @@ -849,7 +849,7 @@ proc pingImpl*( ): Future[PortalResult[PongMessage]] {.async: (raises: [CancelledError]).} = let pingPayload = encodePayload( CapabilitiesPayload( - client_info: ByteList[MAX_CLIENT_INFO_BYTE_LENGTH].init(@[]), + client_info: NIMBUS_PORTAL_CLIENT_INFO, data_radius: p.dataRadius(), capabilities: List[uint16, MAX_CAPABILITIES_LENGTH].init(p.pingExtensionCapabilities.toSeq()), @@ -1846,15 +1846,6 @@ proc neighborhoodGossip*( await p.offerBatchGetPeerCount(offers) -proc neighborhoodGossipDiscardPeers*( - p: PortalProtocol, - srcNodeId: Opt[NodeId], - contentKeys: ContentKeysList, - content: seq[seq[byte]], - enableNodeLookup = false, -): Future[void] {.async: (raises: [CancelledError]).} = - discard await p.neighborhoodGossip(srcNodeId, contentKeys, content, enableNodeLookup) - proc randomGossip*( p: PortalProtocol, srcNodeId: Opt[NodeId], @@ -1877,14 +1868,6 @@ proc randomGossip*( await p.offerBatchGetPeerCount(offers) -proc randomGossipDiscardPeers*( - p: PortalProtocol, - srcNodeId: Opt[NodeId], - contentKeys: ContentKeysList, - content: seq[seq[byte]], -): Future[void] {.async: (raises: [CancelledError]).} = - discard await p.randomGossip(srcNodeId, contentKeys, content) - proc storeContent*( p: PortalProtocol, contentKey: ContentKeyByteList, diff --git a/portal/tests/wire_protocol_tests/test_portal_wire_protocol.nim 
b/portal/tests/wire_protocol_tests/test_portal_wire_protocol.nim index 5748f94b76..1393100f25 100644 --- a/portal/tests/wire_protocol_tests/test_portal_wire_protocol.nim +++ b/portal/tests/wire_protocol_tests/test_portal_wire_protocol.nim @@ -94,7 +94,7 @@ procSuite "Portal Wire Protocol Tests": let pong = await proto1.ping(proto2.localNode) let customPayload = CapabilitiesPayload( - client_info: ByteList[MAX_CLIENT_INFO_BYTE_LENGTH].init(@[]), + client_info: NIMBUS_PORTAL_CLIENT_INFO, data_radius: UInt256.high(), capabilities: List[uint16, MAX_CAPABILITIES_LENGTH].init( proto1.pingExtensionCapabilities.toSeq() From 34fc594508f30928078bac80ce4ec720b1703f07 Mon Sep 17 00:00:00 2001 From: andri lim Date: Tue, 27 May 2025 15:25:02 +0700 Subject: [PATCH 043/138] Wire protocol: avoid recomputing block hash in getStatus68/69 (#3332) --- execution_chain/sync/wire_protocol/handler.nim | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/execution_chain/sync/wire_protocol/handler.nim b/execution_chain/sync/wire_protocol/handler.nim index 7a6ec54adc..b5fe62a4d5 100644 --- a/execution_chain/sync/wire_protocol/handler.nim +++ b/execution_chain/sync/wire_protocol/handler.nim @@ -54,7 +54,7 @@ proc getStatus68*(ctx: EthWireRef): Eth68State = Eth68State( totalDifficulty: txFrame.headTotalDifficulty, genesisHash: com.genesisHash, - bestBlockHash: bestBlock.computeBlockHash, + bestBlockHash: ctx.chain.latestHash, forkId: ChainForkId( forkHash: forkId.crc.toBytesBE, forkNext: forkId.nextFork @@ -74,7 +74,7 @@ proc getStatus69*(ctx: EthWireRef): Eth69State = ), earliest: 0, latest: bestBlock.number, - latestHash: bestBlock.computeBlockHash, + latestHash: ctx.chain.latestHash, ) proc getReceipts*(ctx: EthWireRef, From 60072e55538e81191fdd26b4bbfa0cc64700115c Mon Sep 17 00:00:00 2001 From: kdeme <7857583+kdeme@users.noreply.github.com> Date: Tue, 27 May 2025 14:30:42 +0200 Subject: [PATCH 044/138] Disable testutp CI job until it gets fixed (#3336) Tried previous 
commits (before it first started failing) and those also fails, which would indicate that it is an issue caused by a change in the testing env. Can reproduce locally so will be further investigated but disabled in CI to not block other PRs by this recurrent failure. --- .github/workflows/portal.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/portal.yml b/.github/workflows/portal.yml index 6939465ca2..1ae370cbf3 100644 --- a/.github/workflows/portal.yml +++ b/.github/workflows/portal.yml @@ -40,7 +40,8 @@ jobs: runs-on: ubuntu-22.04 # TODO: for now only push event as this way it is easier to get branch name # to build container - if: github.event_name == 'push' + # if: github.event_name == 'push' + if: false steps: - name: Checkout nimbus-eth1 uses: actions/checkout@v4 From f7f9b3e33bd7d07077c267f1f4746123fdc7e97a Mon Sep 17 00:00:00 2001 From: Chirag Parmar Date: Tue, 27 May 2025 21:23:18 +0530 Subject: [PATCH 045/138] proxy: housekeeping (#3315) * refactor accounts based calls * housekeeping * isolate into files * solve quantity problem * remove comment * review * Update nimbus_verified_proxy/nimbus_verified_proxy.nim Co-authored-by: bhartnett <51288821+bhartnett@users.noreply.github.com> * add handlers to rpc_eth_api --------- Co-authored-by: bhartnett <51288821+bhartnett@users.noreply.github.com> --- .../nimbus_verified_proxy.nim | 3 +- .../{validate_proof.nim => rpc/accounts.nim} | 95 ++++++++++-- nimbus_verified_proxy/rpc/blocks.nim | 67 +++++++++ nimbus_verified_proxy/rpc/rpc_eth_api.nim | 140 ++---------------- .../tests/test_proof_validation.nim | 6 +- nimbus_verified_proxy/types.nim | 27 ++++ 6 files changed, 197 insertions(+), 141 deletions(-) rename nimbus_verified_proxy/{validate_proof.nim => rpc/accounts.nim} (50%) create mode 100644 nimbus_verified_proxy/rpc/blocks.nim create mode 100644 nimbus_verified_proxy/types.nim diff --git a/nimbus_verified_proxy/nimbus_verified_proxy.nim 
b/nimbus_verified_proxy/nimbus_verified_proxy.nim index 4a8f47ef48..1038577182 100644 --- a/nimbus_verified_proxy/nimbus_verified_proxy.nim +++ b/nimbus_verified_proxy/nimbus_verified_proxy.nim @@ -23,6 +23,7 @@ import beacon_chain/[light_client, nimbus_binary_common, version], ../execution_chain/rpc/cors, ../execution_chain/common/common, + ./types, ./rpc/rpc_eth_api, ./nimbus_verified_proxy_conf, ./header_store @@ -83,7 +84,7 @@ proc run*( # header cache contains headers downloaded from p2p headerStore = HeaderStore.new(config.cacheLen) - var verifiedProxy = VerifiedRpcProxy.new(rpcProxy, headerStore, chainId) + let verifiedProxy = VerifiedRpcProxy.init(rpcProxy, headerStore, chainId) # add handlers that verify RPC calls /rpc/rpc_eth_api.nim verifiedProxy.installEthApiHandlers() diff --git a/nimbus_verified_proxy/validate_proof.nim b/nimbus_verified_proxy/rpc/accounts.nim similarity index 50% rename from nimbus_verified_proxy/validate_proof.nim rename to nimbus_verified_proxy/rpc/accounts.nim index 1e14dc3753..b572a05896 100644 --- a/nimbus_verified_proxy/validate_proof.nim +++ b/nimbus_verified_proxy/rpc/accounts.nim @@ -1,5 +1,5 @@ # nimbus_verified_proxy -# Copyright (c) 2022-2025 Status Research & Development GmbH +# Copyright (c) 2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). 
@@ -10,12 +10,15 @@ import std/sequtils, stint, + chronos, results, - eth/common/[base_rlp, accounts_rlp, hashes_rlp], + chronicles, + eth/common/eth_types_rlp, eth/trie/[hexary_proof_verification], - web3/eth_api_types - -export results, stint, hashes_rlp, accounts_rlp, eth_api_types + json_rpc/[rpcproxy, rpcserver, rpcclient], + web3/[primitives, eth_api_types, eth_api], + ../../execution_chain/beacon/web3_eth_conv, + ../types proc getAccountFromProof*( stateRoot: Hash32, @@ -35,7 +38,7 @@ proc getAccountFromProof*( codeHash: accountCodeHash, ) accountEncoded = rlp.encode(acc) - accountKey = keccak256((accountAddress.data)).data + accountKey = toSeq(keccak256((accountAddress.data)).data) let proofResult = verifyMptProof(mptNodesBytes, stateRoot, accountKey, accountEncoded) @@ -47,12 +50,12 @@ proc getAccountFromProof*( of InvalidProof: return err(proofResult.errorMsg) -proc getStorageData( +proc getStorageFromProof( account: Account, storageProof: StorageProof ): Result[UInt256, string] = let storageMptNodes = storageProof.proof.mapIt(distinctBase(it)) - key = keccak256(toBytesBE(storageProof.key)).data + key = toSeq(keccak256(toBytesBE(storageProof.key)).data) encodedValue = rlp.encode(storageProof.value) proofResult = verifyMptProof(storageMptNodes, account.storageRoot, key, encodedValue) @@ -65,7 +68,7 @@ proc getStorageData( of InvalidProof: return err(proofResult.errorMsg) -proc getStorageData*( +proc getStorageFromProof*( stateRoot: Hash32, requestedSlot: UInt256, proof: ProofResponse ): Result[UInt256, string] = let account = @@ -90,7 +93,75 @@ proc getStorageData*( if storageProof.key != requestedSlot: return err("received proof for invalid slot") - getStorageData(account, storageProof) + getStorageFromProof(account, storageProof) + +proc getAccount*( + lcProxy: VerifiedRpcProxy, + address: Address, + blockNumber: base.BlockNumber, + stateRoot: Root, +): Future[Result[Account, string]] {.async.} = + info "Forwarding eth_getAccount", blockNumber + + let + 
proof = + try: + await lcProxy.rpcClient.eth_getProof(address, @[], blockId(blockNumber)) + except CatchableError as e: + return err(e.msg) + + account = getAccountFromProof( + stateRoot, proof.address, proof.balance, proof.nonce, proof.codeHash, + proof.storageHash, proof.accountProof, + ) + + return account + +proc getCode*( + lcProxy: VerifiedRpcProxy, + address: Address, + blockNumber: base.BlockNumber, + stateRoot: Root, +): Future[Result[seq[byte], string]] {.async.} = + # get verified account details for the address at blockNumber + let account = (await lcProxy.getAccount(address, blockNumber, stateRoot)).valueOr: + return err(error) + + # if the account does not have any code, return empty hex data + if account.codeHash == EMPTY_CODE_HASH: + return ok(newSeq[byte]()) + + info "Forwarding eth_getCode", blockNumber + + let code = + try: + await lcProxy.rpcClient.eth_getCode(address, blockId(blockNumber)) + except CatchableError as e: + return err(e.msg) + + # verify the byte code. 
since we verified the account against + # the state root we just need to verify the code hash + if account.codeHash == keccak256(code): + return ok(code) + else: + return err("received code doesn't match the account code hash") + +proc getStorageAt*( + lcProxy: VerifiedRpcProxy, + address: Address, + slot: UInt256, + blockNumber: base.BlockNumber, + stateRoot: Root, +): Future[Result[UInt256, string]] {.async.} = + info "Forwarding eth_getStorageAt", blockNumber + + let + proof = + try: + await lcProxy.rpcClient.eth_getProof(address, @[slot], blockId(blockNumber)) + except CatchableError as e: + return err(e.msg) + + slotValue = getStorageFromProof(stateRoot, slot, proof) -func isValidCode*(account: Account, code: openArray[byte]): bool = - account.codeHash == keccak256(code) + return slotValue diff --git a/nimbus_verified_proxy/rpc/blocks.nim b/nimbus_verified_proxy/rpc/blocks.nim new file mode 100644 index 0000000000..5eccaab9a2 --- /dev/null +++ b/nimbus_verified_proxy/rpc/blocks.nim @@ -0,0 +1,67 @@ +# nimbus_verified_proxy +# Copyright (c) 2025 Status Research & Development GmbH +# Licensed and distributed under either of +# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). +# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). +# at your option. This file may not be copied, modified, or distributed except according to those terms. 
+ +{.push raises: [].} + +import + std/strutils, + stint, + chronos, + results, + eth/common/eth_types_rlp, + web3/eth_api_types, + ../header_store, + ../types + +type + QuantityTagKind = enum + LatestBlock + BlockNumber + + QuantityTag = object + case kind: QuantityTagKind + of LatestBlock: + discard + of BlockNumber: + blockNumber: Quantity + +func parseQuantityTag(blockTag: BlockTag): Result[QuantityTag, string] = + if blockTag.kind == bidAlias: + let tag = blockTag.alias.toLowerAscii + case tag + of "latest": + return ok(QuantityTag(kind: LatestBlock)) + else: + return err("Unsupported blockTag: " & tag) + else: + let quantity = blockTag.number + return ok(QuantityTag(kind: BlockNumber, blockNumber: quantity)) + +template checkPreconditions(proxy: VerifiedRpcProxy) = + if proxy.headerStore.isEmpty(): + raise newException(ValueError, "Syncing") + +proc getHeaderByTag( + proxy: VerifiedRpcProxy, quantityTag: BlockTag +): results.Opt[Header] {.raises: [ValueError].} = + checkPreconditions(proxy) + + let tag = parseQuantityTag(quantityTag).valueOr: + raise newException(ValueError, error) + + case tag.kind + of LatestBlock: + # this will always return some block, as we always checkPreconditions + proxy.headerStore.latest + of BlockNumber: + proxy.headerStore.get(base.BlockNumber(distinctBase(tag.blockNumber))) + +proc getHeaderByTagOrThrow*( + proxy: VerifiedRpcProxy, quantityTag: BlockTag +): Header {.raises: [ValueError].} = + getHeaderByTag(proxy, quantityTag).valueOr: + raise newException(ValueError, "No block stored for given tag " & $quantityTag) diff --git a/nimbus_verified_proxy/rpc/rpc_eth_api.nim b/nimbus_verified_proxy/rpc/rpc_eth_api.nim index a0d9a4a73c..7b155a6ac1 100644 --- a/nimbus_verified_proxy/rpc/rpc_eth_api.nim +++ b/nimbus_verified_proxy/rpc/rpc_eth_api.nim @@ -8,77 +8,19 @@ {.push raises: [].} import - std/strutils, results, chronicles, json_rpc/[rpcserver, rpcclient, rpcproxy], eth/common/accounts, web3/eth_api, - ../validate_proof, - 
../header_store + ../types, + ../header_store, + ./accounts, + ./blocks logScope: topics = "verified_proxy" -type - VerifiedRpcProxy* = ref object - proxy: RpcProxy - headerStore: HeaderStore - chainId: UInt256 - - QuantityTagKind = enum - LatestBlock - BlockNumber - - BlockTag = eth_api_types.RtBlockIdentifier - - QuantityTag = object - case kind: QuantityTagKind - of LatestBlock: - discard - of BlockNumber: - blockNumber: Quantity - -func parseQuantityTag(blockTag: BlockTag): Result[QuantityTag, string] = - if blockTag.kind == bidAlias: - let tag = blockTag.alias.toLowerAscii - case tag - of "latest": - return ok(QuantityTag(kind: LatestBlock)) - else: - return err("Unsupported blockTag: " & tag) - else: - let quantity = blockTag.number - return ok(QuantityTag(kind: BlockNumber, blockNumber: quantity)) - -template checkPreconditions(proxy: VerifiedRpcProxy) = - if proxy.headerStore.isEmpty(): - raise newException(ValueError, "Syncing") - -template rpcClient(lcProxy: VerifiedRpcProxy): RpcClient = - lcProxy.proxy.getClient() - -proc getHeaderByTag( - proxy: VerifiedRpcProxy, quantityTag: BlockTag -): results.Opt[Header] {.raises: [ValueError].} = - checkPreconditions(proxy) - - let tag = parseQuantityTag(quantityTag).valueOr: - raise newException(ValueError, error) - - case tag.kind - of LatestBlock: - # this will always return some block, as we always checkPreconditions - proxy.headerStore.latest - of BlockNumber: - proxy.headerStore.get(base.BlockNumber(distinctBase(tag.blockNumber))) - -proc getHeaderByTagOrThrow( - proxy: VerifiedRpcProxy, quantityTag: BlockTag -): Header {.raises: [ValueError].} = - getHeaderByTag(proxy, quantityTag).valueOr: - raise newException(ValueError, "No block stored for given tag " & $quantityTag) - proc installEthApiHandlers*(lcProxy: VerifiedRpcProxy) = lcProxy.proxy.rpc("eth_chainId") do() -> UInt256: lcProxy.chainId @@ -93,22 +35,10 @@ proc installEthApiHandlers*(lcProxy: VerifiedRpcProxy) = lcProxy.proxy.rpc("eth_getBalance") 
do( address: Address, quantityTag: BlockTag ) -> UInt256: - # When requesting state for `latest` block number, we need to translate - # `latest` to actual block number as `latest` on proxy and on data provider - # can mean different blocks and ultimatly piece received piece of state - # must by validated against correct state root let header = lcProxy.getHeaderByTagOrThrow(quantityTag) - blockNumber = header.number.uint64 - - info "Forwarding eth_getBalance call", blockNumber - let - proof = await lcProxy.rpcClient.eth_getProof(address, @[], blockId(blockNumber)) - account = getAccountFromProof( - header.stateRoot, proof.address, proof.balance, proof.nonce, proof.codeHash, - proof.storageHash, proof.accountProof, - ).valueOr: + account = (await lcProxy.getAccount(address, header.number, header.stateRoot)).valueOr: raise newException(ValueError, error) account.balance @@ -118,64 +48,32 @@ proc installEthApiHandlers*(lcProxy: VerifiedRpcProxy) = ) -> UInt256: let header = lcProxy.getHeaderByTagOrThrow(quantityTag) - blockNumber = header.number.uint64 - - info "Forwarding eth_getStorageAt", blockNumber - - let proof = - await lcProxy.rpcClient.eth_getProof(address, @[slot], blockId(blockNumber)) + storage = ( + await lcProxy.getStorageAt(address, slot, header.number, header.stateRoot) + ).valueOr: + raise newException(ValueError, error) - getStorageData(header.stateRoot, slot, proof).valueOr: - raise newException(ValueError, error) + storage lcProxy.proxy.rpc("eth_getTransactionCount") do( address: Address, quantityTag: BlockTag - ) -> uint64: + ) -> Quantity: let header = lcProxy.getHeaderByTagOrThrow(quantityTag) - blockNumber = header.number.uint64 - - info "Forwarding eth_getTransactionCount", blockNumber - - let - proof = await lcProxy.rpcClient.eth_getProof(address, @[], blockId(blockNumber)) - - account = getAccountFromProof( - header.stateRoot, proof.address, proof.balance, proof.nonce, proof.codeHash, - proof.storageHash, proof.accountProof, - ).valueOr: + 
account = (await lcProxy.getAccount(address, header.number, header.stateRoot)).valueOr: raise newException(ValueError, error) - account.nonce + Quantity(account.nonce) lcProxy.proxy.rpc("eth_getCode") do( address: Address, quantityTag: BlockTag ) -> seq[byte]: let header = lcProxy.getHeaderByTagOrThrow(quantityTag) - blockNumber = header.number.uint64 - - info "Forwarding eth_getCode", blockNumber - let - proof = await lcProxy.rpcClient.eth_getProof(address, @[], blockId(blockNumber)) - account = getAccountFromProof( - header.stateRoot, proof.address, proof.balance, proof.nonce, proof.codeHash, - proof.storageHash, proof.accountProof, - ).valueOr: + code = (await lcProxy.getCode(address, header.number, header.stateRoot)).valueOr: raise newException(ValueError, error) - if account.codeHash == EMPTY_CODE_HASH: - # account does not have any code, return empty hex data - return @[] - - let code = await lcProxy.rpcClient.eth_getCode(address, blockId(blockNumber)) - - if isValidCode(account, code): - return code - else: - raise newException( - ValueError, "Received code which does not match the account code hash" - ) + code # TODO: # Following methods are forwarded directly to the web3 provider and therefore @@ -187,14 +85,6 @@ proc installEthApiHandlers*(lcProxy: VerifiedRpcProxy) = lcProxy.proxy.registerProxyMethod("eth_getBlockByNumber") lcProxy.proxy.registerProxyMethod("eth_getBlockByHash") -proc new*( - T: type VerifiedRpcProxy, - proxy: RpcProxy, - headerStore: HeaderStore, - chainId: UInt256, -): T = - VerifiedRpcProxy(proxy: proxy, headerStore: headerStore, chainId: chainId) - # Used to be in eth1_monitor.nim; not sure why it was deleted, # so I copied it here. 
--Adam template awaitWithRetries*[T]( diff --git a/nimbus_verified_proxy/tests/test_proof_validation.nim b/nimbus_verified_proxy/tests/test_proof_validation.nim index c89d1c9ea8..d067f8d5b7 100644 --- a/nimbus_verified_proxy/tests/test_proof_validation.nim +++ b/nimbus_verified_proxy/tests/test_proof_validation.nim @@ -1,5 +1,5 @@ # nimbus_verified_proxy -# Copyright (c) 2022-2024 Status Research & Development GmbH +# Copyright (c) 2022-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). @@ -9,7 +9,7 @@ {.push raises: [].} -import unittest2, stint, stew/byteutils, web3, ../validate_proof +import unittest2, stint, stew/byteutils, web3, ../rpc/accounts suite "Merkle proof of inclusion validation": test "Validate account proof": @@ -145,7 +145,7 @@ suite "Merkle proof of inclusion validation": ], ) - let validationResult = getStorageData(stateRoot, u256(0), proof) + let validationResult = getStorageFromProof(stateRoot, u256(0), proof) check: validationResult.isOk() diff --git a/nimbus_verified_proxy/types.nim b/nimbus_verified_proxy/types.nim new file mode 100644 index 0000000000..b9eea082b6 --- /dev/null +++ b/nimbus_verified_proxy/types.nim @@ -0,0 +1,27 @@ +# nimbus_verified_proxy +# Copyright (c) 2025 Status Research & Development GmbH +# Licensed and distributed under either of +# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). +# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). +# at your option. This file may not be copied, modified, or distributed except according to those terms. 
+ +import stint, json_rpc/[rpcclient, rpcproxy], web3/eth_api_types, ./header_store + +type + VerifiedRpcProxy* = ref object + proxy*: RpcProxy + headerStore*: HeaderStore + chainId*: UInt256 + + BlockTag* = eth_api_types.RtBlockIdentifier + +template rpcClient*(vp: VerifiedRpcProxy): RpcClient = + vp.proxy.getClient() + +proc init*( + T: type VerifiedRpcProxy, + proxy: RpcProxy, + headerStore: HeaderStore, + chainId: UInt256, +): T = + VerifiedRpcProxy(proxy: proxy, headerStore: headerStore, chainId: chainId) From a880ab5639125cefaa8c6fdfbf72b11c8098d6a6 Mon Sep 17 00:00:00 2001 From: andri lim Date: Wed, 28 May 2025 09:17:07 +0700 Subject: [PATCH 046/138] Synchronize FC block processing using asyncQueue (#3333) * Synchronize FC block processing using asyncQueue #3259 introduce asynchronous block import and fork choice to FC module. And because it is called from 3 places: 1. engine_newPayload 2. engine_forkChoiceUpdated 3. beacon syncer The interaction causing FC state corruption. Most visible problem is state root mismatch error. 
--- .../beacon/api_handler/api_forkchoice.nim | 2 +- .../beacon/api_handler/api_newpayload.nim | 2 +- execution_chain/core/chain/forked_chain.nim | 79 ++++++++++++++++--- .../core/chain/forked_chain/chain_desc.nim | 11 +++ execution_chain/nimbus_desc.nim | 26 ++++-- execution_chain/nimbus_execution_client.nim | 3 +- .../worker/blocks_staged/staged_blocks.nim | 4 +- .../nodocker/engine/engine_env.nim | 3 +- tests/test_engine_api.nim | 3 +- 9 files changed, 108 insertions(+), 25 deletions(-) diff --git a/execution_chain/beacon/api_handler/api_forkchoice.nim b/execution_chain/beacon/api_handler/api_forkchoice.nim index d25deba2d2..aa183db0ea 100644 --- a/execution_chain/beacon/api_handler/api_forkchoice.nim +++ b/execution_chain/beacon/api_handler/api_forkchoice.nim @@ -188,7 +188,7 @@ proc forkchoiceUpdated*(ben: BeaconEngineRef, raise invalidForkChoiceState("safe block not in canonical tree") # similar to headHash, safeBlockHash is saved by FC module - (await chain.forkChoice(headHash, finalizedBlockHash, safeBlockHash)).isOkOr: + (await chain.queueForkChoice(headHash, finalizedBlockHash, safeBlockHash)).isOkOr: return invalidFCU(error, chain, header) # If payload generation was requested, create a new block to be potentially diff --git a/execution_chain/beacon/api_handler/api_newpayload.nim b/execution_chain/beacon/api_handler/api_newpayload.nim index 606e78d0ed..064d578161 100644 --- a/execution_chain/beacon/api_handler/api_newpayload.nim +++ b/execution_chain/beacon/api_handler/api_newpayload.nim @@ -233,7 +233,7 @@ proc newPayload*(ben: BeaconEngineRef, trace "Importing block without sethead", hash = blockHash, number = header.number - let vres = await chain.importBlock(blk, finalized = false) + let vres = await chain.queueImportBlock(blk, finalized = false) if vres.isErr: warn "Error importing block", number = header.number, diff --git a/execution_chain/core/chain/forked_chain.nim b/execution_chain/core/chain/forked_chain.nim index 6af4a03ad9..a0cdcc5220 100644 
--- a/execution_chain/core/chain/forked_chain.nim +++ b/execution_chain/core/chain/forked_chain.nim @@ -41,6 +41,7 @@ export const BaseDistance = 128'u64 PersistBatchSize = 32'u64 + MaxQueueSize = 9 # ------------------------------------------------------------------------------ # Forward declarations @@ -490,6 +491,26 @@ proc updateBase(c: ForkedChainRef, newBase: BlockPos): pendingFCU = c.pendingFCU.short, resolvedFin= c.latestFinalizedBlockNumber +proc processQueue(c: ForkedChainRef) {.async: (raises: [CancelledError]).} = + while true: + # Cooperative concurrency: one block per loop iteration - because + # we run both networking and CPU-heavy things like block processing + # on the same thread, we need to make sure that there is steady progress + # on the networking side or we get long lockups that lead to timeouts. + const + # We cap waiting for an idle slot in case there's a lot of network traffic + # taking up all CPU - we don't want to _completely_ stop processing blocks + # in this case - doing so also allows us to benefit from more batching / + # larger network reads when under load. + idleTimeout = 10.milliseconds + + discard await idleAsync().withTimeout(idleTimeout) + let + item = await c.queue.popFirst() + res = await item.handler() + if not item.responseFut.finished: + item.responseFut.complete res + # ------------------------------------------------------------------------------ # Public functions # ------------------------------------------------------------------------------ @@ -500,6 +521,7 @@ proc init*( baseDistance = BaseDistance; persistBatchSize = PersistBatchSize; eagerStateRoot = false; + enableQueue = false; ): T = ## Constructor that uses the current database ledger state for initialising. 
## This state coincides with the canonical head that would be used for @@ -523,18 +545,24 @@ proc init*( FcuHashAndNumber(hash: baseHash, number: baseHeader.number) fcuSafe = baseTxFrame.fcuSafe().valueOr: FcuHashAndNumber(hash: baseHash, number: baseHeader.number) + fc = T(com: com, + baseBranch: baseBranch, + activeBranch: baseBranch, + branches: @[baseBranch], + hashToBlock: {baseHash: baseBranch.lastBlockPos}.toTable, + baseTxFrame: baseTxFrame, + baseDistance: baseDistance, + persistBatchSize:persistBatchSize, + quarantine: Quarantine.init(), + fcuHead: fcuHead, + fcuSafe: fcuSafe, + ) + + if enableQueue: + fc.queue = newAsyncQueue[QueueItem](maxsize = MaxQueueSize) + fc.processingQueueLoop = fc.processQueue() - T(com: com, - baseBranch: baseBranch, - activeBranch: baseBranch, - branches: @[baseBranch], - hashToBlock: {baseHash: baseBranch.lastBlockPos}.toTable, - baseTxFrame: baseTxFrame, - baseDistance: baseDistance, - persistBatchSize:persistBatchSize, - quarantine: Quarantine.init(), - fcuHead: fcuHead, - fcuSafe: fcuSafe) + fc proc importBlock*(c: ForkedChainRef, blk: Block, finalized = false): Future[Result[void, string]] {.async: (raises: [CancelledError]).} = @@ -653,6 +681,35 @@ proc forkChoice*(c: ForkedChainRef, ok() +proc stopProcessingQueue*(c: ForkedChainRef) {.async: (raises: [CancelledError]).} = + doAssert(c.processingQueueLoop.isNil.not, "Please set enableQueue=true when constructing FC") + await c.processingQueueLoop.cancelAndWait() + +template queueImportBlock*(c: ForkedChainRef, blk: Block, finalized = false): auto = + proc asyncHandler(): Future[Result[void, string]] {.async: (raises: [CancelledError]).} = + await c.importBlock(blk, finalized) + + let item = QueueItem( + responseFut: Future[Result[void, string]].Raising([CancelledError]).init(), + handler: asyncHandler + ) + await c.queue.addLast(item) + item.responseFut + +template queueForkChoice*(c: ForkedChainRef, + headHash: Hash32, + finalizedHash: Hash32, + safeHash: Hash32 = 
zeroHash32): auto = + proc asyncHandler(): Future[Result[void, string]] {.async: (raises: [CancelledError]).} = + await c.forkChoice(headHash, finalizedHash, safeHash) + + let item = QueueItem( + responseFut: Future[Result[void, string]].Raising([CancelledError]).init(), + handler: asyncHandler + ) + await c.queue.addLast(item) + item.responseFut + func finHash*(c: ForkedChainRef): Hash32 = c.pendingFCU diff --git a/execution_chain/core/chain/forked_chain/chain_desc.nim b/execution_chain/core/chain/forked_chain/chain_desc.nim index ee398898eb..91beaba5b0 100644 --- a/execution_chain/core/chain/forked_chain/chain_desc.nim +++ b/execution_chain/core/chain/forked_chain/chain_desc.nim @@ -12,6 +12,7 @@ import std/tables, + chronos, ../../../common, ../../../db/[core_db, fcu_db], ../../../portal/portal, @@ -21,6 +22,10 @@ import export tables type + QueueItem* = object + responseFut*: Future[Result[void, string]].Raising([CancelledError]) + handler*: proc(): Future[Result[void, string]] {.async: (raises: [CancelledError]).} + ForkedChainRef* = ref object com*: CommonRef hashToBlock* : Table[Hash32, BlockPos] @@ -74,6 +79,12 @@ type fcuHead*: FcuHashAndNumber fcuSafe*: FcuHashAndNumber + # Tracking current head and safe block of FC serialization. + + queue*: AsyncQueue[QueueItem] + processingQueueLoop*: Future[void].Raising([CancelledError]) + # Prevent async re-entrancy messing up FC state + # on both `importBlock` and `forkChoice`. # ------------------------------------------------------------------------------ # These functions are private to ForkedChainRef diff --git a/execution_chain/nimbus_desc.nim b/execution_chain/nimbus_desc.nim index 9be0392e42..194d6f4eb2 100644 --- a/execution_chain/nimbus_desc.nim +++ b/execution_chain/nimbus_desc.nim @@ -8,7 +8,9 @@ # those terms. 
import + std/sequtils, chronos, + chronicles, ./networking/p2p, metrics/chronos_httpserver, ./rpc/rpc_server, @@ -55,17 +57,27 @@ type proc stop*(nimbus: NimbusNode, conf: NimbusConf) {.async, gcsafe.} = trace "Graceful shutdown" + var waitedFutures: seq[Future[void]] if nimbus.httpServer.isNil.not: - await nimbus.httpServer.stop() + waitedFutures.add nimbus.httpServer.stop() if nimbus.engineApiServer.isNil.not: - await nimbus.engineApiServer.stop() - if nimbus.beaconSyncRef.isNil.not: - await nimbus.beaconSyncRef.stop() + waitedFutures.add nimbus.engineApiServer.stop() if conf.maxPeers > 0: - await nimbus.networkLoop.cancelAndWait() + waitedFutures.add nimbus.networkLoop.cancelAndWait() if nimbus.peerManager.isNil.not: - await nimbus.peerManager.stop() + waitedFutures.add nimbus.peerManager.stop() + if nimbus.beaconSyncRef.isNil.not: + waitedFutures.add nimbus.beaconSyncRef.stop() if nimbus.metricsServer.isNil.not: - await nimbus.metricsServer.stop() + waitedFutures.add nimbus.metricsServer.stop() + + waitedFutures.add nimbus.fc.stopProcessingQueue() + + let + timeout = chronos.seconds(5) + completed = await withTimeout(allFutures(waitedFutures), timeout) + if not completed: + trace "Nimbus.stop(): timeout reached", timeout, + futureErrors = waitedFutures.filterIt(it.error != nil).mapIt(it.error.msg) {.pop.} diff --git a/execution_chain/nimbus_execution_client.nim b/execution_chain/nimbus_execution_client.nim index f781f52ccb..07a7d608c1 100644 --- a/execution_chain/nimbus_execution_client.nim +++ b/execution_chain/nimbus_execution_client.nim @@ -44,7 +44,8 @@ proc basicServices(nimbus: NimbusNode, # Setup the chain let fc = ForkedChainRef.init(com, eagerStateRoot = conf.eagerStateRootCheck, - persistBatchSize=conf.persistBatchSize) + persistBatchSize=conf.persistBatchSize, + enableQueue = true) fc.deserialize().isOkOr: warn "Loading block DAG from database", msg=error diff --git a/execution_chain/sync/beacon/worker/blocks_staged/staged_blocks.nim 
b/execution_chain/sync/beacon/worker/blocks_staged/staged_blocks.nim index a5b86f78b0..1d32df2dc0 100644 --- a/execution_chain/sync/beacon/worker/blocks_staged/staged_blocks.nim +++ b/execution_chain/sync/beacon/worker/blocks_staged/staged_blocks.nim @@ -175,7 +175,7 @@ proc blocksImport*( continue try: - (await ctx.chain.importBlock(blocks[n])).isOkOr: + (await ctx.chain.queueImportBlock(blocks[n])).isOkOr: # The way out here is simply to re-compile the block queue. At any # point, the `FC` module data area might have been moved to a new # canonical branch. @@ -196,7 +196,7 @@ proc blocksImport*( # Allow pseudo/async thread switch. (await ctx.updateAsyncTasks()).isOkOr: break loop - + info "Imported blocks", iv=(if iv.minPt <= ctx.blk.topImported: (iv.minPt, ctx.blk.topImported).bnStr else: "n/a"), nBlocks=(ctx.blk.topImported - iv.minPt + 1), diff --git a/hive_integration/nodocker/engine/engine_env.nim b/hive_integration/nodocker/engine/engine_env.nim index 855be05d7c..94919e256d 100644 --- a/hive_integration/nodocker/engine/engine_env.nim +++ b/hive_integration/nodocker/engine/engine_env.nim @@ -82,7 +82,7 @@ proc newEngineEnv*(conf: var NimbusConf, chainFile: string, enableAuth: bool): E let node = setupEthNode(conf, ctx) com = makeCom(conf) - chain = ForkedChainRef.init(com) + chain = ForkedChainRef.init(com, enableQueue = true) txPool = TxPoolRef.new(chain) node.addEthHandlerCapability(txPool) @@ -133,6 +133,7 @@ proc close*(env: EngineEnv) = waitFor env.node.closeWait() waitFor env.client.close() waitFor env.server.closeWait() + waitFor env.chain.stopProcessingQueue() proc setRealTTD*(env: EngineEnv) = let genesis = env.com.genesisHeader diff --git a/tests/test_engine_api.nim b/tests/test_engine_api.nim index 60f807ab3a..32e9a6bf79 100644 --- a/tests/test_engine_api.nim +++ b/tests/test_engine_api.nim @@ -89,7 +89,7 @@ proc setupEnv(envFork: HardFork = MergeFork, let com = setupCom(conf) - chain = ForkedChainRef.init(com) + chain = ForkedChainRef.init(com, 
enableQueue = true) txPool = TxPoolRef.new(chain) let @@ -117,6 +117,7 @@ proc setupEnv(envFork: HardFork = MergeFork, proc close(env: TestEnv) = waitFor env.client.close() waitFor env.server.closeWait() + waitFor env.chain.stopProcessingQueue() proc runBasicCycleTest(env: TestEnv): Result[void, string] = let From 98d8b78f00994478951cbf371207828eed5ef838 Mon Sep 17 00:00:00 2001 From: bhartnett <51288821+bhartnett@users.noreply.github.com> Date: Wed, 28 May 2025 13:50:09 +0800 Subject: [PATCH 047/138] Portal client: Retry failed offers and make content queue size configurable (#3335) --- portal/client/nimbus_portal_client.nim | 1 + portal/client/nimbus_portal_client_conf.nim | 7 +++++++ portal/evm/async_evm.nim | 4 +++- portal/network/beacon/beacon_network.nim | 4 +++- portal/network/history/history_network.nim | 4 +++- portal/network/portal_node.nim | 4 ++++ portal/network/state/state_network.nim | 6 ++---- portal/network/wire/portal_protocol.nim | 7 ++++++- 8 files changed, 29 insertions(+), 8 deletions(-) diff --git a/portal/client/nimbus_portal_client.nim b/portal/client/nimbus_portal_client.nim index e7e0a3e790..a9bf09af7f 100644 --- a/portal/client/nimbus_portal_client.nim +++ b/portal/client/nimbus_portal_client.nim @@ -249,6 +249,7 @@ proc run(portalClient: PortalClient, config: PortalConf) {.raises: [CatchableErr storageCapacity: config.storageCapacityMB * 1_000_000, contentRequestRetries: config.contentRequestRetries.int, contentQueueWorkers: config.contentQueueWorkers, + contentQueueSize: config.contentQueueSize, ) node = PortalNode.new( diff --git a/portal/client/nimbus_portal_client_conf.nim b/portal/client/nimbus_portal_client_conf.nim index f4894f0d41..65651a13c6 100644 --- a/portal/client/nimbus_portal_client_conf.nim +++ b/portal/client/nimbus_portal_client_conf.nim @@ -431,6 +431,13 @@ type name: "debug-content-queue-workers" .}: int + contentQueueSize* {. 
+ hidden, + desc: "Size of the in memory content queue.", + defaultValue: 50, + name: "debug-content-queue-size" + .}: int + case cmd* {.command, defaultValue: noCommand.}: PortalCmd of noCommand: discard diff --git a/portal/evm/async_evm.nim b/portal/evm/async_evm.nim index a286f4d5d8..9922431da1 100644 --- a/portal/evm/async_evm.nim +++ b/portal/evm/async_evm.nim @@ -246,8 +246,10 @@ proc callFetchingState( return err("Unable to get code") vmState.ledger.setCode(q.address, code) fetchedCode.incl(q.address) + except CancelledError as e: + raise e except CatchableError as e: - raise newException(CancelledError, e.msg) + raiseAssert(e.msg) # Shouldn't happen evmResult.toCallResult() diff --git a/portal/network/beacon/beacon_network.nim b/portal/network/beacon/beacon_network.nim index 96af5ad11d..66e76ddd75 100644 --- a/portal/network/beacon/beacon_network.nim +++ b/portal/network/beacon/beacon_network.nim @@ -200,9 +200,11 @@ proc new*( bootstrapRecords: openArray[Record] = [], portalConfig: PortalProtocolConfig = defaultPortalProtocolConfig, contentQueueWorkers = 8, + contentQueueSize = 50, ): T = let - contentQueue = newAsyncQueue[(Opt[NodeId], ContentKeysList, seq[seq[byte]])](50) + contentQueue = + newAsyncQueue[(Opt[NodeId], ContentKeysList, seq[seq[byte]])](contentQueueSize) stream = streamManager.registerNewStream(contentQueue) diff --git a/portal/network/history/history_network.nim b/portal/network/history/history_network.nim index d9ca1e03a1..d7f539ad67 100644 --- a/portal/network/history/history_network.nim +++ b/portal/network/history/history_network.nim @@ -354,9 +354,11 @@ proc new*( portalConfig: PortalProtocolConfig = defaultPortalProtocolConfig, contentRequestRetries = 1, contentQueueWorkers = 8, + contentQueueSize = 50, ): T = let - contentQueue = newAsyncQueue[(Opt[NodeId], ContentKeysList, seq[seq[byte]])](50) + contentQueue = + newAsyncQueue[(Opt[NodeId], ContentKeysList, seq[seq[byte]])](contentQueueSize) stream = 
streamManager.registerNewStream(contentQueue) diff --git a/portal/network/portal_node.nim b/portal/network/portal_node.nim index 18c0d0917d..3baa83c327 100644 --- a/portal/network/portal_node.nim +++ b/portal/network/portal_node.nim @@ -36,6 +36,7 @@ type storageCapacity*: uint64 contentRequestRetries*: int contentQueueWorkers*: int + contentQueueSize*: int PortalNode* = ref object discovery: protocol.Protocol @@ -122,6 +123,7 @@ proc new*( bootstrapRecords = bootstrapRecords, portalConfig = config.portalConfig, contentQueueWorkers = config.contentQueueWorkers, + contentQueueSize = config.contentQueueSize, ) Opt.some(beaconNetwork) else: @@ -146,6 +148,7 @@ proc new*( portalConfig = config.portalConfig, contentRequestRetries = config.contentRequestRetries, contentQueueWorkers = config.contentQueueWorkers, + contentQueueSize = config.contentQueueSize, ) ) else: @@ -165,6 +168,7 @@ proc new*( not config.disableStateRootValidation, contentRequestRetries = config.contentRequestRetries, contentQueueWorkers = config.contentQueueWorkers, + contentQueueSize = config.contentQueueSize, ) ) else: diff --git a/portal/network/state/state_network.nim b/portal/network/state/state_network.nim index 25a023946f..efff7ece0e 100644 --- a/portal/network/state/state_network.nim +++ b/portal/network/state/state_network.nim @@ -58,12 +58,10 @@ proc new*( validateStateIsCanonical = true, contentRequestRetries = 1, contentQueueWorkers = 8, + contentQueueSize = 50, ): T = - doAssert(contentRequestRetries >= 0) - doAssert(contentQueueWorkers >= 1) - let - cq = newAsyncQueue[(Opt[NodeId], ContentKeysList, seq[seq[byte]])](50) + cq = newAsyncQueue[(Opt[NodeId], ContentKeysList, seq[seq[byte]])](contentQueueSize) s = streamManager.registerNewStream(cq) portalProtocol = PortalProtocol.new( baseProtocol, diff --git a/portal/network/wire/portal_protocol.nim b/portal/network/wire/portal_protocol.nim index a541388af6..f3ca3b35d0 100644 --- a/portal/network/wire/portal_protocol.nim +++ 
b/portal/network/wire/portal_protocol.nim @@ -1230,7 +1230,12 @@ proc offerRateLimited*( except CatchableError as e: raiseAssert(e.msg) # Shouldn't happen - let res = await p.offer(offer) + var res = await p.offer(offer) + + if res.isErr(): + # Retry the offer once if it failed for any reason + res = await p.offer(offer) + if res.isOk(): portal_gossip_offers_successful.inc(labelValues = [$p.protocolId]) else: From 0c877f200e380f0fb256b82894a5f7efba8c59a9 Mon Sep 17 00:00:00 2001 From: bhartnett <51288821+bhartnett@users.noreply.github.com> Date: Thu, 29 May 2025 09:30:51 +0800 Subject: [PATCH 048/138] Update Nimbus README (#3339) --- Makefile | 4 ++-- README.md | 41 ++++++++++++++++------------------------- nimbus.nimble | 8 +++----- 3 files changed, 21 insertions(+), 32 deletions(-) diff --git a/Makefile b/Makefile index 74681a2009..b78c1836c4 100644 --- a/Makefile +++ b/Makefile @@ -291,11 +291,11 @@ portal-test-reproducibility: # Portal tests all_history_network_custom_chain_tests: | build deps echo -e $(BUILD_MSG) "build/$@" && \ - $(ENV_SCRIPT) nim c -r $(NIM_PARAMS) -d:chronicles_log_level=ERROR -d:mergeBlockNumber:38130 -d:nimbus_db_backend=sqlite -o:build/$@ "portal/tests/history_network_tests/$@.nim" + $(ENV_SCRIPT) nim c -r $(NIM_PARAMS) -d:chronicles_log_level=ERROR -d:mergeBlockNumber:38130 -o:build/$@ "portal/tests/history_network_tests/$@.nim" all_portal_tests: | build deps echo -e $(BUILD_MSG) "build/$@" && \ - $(ENV_SCRIPT) nim c -r $(NIM_PARAMS) -d:chronicles_log_level=ERROR -d:nimbus_db_backend=sqlite -o:build/$@ "portal/tests/$@.nim" + $(ENV_SCRIPT) nim c -r $(NIM_PARAMS) -d:chronicles_log_level=ERROR -o:build/$@ "portal/tests/$@.nim" # builds and runs the Portal test suite portal-test: | all_portal_tests all_history_network_custom_chain_tests diff --git a/README.md b/README.md index 879cb715ef..3579a5b3bd 100644 --- a/README.md +++ b/README.md @@ -22,9 +22,6 @@ All consensus-layer client development is happening in parallel in the ## Development 
Updates -Monthly development updates are shared -[here](https://hackmd.io/jRpxY4WBQJ-hnsKaPDYqTw). - For more detailed write-ups on the development progress, follow the [Nimbus blog](https://blog.nimbus.team/). @@ -53,18 +50,18 @@ nix-shell default.nix # You'll run `make update` after each `git pull`, in the future, to keep those submodules up to date. # Assuming you have 4 CPU cores available, you can ask Make to run 4 parallel jobs, with "-j4". -make -j4 nimbus +make -j4 nimbus_execution_client # See available command line options -build/nimbus --help +build/nimbus_execution_client --help # Start syncing with mainnet -build/nimbus +build/nimbus_execution_client # Update to latest version git pull && make update # Build the newly downloaded version -make -j4 nimbus +make -j4 nimbus_execution_client # Run tests make test @@ -112,7 +109,7 @@ ln -s mingw32-make.exe make.exe You can now follow those instructions in the previous section. For example: ```bash -make nimbus # build the Nimbus execution client binary +make nimbus_execution_client # build the Nimbus execution client binary make test # run the test suite # etc. ``` @@ -144,9 +141,9 @@ cd status # Raspberry pi doesn't include /usr/local/lib in library search path - need to add export LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH -git clone https://github.com/status-im/nimbus.git +git clone https://github.com/status-im/nimbus-eth1.git -cd nimbus +cd nimbus-eth1 # Follow instructions above! 
``` @@ -168,15 +165,15 @@ Note, the Ubuntu PRoot is known to contain all Nimbus prerequisites compiled on apt install git make gcc # Clone repo and build Nimbus just like above -git clone https://github.com/status-im/nimbus.git +git clone https://github.com/status-im/nimbus-eth1.git -cd nimbus +cd nimbus_execution_client -make +make update -make nimbus +make nimbus_execution_client -build/nimbus +build/nimbus_execution_client ``` ### Experimental make variables @@ -201,12 +198,6 @@ has the same effect as <variable>=1 (ditto for other numbers.) Interesting Make variables and targets are documented in the [nimbus-build-system](https://github.com/status-im/nimbus-build-system) repo. -- you can switch the DB backend with a Nim compiler define: - `-d:nimbus_db_backend=...` where the (case-insensitive) value is one of - "rocksdb" (the default), "sqlite", "lmdb" - -- the Premix debugging tools are [documented separately](premix/readme.md) - - you can control the Makefile's verbosity with the V variable (defaults to 0): ```bash @@ -217,8 +208,8 @@ make V=2 test # even more verbose - same for the [Chronicles log level](https://github.com/status-im/nim-chronicles#chronicles_log_level): ```bash -make LOG_LEVEL=DEBUG nimbus # this is the default -make LOG_LEVEL=TRACE nimbus # log everything +make LOG_LEVEL=DEBUG nimbus_execution_client # this is the default +make LOG_LEVEL=TRACE nimbus_execution_client # log everything ``` - pass arbitrary parameters to the Nim compiler: @@ -315,7 +306,7 @@ Install Prometheus and Grafana. On Gentoo, it's `emerge prometheus grafana-bin`. ```bash # build Nimbus execution client -make nimbus +make nimbus_execution_client # the Prometheus daemon will create its data dir in the current dir, so give it its own directory mkdir ../my_metrics # copy the basic config file over there @@ -349,7 +340,7 @@ to the combined view. To edit a panel, click on its title and select "Edit". 
### Troubleshooting -Report any errors you encounter, please, if not [already documented](https://github.com/status-im/nimbus/issues)! +Report any errors you encounter, please, if not [already documented](https://github.com/status-im/nimbus-eth1/issues)! * Turn it off and on again: diff --git a/nimbus.nimble b/nimbus.nimble index 8a3484a469..8306149a9b 100644 --- a/nimbus.nimble +++ b/nimbus.nimble @@ -102,13 +102,11 @@ task nimbus_portal_client, "Build nimbus_portal_client": buildBinary "nimbus_portal_client", "portal/client/", "-d:chronicles_log_level=TRACE" task portal_test, "Run Portal tests": - # Need the nimbus_db_backend in state network tests as we need a Hexary to - # start from, even though it only uses the MemoryDb. - test "portal/tests/history_network_tests/", "all_history_network_custom_chain_tests", "-d:chronicles_log_level=ERROR -d:nimbus_db_backend=sqlite" + test "portal/tests/history_network_tests/", "all_history_network_custom_chain_tests", "-d:chronicles_log_level=ERROR" # Seperate build for these tests as they are run with a low `mergeBlockNumber` # to make the tests faster. Using the real mainnet merge block number is not # realistic for these tests. 
- test "portal/tests", "all_portal_tests", "-d:chronicles_log_level=ERROR -d:nimbus_db_backend=sqlite -d:mergeBlockNumber:38130" + test "portal/tests", "all_portal_tests", "-d:chronicles_log_level=ERROR -d:mergeBlockNumber:38130" task utp_test_app, "Build uTP test app": buildBinary "utp_test_app", "portal/tools/utp_testing/", "-d:chronicles_log_level=TRACE" @@ -125,7 +123,7 @@ task nimbus_verified_proxy, "Build Nimbus verified proxy": buildBinary "nimbus_verified_proxy", "nimbus_verified_proxy/", "-d:chronicles_log_level=TRACE" task nimbus_verified_proxy_test, "Run Nimbus verified proxy tests": - test "nimbus_verified_proxy/tests", "all_proxy_tests", "-d:chronicles_log_level=ERROR -d:nimbus_db_backend=sqlite" + test "nimbus_verified_proxy/tests", "all_proxy_tests", "-d:chronicles_log_level=ERROR" task build_fuzzers, "Build fuzzer test cases": # This file is there to be able to quickly build the fuzzer test cases in From 77609948bfc77c4ea009f9f5bc52a6fcae9f3bf6 Mon Sep 17 00:00:00 2001 From: andri lim Date: Thu, 29 May 2025 13:32:53 +0700 Subject: [PATCH 049/138] Fix outdated metric visualisation section of README.md (#3341) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 3579a5b3bd..1dbb70cbd3 100644 --- a/README.md +++ b/README.md @@ -315,7 +315,7 @@ cp -a examples/prometheus.yml ../my_metrics/ cd ../my_metrics prometheus --config.file=prometheus.yml # loads ./prometheus.yml, writes metric data to ./data # start a fresh Nimbus sync and export metrics -rm -rf ~/.cache/execution_chain/db; ./build/nimbus_execution_client --prune:archive --metricsServer +rm -rf ~/.cache/execution_chain/db; ./build/nimbus_execution_client --metrics:true ``` Start the Grafana server. On Gentoo it's `/etc/init.d/grafana start`. 
Go to From 7864cdc85fff0c1d7aa393cec3a61f1043b3852a Mon Sep 17 00:00:00 2001 From: Jacek Sieka Date: Thu, 29 May 2025 10:54:51 +0200 Subject: [PATCH 050/138] Path-based VertexID (#3251) VertexID has 64 bits of values but currently the chain state database uses only a tiny fraction of that (~27 bits). Here, we split up the number space into a fixed portion statically allocated from the MPT path and a dynamic portion for leaves and storage slots. The static portion simply allocates a (breadth-first) number based on the first nibbles in the address/path while any "deeper" paths instead get a dynamic VertexID like before. Since the VertexID is path-based, we can more or less guess the VertexID of any node whose path we know based on the "average" depth of the state trie. When we're lucky, a single lookup is sufficient to find the node instead of a one-by-one traversal of each level. Even in the case that a single lookup is not enough and the actual node is "deeper" than the guess, the starting point helps skip a few levels at least. Tree depth is estimated by keeping track of hits and misses and occasionally making an adjustment in the direction of the most misses. On average, this improves the import speed by 25% for the first 15M blocks where the lookup depth is guessed to be 7 levels - deepening the trie by one more level (when more accounts eventually are added) would see even better performance. Using 8 levels of statically assigned ids results in 2**32 bits left for dynamic ids / storage slots - this should by far be enough for any foreseeable lifetime of the application, especially because large parts of "current" usage of vertexid space remain used by actual nodes. The resulting lookup structure can be thought of as a hybrid between fully path-based lookups and the current "sparse" id mapping. 
made with coffee sponsored by 0x-r4bbit fix off-by-one cleanups Co-authored-by: Advaita Saha --- execution_chain/db/aristo/aristo_blobify.nim | 2 +- .../db/aristo/aristo_check/check_top.nim | 7 -- .../db/aristo/aristo_check/check_twig.nim | 4 +- .../db/aristo/aristo_constants.nim | 26 +++-- execution_chain/db/aristo/aristo_desc.nim | 21 ++++ execution_chain/db/aristo/aristo_fetch.nim | 105 ++++++++++++++++-- execution_chain/db/aristo/aristo_hike.nim | 5 +- .../aristo/aristo_init/rocks_db/rdb_desc.nim | 2 +- execution_chain/db/aristo/aristo_merge.nim | 66 ++++++----- execution_chain/db/aristo/aristo_proof.nim | 2 +- execution_chain/db/aristo/aristo_vid.nim | 39 ++++++- .../db/core_db/backend/aristo_rocksdb.nim | 2 +- tests/test_aristo.nim | 2 +- tests/test_aristo/test_compute.nim | 4 +- tests/test_aristo/test_tx_frame.nim | 4 +- tests/test_aristo/test_vid.nim | 62 +++++++++++ 16 files changed, 286 insertions(+), 67 deletions(-) create mode 100644 tests/test_aristo/test_vid.nim diff --git a/execution_chain/db/aristo/aristo_blobify.nim b/execution_chain/db/aristo/aristo_blobify.nim index d9ad5069b4..4fa8cdc04a 100644 --- a/execution_chain/db/aristo/aristo_blobify.nim +++ b/execution_chain/db/aristo/aristo_blobify.nim @@ -51,7 +51,7 @@ template data*(v: SbeBuf): openArray[byte] = func blobify*(rvid: RootedVertexID): RVidBuf = # Length-prefixed root encoding creates a unique and common prefix for all # verticies sharing the same root - # TODO evaluate an encoding that colocates short roots (like VertexID(1)) with + # TODO evaluate an encoding that colocates short roots (like STATE_ROOT_VID) with # the length let root = rvid.root.blobify() result.buf[0] = root.len diff --git a/execution_chain/db/aristo/aristo_check/check_top.nim b/execution_chain/db/aristo/aristo_check/check_top.nim index bf7549189d..a8f5113e8e 100644 --- a/execution_chain/db/aristo/aristo_check/check_top.nim +++ b/execution_chain/db/aristo/aristo_check/check_top.nim @@ -113,13 +113,6 @@ proc 
checkTopCommon*( if rc.value[0].isValid: return err((rvid.vid,CheckAnyVtxEmptyKeyExpected)) - if vTop.distinctBase < LEAST_FREE_VID: - # Verify that all vids are below `LEAST_FREE_VID` - if topVid.distinctBase < LEAST_FREE_VID: - for (rvid,key) in db.layersWalkKey: - if key.isValid and LEAST_FREE_VID <= rvid.vid.distinctBase: - return err((topVid,CheckAnyVTopUnset)) - # If present, there are at least as many deleted hashes as there are deleted # vertices. if kMapNilCount != 0 and kMapNilCount < nNilVtx: diff --git a/execution_chain/db/aristo/aristo_check/check_twig.nim b/execution_chain/db/aristo/aristo_check/check_twig.nim index c26dc310c5..72fc8bdee8 100644 --- a/execution_chain/db/aristo/aristo_check/check_twig.nim +++ b/execution_chain/db/aristo/aristo_check/check_twig.nim @@ -25,7 +25,7 @@ proc checkTwig*( ): Result[void,AristoError] = let proof = ? db.makeAccountProof(accPath) - key = ? db.computeKey (VertexID(1),VertexID(1)) + key = ? db.computeKey (STATE_ROOT_VID,STATE_ROOT_VID) discard ? proof[0].verifyProof(key.to(Hash32), accPath) ok() @@ -38,7 +38,7 @@ proc checkTwig*( let proof = ? db.makeStorageProof(accPath, stoPath) vid = ? db.fetchStorageID accPath - key = ? db.computeKey (VertexID(1),vid) + key = ? db.computeKey (STATE_ROOT_VID,vid) discard ? proof[0].verifyProof(key.to(Hash32), stoPath) ok() diff --git a/execution_chain/db/aristo/aristo_constants.nim b/execution_chain/db/aristo/aristo_constants.nim index 7148ada300..efcc669436 100644 --- a/execution_chain/db/aristo/aristo_constants.nim +++ b/execution_chain/db/aristo/aristo_constants.nim @@ -27,19 +27,25 @@ const VOID_HASH_KEY* = HashKey() ## Void equivalent for Merkle hash value - LEAST_FREE_VID* = 100 - ## Vids smaller are used as known state roots and cannot be recycled. Only - ## the `VertexID(1)` state root is used by the `Aristo` methods. The other - ## numbers smaller than `LEAST_FREE_VID` may be used by application - ## functions with fixed assignments of the type of a state root (e.g. 
for - ## a receipt or a transaction root.) + STATE_ROOT_VID* = VertexID(1) + ## VertexID of state root entry in the MPT + + STATIC_VID_LEVELS* = 8 + ## Number of MPT levels in the account trie that get a fixed VertexID based + ## on the initial nibbles of the path. We'll consume a little bit more than + ## `STATIC_VID_LEVELS*4` bits for the static part of the vid space: + ## + ## STATE_ROOT_VID + 16^0 + 16^1 + ... + 16^STATIC_VID_LEVELS + + FIRST_DYNAMIC_VID* = ## First VertexID of the sparse/dynamic part of the MPT + block: + var v = uint64(STATE_ROOT_VID) + for i in 0..STATIC_VID_LEVELS: + v += 1'u64 shl (i * 4) + v ACC_LRU_SIZE* = 1024 * 1024 ## LRU cache size for accounts that have storage, see `.accLeaves` and ## `.stoLeaves` fields of the main descriptor. -static: - # must stay away from `VertexID(1)` and `VertexID(2)` - doAssert 2 < LEAST_FREE_VID - # End diff --git a/execution_chain/db/aristo/aristo_desc.nim b/execution_chain/db/aristo/aristo_desc.nim index 1d56cd2f82..c27ff3ad47 100644 --- a/execution_chain/db/aristo/aristo_desc.nim +++ b/execution_chain/db/aristo/aristo_desc.nim @@ -125,6 +125,10 @@ type # Debugging data below, might go away in future xMap*: Table[HashKey,RootedVertexID] ## For pretty printing/debugging + staticLevel*: int + ## MPT level where "most" leaves can be found, for static vid lookups + lookups*: tuple[lower, hits, higher: int] + Leg* = object ## For constructing a `VertexPath` wp*: VidVtxPair ## Vertex ID and data ref @@ -238,6 +242,23 @@ proc deltaAtLevel*(db: AristoTxRef, level: int): AristoTxRef = return frame nil +func getStaticLevel*(db: AristoDbRef): int = + # Retrieve the level where we can expect to find a leaf, updating it based on + # recent lookups + + if db.lookups[0] + db.lookups[1] + db.lookups[2] >= 1024: + if db.lookups.lower > db.lookups.hits + db.lookups.higher: + db.staticLevel = max(1, db.staticLevel - 1) + elif db.lookups.higher > db.lookups.hits + db.lookups.lower: + db.staticLevel = min(STATIC_VID_LEVELS, 
db.staticLevel + 1) + reset(db.lookups) + + if db.staticLevel == 0: + db.staticLevel = 1 + + db.staticLevel + + # ------------------------------------------------------------------------------ # End # ------------------------------------------------------------------------------ diff --git a/execution_chain/db/aristo/aristo_fetch.nim b/execution_chain/db/aristo/aristo_fetch.nim index 44aa55c783..82ead79a90 100644 --- a/execution_chain/db/aristo/aristo_fetch.nim +++ b/execution_chain/db/aristo/aristo_fetch.nim @@ -17,7 +17,7 @@ import std/typetraits, eth/common/[base, hashes], results, - "."/[aristo_compute, aristo_desc, aristo_get, aristo_layers, aristo_hike] + "."/[aristo_compute, aristo_desc, aristo_get, aristo_layers, aristo_hike, aristo_vid] # ------------------------------------------------------------------------------ # Private functions @@ -26,9 +26,10 @@ import proc retrieveLeaf( db: AristoTxRef; root: VertexID; - path: Hash32; + path: NibblesBuf; + next = VertexID(0), ): Result[VertexRef,AristoError] = - for step in stepUp(NibblesBuf.fromBytes(path.data), root, db): + for step in stepUp(path, root, db, next): let vtx = step.valueOr: if error in HikeAcceptableStopsNotFound: return err(FetchPathNotFound) @@ -53,6 +54,80 @@ proc cachedStoLeaf*(db: AristoTxRef; mixPath: Hash32): Opt[StoLeafRef] = db.db.stoLeaves.get(mixPath) or Opt.none(StoLeafRef) +proc retrieveAccStatic( + db: AristoTxRef; + accPath: Hash32; + ): Result[(AccLeafRef, NibblesBuf, VertexID),AristoError] = + # A static VertexID essentially splits the path into a prefix encoded in the + # vid and the rest of the path stored as normal - here, instead of traversing + # the trie from the root and selecting a path nibble by nibble we travers the + # trie starting at `staticLevel` and search towards the root until either we + # hit the node we're looking for or at least a branch from which we can + # shorten the lookup. 
+ let staticLevel = db.db.getStaticLevel() + + var path = NibblesBuf.fromBytes(accPath.data) + var next: VertexID + + for sl in countdown(staticLevel, 0): + template countHitOrLower() = + if sl == staticLevel: + db.db.lookups.hits += 1 + else: + db.db.lookups.lower += 1 + + let + svid = path.staticVid(sl) + vtx = db.getVtxRc((STATE_ROOT_VID, svid)).valueOr: + # Either the node doesn't exist or our guess used too many nibbles and + # the trie is not yet this deep at the given path - either way, we'll + # try a less deep guess which will result either in a branch, + # non-matching leaf or more missing verticies. + continue + + case vtx[0].vType + of Leaves: + let vtx = AccLeafRef(vtx[0]) + + countHitOrLower() + return + if vtx.pfx != path.slice(sl): # Same prefix, different path + err FetchPathNotFound + else: + ok (vtx, path, next) + of ExtBranch: + let vtx = ExtBranchRef(vtx[0]) + + if vtx.pfx != path.slice(sl, sl + vtx.pfx.len): # Same prefix, different path + countHitOrLower() + return err FetchPathNotFound + + let nibble = path[sl + vtx.pfx.len] + next = vtx.bVid(nibble) + + if not next.isValid(): + countHitOrLower() + return err FetchPathNotFound + + path = path.slice(sl + vtx.pfx.len + 1) + + break # Continue the search down the branch children, starting at `next` + of Branch: # Same as ExtBranch with vtx.pfx.len == 0! 
+ let vtx = BranchRef(vtx[0]) + + let nibble = path[sl] + next = vtx.bVid(nibble) + + if not next.isValid(): + countHitOrLower() + return err FetchPathNotFound + + path = path.slice(sl + 1) + break # Continue the search down the branch children, starting at `next` + + # We end up here when we have to continue the search down a branch + ok (nil, path, next) + proc retrieveAccLeaf( db: AristoTxRef; accPath: Hash32; @@ -62,14 +137,29 @@ proc retrieveAccLeaf( return err(FetchPathNotFound) return ok leafVtx[] + let (staticVtx, path, next) = db.retrieveAccStatic(accPath).valueOr: + if error == FetchPathNotFound: + db.db.accLeaves.put(accPath, nil) + return err(error) + + if staticVtx.isValid(): + db.db.accLeaves.put(accPath, staticVtx) + return ok staticVtx + # Updated payloads are stored in the layers so if we didn't find them there, # it must have been in the database let - leafVtx = db.retrieveLeaf(VertexID(1), accPath).valueOr: + leafVtx = db.retrieveLeaf(STATE_ROOT_VID, path, next).valueOr: if error == FetchPathNotFound: + # The branch was the deepest level where a vertex actually existed + # meaning that it was a hit - else searches for non-existing paths would + # skew the results towards more depth than exists in the MPT + db.db.lookups.hits += 1 db.db.accLeaves.put(accPath, nil) return err(error) + db.db.lookups.higher += 1 + db.db.accLeaves.put(accPath, AccLeafRef(leafVtx)) ok AccLeafRef(leafVtx) @@ -130,7 +220,7 @@ proc fetchAccountHike*( if leaf == Opt.some(AccLeafRef(nil)): return err(FetchAccInaccessible) - accPath.hikeUp(VertexID(1), db, leaf, accHike).isOkOr: + accPath.hikeUp(STATE_ROOT_VID, db, leaf, accHike).isOkOr: return err(FetchAccInaccessible) # Extract the account payload from the leaf @@ -163,7 +253,8 @@ proc retrieveStoragePayload( # Updated payloads are stored in the layers so if we didn't find them there, # it must have been in the database - let leafVtx = db.retrieveLeaf(? 
db.fetchStorageIdImpl(accPath), stoPath).valueOr: + let leafVtx = db.retrieveLeaf( + ? db.fetchStorageIdImpl(accPath), NibblesBuf.fromBytes(stoPath.data)).valueOr: if error == FetchPathNotFound: db.db.stoLeaves.put(mixPath, nil) return err(error) @@ -214,7 +305,7 @@ proc fetchStateRoot*( db: AristoTxRef; ): Result[Hash32,AristoError] = ## Fetch the Merkle hash of the account root. - db.retrieveMerkleHash(VertexID(1)) + db.retrieveMerkleHash(STATE_ROOT_VID) proc hasPathAccount*( db: AristoTxRef; diff --git a/execution_chain/db/aristo/aristo_hike.nim b/execution_chain/db/aristo/aristo_hike.nim index f278044217..aba286367d 100644 --- a/execution_chain/db/aristo/aristo_hike.nim +++ b/execution_chain/db/aristo/aristo_hike.nim @@ -124,13 +124,14 @@ iterator stepUp*( path: NibblesBuf; # Partial path root: VertexID; # Start vertex db: AristoTxRef; # Database + next = VertexID(0) ): Result[VertexRef, AristoError] = ## For the argument `path`, iterate over the logest possible path in the ## argument database `db`. 
var path = path - next = root - vtx: VertexRef + next = if next == VertexID(0): root else: next + vtx = VertexRef(nil) block iter: while true: (vtx, path, next) = step(path, (root, next), db).valueOr: diff --git a/execution_chain/db/aristo/aristo_init/rocks_db/rdb_desc.nim b/execution_chain/db/aristo/aristo_init/rocks_db/rdb_desc.nim index 612a7276f5..26b41983a4 100644 --- a/execution_chain/db/aristo/aristo_init/rocks_db/rdb_desc.nim +++ b/execution_chain/db/aristo/aristo_init/rocks_db/rdb_desc.nim @@ -97,7 +97,7 @@ template toOpenArray*(xid: AdminTabID): openArray[byte] = xid.uint64.toBytesBE.toOpenArray(0,7) template to*(v: RootedVertexID, T: type RdbStateType): RdbStateType = - if v.root == VertexID(1): RdbStateType.World else: RdbStateType.Account + if v.root == STATE_ROOT_VID: RdbStateType.World else: RdbStateType.Account template to*(v: VertexType, T: type RdbVertexType): RdbVertexType = case v diff --git a/execution_chain/db/aristo/aristo_merge.nim b/execution_chain/db/aristo/aristo_merge.nim index 2177aa0d2b..16a015d9bf 100644 --- a/execution_chain/db/aristo/aristo_merge.nim +++ b/execution_chain/db/aristo/aristo_merge.nim @@ -55,6 +55,7 @@ proc mergePayloadImpl[LeafType, T]( ## var path = NibblesBuf.fromBytes(path.data) + pos = 0 cur = root (vtx, _) = db.getVtxRc((root, cur)).valueOr: if error != GetVtxNotFound: @@ -71,12 +72,12 @@ proc mergePayloadImpl[LeafType, T]( for i in 2..vids.len: db.layersResKey((root, vids[^i]), vtxs[^i]) - while path.len > 0: + while pos < path.len: # Clear existing merkle keys along the traversal path vids.add cur vtxs.add vtx - - let n = path.sharedPrefixLen(vtx.pfx) + var psuffix = path.slice(pos) + let n = psuffix.sharedPrefixLen(vtx.pfx) case vtx.vType of Leaves: let res = @@ -86,22 +87,28 @@ proc mergePayloadImpl[LeafType, T]( when payload is AristoAccount: if AccLeafRef(vtx).account == payload: return err(MergeNoAction) - let leafVtx = db.layersPutLeaf((root, cur), path, payload) + let leafVtx = db.layersPutLeaf((root, 
cur), psuffix, payload) leafVtx.stoID = AccLeafRef(vtx).stoID else: if StoLeafRef(vtx).stoData == payload: return err(MergeNoAction) - let leafVtx = db.layersPutLeaf((root, cur), path, payload) + let leafVtx = db.layersPutLeaf((root, cur), psuffix, payload) (leafVtx, nil, nil) else: # Turn leaf into a branch (or extension) then insert the two leaves # into the branch - let branch = - if n > 0: - ExtBranchRef.init(path.slice(0, n), db.vidFetch(16), 0) - else: - BranchRef.init(db.vidFetch(16), 0) + let + startVid = + if root == STATE_ROOT_VID: + db.accVidFetch(path.slice(0, pos + n) & NibblesBuf.nibble(0), 16) + else: + db.vidFetch(16) + branch = + if n > 0: + ExtBranchRef.init(psuffix.slice(0, n), startVid, 0) + else: + BranchRef.init(startVid, 0) let other = block: # Copy of existing leaf node, now one level deeper let local = branch.setUsed(vtx.pfx[n], true) @@ -114,8 +121,8 @@ proc mergePayloadImpl[LeafType, T]( db.layersPutLeaf((root, local), pfx, StoLeafRef(vtx).stoData) let leafVtx = block: # Newly inserted leaf node - let local = branch.setUsed(path[n], true) - db.layersPutLeaf((root, local), path.slice(n + 1), payload) + let local = branch.setUsed(psuffix[n], true) + db.layersPutLeaf((root, local), psuffix.slice(n + 1), payload) # Put the branch at the vid where the leaf was db.layersPutVtx((root, cur), branch) @@ -130,14 +137,15 @@ proc mergePayloadImpl[LeafType, T]( if vtx.pfx.len == n: # The existing branch is a prefix of the new entry let - nibble = path[vtx.pfx.len] + nibble = psuffix[vtx.pfx.len] next = BranchRef(vtx).bVid(nibble) if next.isValid: cur = next - path = path.slice(n + 1) + psuffix = psuffix.slice(n + 1) + pos += n + 1 vtx = - if leaf.isSome and leaf[].isValid and leaf[].pfx == path: + if leaf.isSome and leaf[].isValid and leaf[].pfx == psuffix: leaf[] else: (?db.getVtxRc((root, next)))[0] @@ -149,18 +157,24 @@ proc mergePayloadImpl[LeafType, T]( let local = BranchRef(brDup).setUsed(nibble, true) db.layersPutVtx((root, cur), brDup) - let 
leafVtx = db.layersPutLeaf((root, local), path.slice(n + 1), payload) + let leafVtx = db.layersPutLeaf((root, local), psuffix.slice(n + 1), payload) resetKeys() return ok((leafVtx, nil, nil)) else: # Partial path match - we need to split the existing branch at # the point of divergence, inserting a new branch - let branch = - if n > 0: - ExtBranchRef.init(path.slice(0, n), db.vidFetch(16), 0) - else: - BranchRef.init(db.vidFetch(16), 0) + let + startVid = + if root == STATE_ROOT_VID: + db.accVidFetch(path.slice(0, pos + n) & NibblesBuf.nibble(0), 16) + else: + db.vidFetch(16) + branch = + if n > 0: + ExtBranchRef.init(psuffix.slice(0, n), startVid, 0) + else: + BranchRef.init(startVid, 0) block: # Copy the existing vertex and add it to the new branch let @@ -176,8 +190,8 @@ proc mergePayloadImpl[LeafType, T]( ) let leafVtx = block: # add the new entry - let local = branch.setUsed(path[n], true) - db.layersPutLeaf((root, local), path.slice(n + 1), payload) + let local = branch.setUsed(psuffix[n], true) + db.layersPutLeaf((root, local), psuffix.slice(n + 1), payload) db.layersPutVtx((root, cur), branch) @@ -196,14 +210,14 @@ proc mergeAccountRecord*( accRec: AristoAccount; # Account data ): Result[bool,AristoError] = ## Merge the key-value-pair argument `(accKey,accRec)` as an account - ## ledger value, i.e. the the sub-tree starting at `VertexID(1)`. + ## ledger value, i.e. the the sub-tree starting at `STATE_ROOT_VID`. ## ## On success, the function returns `true` if the `accRec` argument was ## not on the database already or different from `accRec`, and `false` ## otherwise. 
## let updated = db.mergePayloadImpl( - VertexID(1), accPath, db.cachedAccLeaf(accPath), accRec + STATE_ROOT_VID, accPath, db.cachedAccLeaf(accPath), accRec ).valueOr: if error == MergeNoAction: return ok false @@ -269,7 +283,7 @@ proc mergeStorageData*( let leaf = AccLeafRef(accHike.legs[^1].wp.vtx).dup # Dup on modify leaf.stoID = useID db.layersPutAccLeaf(accPath, leaf) - db.layersPutVtx((VertexID(1), accHike.legs[^1].wp.vid), leaf) + db.layersPutVtx((STATE_ROOT_VID, accHike.legs[^1].wp.vid), leaf) ok() diff --git a/execution_chain/db/aristo/aristo_proof.nim b/execution_chain/db/aristo/aristo_proof.nim index f2845fe63f..ca8489ddf0 100644 --- a/execution_chain/db/aristo/aristo_proof.nim +++ b/execution_chain/db/aristo/aristo_proof.nim @@ -147,7 +147,7 @@ proc makeAccountProof*( db: AristoTxRef; accPath: Hash32; ): Result[(seq[seq[byte]],bool), AristoError] = - db.makeProof(VertexID(1), NibblesBuf.fromBytes accPath.data) + db.makeProof(STATE_ROOT_VID, NibblesBuf.fromBytes accPath.data) proc makeStorageProof*( db: AristoTxRef; diff --git a/execution_chain/db/aristo/aristo_vid.nim b/execution_chain/db/aristo/aristo_vid.nim index a5b4d69bfe..474041cc58 100644 --- a/execution_chain/db/aristo/aristo_vid.nim +++ b/execution_chain/db/aristo/aristo_vid.nim @@ -13,23 +13,54 @@ ## {.push raises: [].} -import - ./aristo_desc +import ./aristo_desc + +export aristo_desc # ------------------------------------------------------------------------------ # Public functions # ------------------------------------------------------------------------------ +proc staticVid*(accPath: NibblesBuf, level: int): VertexID = + ## Compute a static vid based on the initial nibbles of the given path. The + ## vid assignment is done in a breadth-first manner where numerically, each + ## level follows the previous one meaning that the root occupies VertexID(1), + ## its direct children 2-17 etc. 
+ ## + ## The level-based sorting ensures that children of each level are colocated + ## on disk reducing the number of disk reads needed to load all children of a + ## node which is useful when computing hash keys. + if level == 0: + STATE_ROOT_VID + else: + var v = uint64(STATE_ROOT_VID) + for i in 0 ..< level: + v += 1'u64 shl (i * 4) + + v += uint64(accPath[i]) shl ((level - i - 1) * 4) + + VertexID(v) + proc vidFetch*(db: AristoTxRef, n = 1): VertexID = ## Fetch next vertex ID. ## - if db.vTop == 0: - db.vTop = VertexID(LEAST_FREE_VID) + if db.vTop == 0: + db.vTop = VertexID(FIRST_DYNAMIC_VID - 1) var ret = db.vTop ret.inc db.vTop.inc(n) ret +proc accVidFetch*(db: AristoTxRef, path: NibblesBuf, n = 1): VertexID = + ## Fetch next vertex ID. + ## + let res = + if path.len <= STATIC_VID_LEVELS: + path.staticVid(path.len) + else: + db.vidFetch(n) + res + # ------------------------------------------------------------------------------ # End # ------------------------------------------------------------------------------ diff --git a/execution_chain/db/core_db/backend/aristo_rocksdb.nim b/execution_chain/db/core_db/backend/aristo_rocksdb.nim index 39969b1c2e..190f3b32de 100644 --- a/execution_chain/db/core_db/backend/aristo_rocksdb.nim +++ b/execution_chain/db/core_db/backend/aristo_rocksdb.nim @@ -165,7 +165,7 @@ proc newRocksDbCoreDbRef*(basePath: string, opts: DbOptions): CoreDbRef = if opts.rdbKeyCacheSize > 0: # Make sure key cache isn't empty - adb.txRef.computeKeys(VertexID(1)).isOkOr: + adb.txRef.computeKeys(STATE_ROOT_VID).isOkOr: fatal "Cannot compute root keys", msg = error quit(QuitFailure) diff --git a/tests/test_aristo.nim b/tests/test_aristo.nim index 586c5171db..9fe978650d 100644 --- a/tests/test_aristo.nim +++ b/tests/test_aristo.nim @@ -8,7 +8,7 @@ # at your option. This file may not be copied, modified, or # distributed except according to those terms. 
-import ./test_aristo/[test_blobify, test_compute, test_tx_frame] +import ./test_aristo/[test_blobify, test_compute, test_tx_frame, test_vid] # ------------------------------------------------------------------------------ # End diff --git a/tests/test_aristo/test_compute.nim b/tests/test_aristo/test_compute.nim index 4b289b63f3..88d9c55b6c 100644 --- a/tests/test_aristo/test_compute.nim +++ b/tests/test_aristo/test_compute.nim @@ -74,7 +74,7 @@ suite "Aristo compute": let db = AristoDbRef.init() txFrame = db.txRef - root = VertexID(1) + root = STATE_ROOT_VID for (k, v, r) in sample: checkpoint("k = " & k.toHex & ", v = " & $v) @@ -113,7 +113,7 @@ suite "Aristo compute": let db = AristoDbRef.init() txFrame = db.txRef - root = VertexID(1) + root = STATE_ROOT_VID for (k, v, r) in samples[^1]: check: diff --git a/tests/test_aristo/test_tx_frame.nim b/tests/test_aristo/test_tx_frame.nim index c7a08fa0c7..b0caba6103 100644 --- a/tests/test_aristo/test_tx_frame.nim +++ b/tests/test_aristo/test_tx_frame.nim @@ -78,7 +78,7 @@ suite "Aristo TxFrame": # we find the vtx should be one below tx2c! 
( tx2c.level - - tx2c.layersGetVtx((VertexID(1), acc1Hike.legs[^1].wp.vid)).value()[1] + tx2c.layersGetVtx((STATE_ROOT_VID, acc1Hike.legs[^1].wp.vid)).value()[1] ) == 1 tx0.checkpoint(1, skipSnapshot = false) @@ -91,7 +91,7 @@ suite "Aristo TxFrame": # Even after checkpointing, we should maintain the same relative levels ( tx2c.level - - tx2c.layersGetVtx((VertexID(1), acc1Hike.legs[^1].wp.vid)).value()[1] + tx2c.layersGetVtx((STATE_ROOT_VID, acc1Hike.legs[^1].wp.vid)).value()[1] ) == 1 let batch = db.putBegFn().expect("working batch") diff --git a/tests/test_aristo/test_vid.nim b/tests/test_aristo/test_vid.nim new file mode 100644 index 0000000000..34839922d5 --- /dev/null +++ b/tests/test_aristo/test_vid.nim @@ -0,0 +1,62 @@ +# Nimbus +# Copyright (c) 2023-2025 Status Research & Development GmbH +# Licensed under either of +# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or +# http://www.apache.org/licenses/LICENSE-2.0) +# * MIT license ([LICENSE-MIT](LICENSE-MIT) or +# http://opensource.org/licenses/MIT) +# at your option. This file may not be copied, modified, or +# distributed except according to those terms. 
+ +{.used.} + +import + unittest2, + eth/trie/nibbles, + ../../execution_chain/db/aristo/[aristo_constants, aristo_vid] + +suite "Aristo VertexID": + test "Static basics": + var buf = NibblesBuf.fromBytes([]) + check: + buf.staticVid(0) == STATE_ROOT_VID + + buf = buf & NibblesBuf.nibble(byte 2) + + check: + buf.staticVid(0) == STATE_ROOT_VID + buf.staticVid(1) == STATE_ROOT_VID + 1 + 2 + + buf = buf & NibblesBuf.nibble(byte 4) + + check: + buf.staticVid(0) == STATE_ROOT_VID + buf.staticVid(1) == STATE_ROOT_VID + 1 + 2 + buf.staticVid(2) == STATE_ROOT_VID + 1 + 16 + 0x24 + + var + buf2 = NibblesBuf.nibble(0) & buf + var + buf3 = NibblesBuf.nibble(1) & buf + + check: + buf2.staticVid(3) != buf3.staticVid(3) + + test "fetching": + var buf = NibblesBuf.fromBytes([]) + let txFrame = AristoTxRef() + + check: + txFrame.accVidFetch(buf) == STATE_ROOT_VID + + buf = buf & NibblesBuf.nibble(byte 2) + + check: + txFrame.accVidFetch(buf) == STATE_ROOT_VID + 1 + 2 + + while buf.len <= STATIC_VID_LEVELS: + buf = buf & NibblesBuf.nibble(byte 2) + + check: + txFrame.accVidFetch(buf) == VertexID(FIRST_DYNAMIC_VID) + txFrame.accVidFetch(buf) == VertexID(FIRST_DYNAMIC_VID + 1) From 33cf09dc799cf5c9ee3d8baa93b1c831bcb7d490 Mon Sep 17 00:00:00 2001 From: Advaita Saha Date: Thu, 29 May 2025 15:53:33 +0530 Subject: [PATCH 051/138] drop logsBloom support (#3292) * drop logsBloom support * fix: fluffy and t8n * fix: multi-type * remove commented out code * bump nim-eth to --- execution_chain/core/chain/forked_chain.nim | 4 ++-- .../core/chain/forked_chain/chain_branch.nim | 8 ++++---- .../core/chain/forked_chain/chain_private.nim | 4 ++-- execution_chain/core/executor/executor_helpers.nim | 7 +++---- execution_chain/db/core_db/core_apps.nim | 10 +++++----- execution_chain/evm/types.nim | 2 +- execution_chain/rpc/filters.nim | 9 +++++---- execution_chain/rpc/oracle.nim | 2 +- execution_chain/rpc/rpc_utils.nim | 6 ++++-- execution_chain/sync/wire_protocol/handler.nim | 4 ++-- 
execution_chain/utils/utils.nim | 6 +++++- scripts/check_copyright_year.sh | 0 tests/test_block_fixture.nim | 10 ++++++---- tools/t8n/transition.nim | 10 ++++++---- 14 files changed, 46 insertions(+), 36 deletions(-) mode change 100644 => 100755 scripts/check_copyright_year.sh diff --git a/execution_chain/core/chain/forked_chain.nim b/execution_chain/core/chain/forked_chain.nim index a0cdcc5220..0d1653dd2d 100644 --- a/execution_chain/core/chain/forked_chain.nim +++ b/execution_chain/core/chain/forked_chain.nim @@ -61,7 +61,7 @@ func updateBranch(c: ForkedChainRef, blk: Block, blkHash: Hash32, txFrame: CoreDbTxRef, - receipts: sink seq[Receipt]) = + receipts: sink seq[StoredReceipt]) = if parent.isHead: parent.appendBlock(blk, blkHash, txFrame, move(receipts)) c.hashToBlock[blkHash] = parent.lastBlockPos @@ -919,7 +919,7 @@ proc blockHeader*(c: ForkedChainRef, blk: BlockHashOrNumber): Result[Header, str return c.headerByHash(blk.hash) c.headerByNumber(blk.number) -proc receiptsByBlockHash*(c: ForkedChainRef, blockHash: Hash32): Result[seq[Receipt], string] = +proc receiptsByBlockHash*(c: ForkedChainRef, blockHash: Hash32): Result[seq[StoredReceipt], string] = if blockHash != c.baseBranch.tailHash: c.hashToBlock.withValue(blockHash, loc): return ok(loc[].receipts) diff --git a/execution_chain/core/chain/forked_chain/chain_branch.nim b/execution_chain/core/chain/forked_chain/chain_branch.nim index 326a3415a2..6eef09b62c 100644 --- a/execution_chain/core/chain/forked_chain/chain_branch.nim +++ b/execution_chain/core/chain/forked_chain/chain_branch.nim @@ -19,7 +19,7 @@ type BlockDesc* = object blk* : Block txFrame* : CoreDbTxRef - receipts*: seq[Receipt] + receipts*: seq[StoredReceipt] hash* : Hash32 BlockPos* = object @@ -105,7 +105,7 @@ func branch*(header: Header, hash: Hash32, txFrame: CoreDbTxRef): BranchRef = func branch*(parent: BranchRef, blk: Block, hash: Hash32, txFrame: CoreDbTxRef, - receipts: sink seq[Receipt]): BranchRef = + receipts: sink 
seq[StoredReceipt]): BranchRef = BranchRef( blocks: @[BlockDesc( blk: blk, @@ -126,7 +126,7 @@ func header*(loc: BlockPos): Header = func blk*(loc: BlockPos): Block = loc.branch.blocks[loc.index].blk -func receipts*(loc: BlockPos): seq[Receipt] = +func receipts*(loc: BlockPos): seq[StoredReceipt] = loc.branch.blocks[loc.index].receipts func number*(loc: BlockPos): BlockNumber = @@ -154,7 +154,7 @@ func appendBlock*(loc: BlockPos, blk: Block, blkHash: Hash32, txFrame: CoreDbTxRef, - receipts: sink seq[Receipt]) = + receipts: sink seq[StoredReceipt]) = loc.branch.append(BlockDesc( blk : blk, txFrame : txFrame, diff --git a/execution_chain/core/chain/forked_chain/chain_private.nim b/execution_chain/core/chain/forked_chain/chain_private.nim index f5ea4c10bd..857518b1d0 100644 --- a/execution_chain/core/chain/forked_chain/chain_private.nim +++ b/execution_chain/core/chain/forked_chain/chain_private.nim @@ -21,7 +21,7 @@ import proc writeBaggage*(c: ForkedChainRef, blk: Block, blkHash: Hash32, txFrame: CoreDbTxRef, - receipts: openArray[Receipt]) = + receipts: openArray[StoredReceipt]) = template header(): Header = blk.header @@ -58,7 +58,7 @@ proc processBlock*(c: ForkedChainRef, txFrame: CoreDbTxRef, blk: Block, blkHash: Hash32, - finalized: bool): Result[seq[Receipt], string] = + finalized: bool): Result[seq[StoredReceipt], string] = template header(): Header = blk.header diff --git a/execution_chain/core/executor/executor_helpers.nim b/execution_chain/core/executor/executor_helpers.nim index a3d2294931..aef116f160 100644 --- a/execution_chain/core/executor/executor_helpers.nim +++ b/execution_chain/core/executor/executor_helpers.nim @@ -41,15 +41,15 @@ func logsBloom(logs: openArray[Log]): LogsBloom = # Public functions # ------------------------------------------------------------------------------ -func createBloom*(receipts: openArray[Receipt]): Bloom = +func createBloom*(receipts: openArray[StoredReceipt]): Bloom = var bloom: LogsBloom for rec in receipts: 
bloom.value = bloom.value or logsBloom(rec.logs).value bloom.value.to(Bloom) proc makeReceipt*( - vmState: BaseVMState; txType: TxType, callResult: LogResult): Receipt = - var rec: Receipt + vmState: BaseVMState; txType: TxType, callResult: LogResult): StoredReceipt = + var rec: StoredReceipt if vmState.com.isByzantiumOrLater(vmState.blockNumber): rec.isHash = false rec.status = vmState.status @@ -62,7 +62,6 @@ proc makeReceipt*( rec.receiptType = txType rec.cumulativeGasUsed = vmState.cumulativeGasUsed assign(rec.logs, callResult.logEntries) - rec.logsBloom = logsBloom(rec.logs).value.to(Bloom) rec # ------------------------------------------------------------------------------ diff --git a/execution_chain/db/core_db/core_apps.nim b/execution_chain/db/core_db/core_apps.nim index 49f5fa9078..29c7e1ceb5 100644 --- a/execution_chain/db/core_db/core_apps.nim +++ b/execution_chain/db/core_db/core_apps.nim @@ -129,7 +129,7 @@ iterator getWithdrawals*( iterator getReceipts*( db: CoreDbTxRef; receiptsRoot: Hash32; - ): Receipt + ): StoredReceipt {.gcsafe, raises: [RlpError].} = block body: if receiptsRoot == EMPTY_ROOT_HASH: @@ -142,7 +142,7 @@ iterator getReceipts*( break body if data.len == 0: break body - yield rlp.decode(data, Receipt) + yield rlp.decode(data, StoredReceipt) # ------------------------------------------------------------------------------ # Public functions @@ -493,7 +493,7 @@ proc setHead*( proc persistReceipts*( db: CoreDbTxRef; receiptsRoot: Hash32; - receipts: openArray[Receipt]; + receipts: openArray[StoredReceipt]; ) = const info = "persistReceipts()" if receipts.len == 0: @@ -507,9 +507,9 @@ proc persistReceipts*( proc getReceipts*( db: CoreDbTxRef; receiptsRoot: Hash32; - ): Result[seq[Receipt], string] = + ): Result[seq[StoredReceipt], string] = wrapRlpException "getReceipts": - var receipts = newSeq[Receipt]() + var receipts = newSeq[StoredReceipt]() for r in db.getReceipts(receiptsRoot): receipts.add(r) return ok(receipts) diff --git 
a/execution_chain/evm/types.nim b/execution_chain/evm/types.nim index 7a936f3347..12e5f2304b 100644 --- a/execution_chain/evm/types.nim +++ b/execution_chain/evm/types.nim @@ -49,7 +49,7 @@ type flags* : set[VMFlag] fork* : EVMFork tracer* : TracerRef - receipts* : seq[Receipt] + receipts* : seq[StoredReceipt] cumulativeGasUsed*: GasInt gasCosts* : GasCosts blobGasUsed* : uint64 diff --git a/execution_chain/rpc/filters.nim b/execution_chain/rpc/filters.nim index 46d60a3fe5..6bd2ad5c52 100644 --- a/execution_chain/rpc/filters.nim +++ b/execution_chain/rpc/filters.nim @@ -64,9 +64,9 @@ proc match*( proc deriveLogs*( header: Header, transactions: openArray[Transaction], - receipts: openArray[Receipt], + receipts: openArray[StoredReceipt | Receipt], filterOptions: FilterOptions, - txHashes: Opt[seq[Hash32]] = Opt.none(seq[Hash32]) + txHashes: Opt[seq[Hash32]] = Opt.none(seq[Hash32]), ): seq[FilterLog] = ## Derive log fields, does not deal with pending log, only the logs with ## full data set @@ -81,12 +81,13 @@ proc deriveLogs*( var logIndex = 0'u64 for i, receipt in receipts: - let logs = receipt.logs.filterIt(it.match(filterOptions.address, filterOptions.topics)) + let logs = + receipt.logs.filterIt(it.match(filterOptions.address, filterOptions.topics)) if logs.len > 0: # TODO avoid recomputing entirely - we should have this cached somewhere let txHash = if txHashes.isSome: - txHashes.get[i] # cached txHashes + txHashes.get[i] # cached txHashes else: transactions[i].computeRlpHash diff --git a/execution_chain/rpc/oracle.nim b/execution_chain/rpc/oracle.nim index 6a8498a131..ebe39d7f21 100644 --- a/execution_chain/rpc/oracle.nim +++ b/execution_chain/rpc/oracle.nim @@ -44,7 +44,7 @@ type blockNumber: uint64 header : Header txs : seq[Transaction] - receipts : seq[Receipt] + receipts : seq[StoredReceipt] CacheKey = object number: uint64 diff --git a/execution_chain/rpc/rpc_utils.nim b/execution_chain/rpc/rpc_utils.nim index b569d9fd5e..52982f2bd4 100644 --- 
a/execution_chain/rpc/rpc_utils.nim +++ b/execution_chain/rpc/rpc_utils.nim @@ -179,9 +179,11 @@ proc populateBlockObject*(blockHash: Hash32, result.excessBlobGas = w3Qty(header.excessBlobGas) result.requestsHash = header.requestsHash -proc populateReceipt*(receipt: Receipt, gasUsed: GasInt, tx: Transaction, +proc populateReceipt*(rec: StoredReceipt, gasUsed: GasInt, tx: Transaction, txIndex: uint64, header: Header, com: CommonRef): ReceiptObject = - let sender = tx.recoverSender() + let + sender = tx.recoverSender() + receipt = rec.to(Receipt) var res = ReceiptObject() res.transactionHash = tx.computeRlpHash res.transactionIndex = Quantity(txIndex) diff --git a/execution_chain/sync/wire_protocol/handler.nim b/execution_chain/sync/wire_protocol/handler.nim index b5fe62a4d5..7047a41b4e 100644 --- a/execution_chain/sync/wire_protocol/handler.nim +++ b/execution_chain/sync/wire_protocol/handler.nim @@ -89,7 +89,7 @@ proc getReceipts*(ctx: EthWireRef, continue totalBytes += getEncodedLength(receiptList) - list.add(move(receiptList)) + list.add(receiptList.to(seq[Receipt])) if list.len >= MAX_RECEIPTS_SERVE or totalBytes > SOFT_RESPONSE_LIMIT: @@ -109,7 +109,7 @@ proc getStoredReceipts*(ctx: EthWireRef, continue totalBytes += getEncodedLength(receiptList) - list.add(receiptList.to(seq[StoredReceipt])) + list.add(move(receiptList)) if list.len >= MAX_RECEIPTS_SERVE or totalBytes > SOFT_RESPONSE_LIMIT: diff --git a/execution_chain/utils/utils.nim b/execution_chain/utils/utils.nim index 334a8c9e5a..50fc3f3350 100644 --- a/execution_chain/utils/utils.nim +++ b/execution_chain/utils/utils.nim @@ -1,5 +1,5 @@ # Nimbus -# Copyright (c) 2018-2024 Status Research & Development GmbH +# Copyright (c) 2018-2025 Status Research & Development GmbH # Licensed under either of # * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or # http://www.apache.org/licenses/LICENSE-2.0) @@ -30,6 +30,10 @@ template calcTxRoot*(transactions: openArray[Transaction]): Root = template 
calcWithdrawalsRoot*(withdrawals: openArray[Withdrawal]): Root = orderedTrieRoot(withdrawals) +template calcReceiptsRoot*(receipts: openArray[StoredReceipt]): Root = + let recs = receipts.to(seq[Receipt]) + orderedTrieRoot(recs) + template calcReceiptsRoot*(receipts: openArray[Receipt]): Root = orderedTrieRoot(receipts) diff --git a/scripts/check_copyright_year.sh b/scripts/check_copyright_year.sh old mode 100644 new mode 100755 diff --git a/tests/test_block_fixture.nim b/tests/test_block_fixture.nim index 6a6b1d4f72..d81cca5b49 100644 --- a/tests/test_block_fixture.nim +++ b/tests/test_block_fixture.nim @@ -1,4 +1,4 @@ -# Copyright (c) 2023-2024 Status Research & Development GmbH +# Copyright (c) 2023-2025 Status Research & Development GmbH # Licensed under either of # * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE)) # * MIT license ([LICENSE-MIT](LICENSE-MIT)) @@ -19,7 +19,8 @@ var receitsRlp = rlpb.listElem(2) let blockHeader4514995* = headerRlp.read(Header) let blockBody4514995* = bodyRlp.read(BlockBody) -let receipts4514995* = receitsRlp.read(seq[Receipt]) +let recs = receitsRlp.read(seq[Receipt]) +let receipts4514995* = recs.to(seq[StoredReceipt]) proc getBlockHeader4514995*(): Header {.gcsafe.} = var headerRlp = rlpb.listElem(0) @@ -29,6 +30,7 @@ proc getBlockBody4514995*(): BlockBody {.gcsafe.} = var bodyRlp = rlpb.listElem(1) return bodyRlp.read(BlockBody) -proc getReceipts4514995*(): seq[Receipt] {.gcsafe.} = +proc getReceipts4514995*(): seq[StoredReceipt] {.gcsafe.} = var receitsRlp = rlpb.listElem(2) - return receitsRlp.read(seq[Receipt]) + let recs = receitsRlp.read(seq[Receipt]) + return recs.to(seq[StoredReceipt]) diff --git a/tools/t8n/transition.nim b/tools/t8n/transition.nim index d4cdf308c8..540e5df930 100644 --- a/tools/t8n/transition.nim +++ b/tools/t8n/transition.nim @@ -119,13 +119,15 @@ proc genAddress(tx: Transaction, sender: Address): Address = if tx.to.isNone: result = generateAddress(sender, tx.nonce) -proc 
toTxReceipt(rec: Receipt, +proc toTxReceipt(receipt: StoredReceipt, tx: Transaction, sender: Address, txIndex: int, gasUsed: GasInt): TxReceipt = - let contractAddress = genAddress(tx, sender) + let + contractAddress = genAddress(tx, sender) + rec = receipt.to(Receipt) TxReceipt( txType: tx.txType, root: if rec.isHash: rec.hash else: default(Hash32), @@ -140,7 +142,7 @@ proc toTxReceipt(rec: Receipt, transactionIndex: txIndex ) -proc calcLogsHash(receipts: openArray[Receipt]): Hash32 = +proc calcLogsHash(receipts: openArray[StoredReceipt]): Hash32 = var logs: seq[Log] for rec in receipts: logs.add rec.logs @@ -231,7 +233,7 @@ proc exec(ctx: TransContext, vmState.mutateLedger: db.applyDAOHardFork() - vmState.receipts = newSeqOfCap[Receipt](ctx.txList.len) + vmState.receipts = newSeqOfCap[StoredReceipt](ctx.txList.len) vmState.cumulativeGasUsed = 0 if ctx.env.parentBeaconBlockRoot.isSome: From 953a6c377366cf7c7bbb6084118a147b542612d2 Mon Sep 17 00:00:00 2001 From: Jordan Hrycaj Date: Thu, 29 May 2025 15:55:34 +0000 Subject: [PATCH 052/138] Beacon sync code update and better handling of block errors (#3345) * Extract header chain storage request to separate file why This makes it easy to maintain comparable log messages whenever it is called. This works similar similar to the `blocksImport()` logging. * Expand/replace some getters and/or global shortcuts also Cleanup of `worker_desc` descriptors why Most of the getters removed were introduced when the syncer state started to use the header chain cache to make the migration easier. On the *con* side, this hid the source of the state variables which needed a bit more attention when maintaining/updating the code. Also, the header chain cache state variable `head` is now fully cached locally. It will not change for the current scrum/sprint. * Reorg error registers and its administration why This patch provides a common register for both, blocks and headers processing errors. 
This allows a processing peer to refer back an error to the peer that provided the header or block that produced a processing error later. Consequently, a sync peer might be banned so that the same trouble causing object is not fetched again. * Code cosmetics: rename config constants why Previous naming was sort of inconsistent and felt random * Better handling of bogus block bodies why So far the whole sync process was aborted on block import errors. As these import errors occurred sporadically on `hoodi`, the better approach seems to be to skip the rest of the current blocks list, ban (aka zombify) the peer where the last block was sourced from and reload the list again. Only after several failed attempts of the above the sync process will be aborted. * Update block import termination criterium * Expose last imported block number via metrics why This is the block number of the latest successfully imported and executed block. The exposure within metrics might help for some debugging or scrutiny. * Reduce number of blocks requested at a time via ethXX to 40 why This reduces the upper bound for in-memory cache space. Choose 40 as the number of blocks requests is supported by the sample statistics below on `mainnet` (starting at block height ~#22,260,000 before reducing to 40) n=2512 mean=25.6135 stddev=6.86928 As it is expected (and occasionally observed) that the number of larger blocks is reduced in ethXX responses compared to smaller block sizes, this reduction of requested blocks has probably no effect on responses for larger block sizes. 
Note that on `hoodi` the previous value of 64 is more or less used when available (starting sync at genesis) n=316 mean=63.2089 stddev=4.92996 --- execution_chain/sync/beacon/README.md | 8 +- execution_chain/sync/beacon/TODO.md | 23 --- .../sync/beacon/worker/blocks_staged.nim | 57 +++--- .../worker/blocks_staged/bodies_fetch.nim | 16 +- .../worker/blocks_staged/staged_blocks.nim | 100 ++++++++--- .../sync/beacon/worker/headers_staged.nim | 49 ++--- .../worker/headers_staged/headers_fetch.nim | 14 +- .../worker/headers_staged/staged_collect.nim | 92 ++++------ .../worker/headers_staged/staged_headers.nim | 70 ++++++++ .../worker/headers_staged/staged_queue.nim | 2 +- .../sync/beacon/worker/helpers.nim | 5 + .../sync/beacon/worker/start_stop.nim | 22 ++- execution_chain/sync/beacon/worker/update.nim | 57 +++--- .../sync/beacon/worker/update/metrics.nim | 14 +- .../sync/beacon/worker/update/ticker.nim | 20 +-- execution_chain/sync/beacon/worker_const.nim | 80 ++++----- execution_chain/sync/beacon/worker_desc.nim | 167 +++++++++--------- 17 files changed, 454 insertions(+), 342 deletions(-) delete mode 100644 execution_chain/sync/beacon/TODO.md create mode 100644 execution_chain/sync/beacon/worker/headers_staged/staged_headers.nim diff --git a/execution_chain/sync/beacon/README.md b/execution_chain/sync/beacon/README.md index 45fac3d7b6..5f7805060a 100644 --- a/execution_chain/sync/beacon/README.md +++ b/execution_chain/sync/beacon/README.md @@ -74,6 +74,10 @@ symbol on the left. Single letter symbol have the following meaning: * *C* -- coupler, least possible endpoint *D* of the chain of headers to be fetched and and linked. +* *I* -- imported, last block that was sucsessfully imported into the **FC** + module (this symbol is potentally used in code comments of the + implementation, only.) + * *L* -- **latest**, current value of this entity (with the same name) of the **FC** module (i.e. the current value when looked up.) 
*L* need not be a parent of any header of the linked chain `D..H` as both, *L* and @@ -89,7 +93,8 @@ symbol on the left. Single letter symbol have the following meaning: * *T* -- cached value of the last *consensus head* request (interpreted as *sync to new head* instruction) sent from the **CL** via RPC (this - symbol is used in code comments of the implementation.) + symbol is potentally used in code comments of the implementation, + only.) ### Sync Processing @@ -280,6 +285,7 @@ be available if *nimbus* is compiled with the additional make flags | nec_execution_head | block height | **L**, *increasing* | | nec_sync_coupler | block height | **C**, *0 when idle* | | nec_sync_dangling | block height | **D**, *0 when idle* | +| nec_sync_last_block_imported | block height | **I**, *0 when idle* | | nec_sync_head | block height | **H**, *0 when idle* | | nec_sync_consensus_head | block height | **T**, *increasing* | | | | | diff --git a/execution_chain/sync/beacon/TODO.md b/execution_chain/sync/beacon/TODO.md deleted file mode 100644 index 9bf37d5cac..0000000000 --- a/execution_chain/sync/beacon/TODO.md +++ /dev/null @@ -1,23 +0,0 @@ -## General TODO items - -* Update/resolve code fragments which are tagged FIXME - -## Open issues - -### 1. Weird behaviour of the RPC/engine API - -See issue [#2816](https://github.com/status-im/nimbus-eth1/issues/2816) - -### 2. Mem overflow possible on small breasted systems - -Running the exe client, a 1.5G response message was opbserved (on my 8G test system this kills the program as it has already 80% mem load. It happens while syncing holesky at around block #184160 and is reproducible on the 8G system but not yet on the an 80G system.) - - [..] 
- DBG 2024-11-20 16:16:18.871+00:00 Processing JSON-RPC request file=router.nim:135 id=178 name=eth_getLogs - DBG 2024-11-20 16:16:18.915+00:00 Returning JSON-RPC response file=router.nim:137 id=178 name=eth_getLogs len=201631 - TRC 2024-11-20 16:16:18.951+00:00 <<< find_node from topics="eth p2p discovery" file=discovery.nim:248 node=Node[94.16.123.192:30303] - TRC 2024-11-20 16:16:18.951+00:00 Neighbours to topics="eth p2p discovery" file=discovery.nim:161 node=Node[94.16.123.192:30303] nodes=[..] - TRC 2024-11-20 16:16:18.951+00:00 Neighbours to topics="eth p2p discovery" file=discovery.nim:161 node=Node[94.16.123.192:30303] nodes=[..] - DBG 2024-11-20 16:16:19.027+00:00 Received JSON-RPC request topics="JSONRPC-HTTP-SERVER" file=httpserver.nim:52 address=127.0.0.1:49746 len=239 - DBG 2024-11-20 16:16:19.027+00:00 Processing JSON-RPC request file=router.nim:135 id=179 name=eth_getLogs - DBG 2024-11-20 16:20:23.664+00:00 Returning JSON-RPC response file=router.nim:137 id=179 name=eth_getLogs len=1630240149 diff --git a/execution_chain/sync/beacon/worker/blocks_staged.nim b/execution_chain/sync/beacon/worker/blocks_staged.nim index 1a8850f685..7a7e5e5d9b 100644 --- a/execution_chain/sync/beacon/worker/blocks_staged.nim +++ b/execution_chain/sync/beacon/worker/blocks_staged.nim @@ -51,9 +51,9 @@ proc blocksStagedProcessImpl( # Make sure that the lowest block is available, already. Or the other way # round: no unprocessed block number range precedes the least staged block. 
let minNum = qItem.data.blocks[0].header.number - if ctx.blk.topImported + 1 < minNum: + if ctx.subState.top + 1 < minNum: trace info & ": block queue not ready yet", peer=($maybePeer), - topImported=ctx.blk.topImported.bnStr, qItem=qItem.data.blocks.bnStr, + topImported=ctx.subState.top.bnStr, qItem=qItem.data.blocks.bnStr, nStagedQ=ctx.blk.staged.len, nSyncPeers=ctx.pool.nBuddies switchPeer = true # there is a gap -- come back later break @@ -62,24 +62,25 @@ proc blocksStagedProcessImpl( discard ctx.blk.staged.delete qItem.key # Import blocks list - await ctx.blocksImport(maybePeer, qItem.data.blocks, info) + await ctx.blocksImport( + maybePeer, qItem.data.blocks, qItem.data.peerID, info) # Import probably incomplete, so a partial roll back may be needed let lastBn = qItem.data.blocks[^1].header.number - if ctx.blk.topImported < lastBn: - ctx.blocksUnprocAppend(ctx.blk.topImported+1, lastBn) + if ctx.subState.top < lastBn: + ctx.blocksUnprocAppend(ctx.subState.top + 1, lastBn) - nImported += ctx.blk.topImported - minNum + 1 + nImported += ctx.subState.top - minNum + 1 # End while loop if 0 < nImported: info "Blocks serialised and imported", - topImported=ctx.blk.topImported.bnStr, nImported, + topImported=ctx.subState.top.bnStr, nImported, nStagedQ=ctx.blk.staged.len, nSyncPeers=ctx.pool.nBuddies, switchPeer elif 0 < ctx.blk.staged.len and not switchPeer: trace info & ": no blocks unqueued", peer=($maybePeer), - topImported=ctx.blk.topImported.bnStr, nStagedQ=ctx.blk.staged.len, + topImported=ctx.subState.top.bnStr, nStagedQ=ctx.blk.staged.len, nSyncPeers=ctx.pool.nBuddies return not switchPeer @@ -152,12 +153,12 @@ proc blocksStagedCollect*( # ----------| already imported into `FC` module # topImported bottom # - if ctx.blk.topImported < bottom: + if ctx.subState.top < bottom: break # Throw away overlap (should not happen anyway) - if bottom < ctx.blk.topImported: - discard ctx.blocksUnprocFetch(ctx.blk.topImported - bottom).expect("iv") + if bottom < 
ctx.subState.top: + discard ctx.blocksUnprocFetch(ctx.subState.top - bottom).expect("iv") # Fetch blocks and verify result let blocks = (await buddy.blocksFetch(nFetchBodiesRequest, info)).valueOr: @@ -167,24 +168,24 @@ proc blocksStagedCollect*( ctx.pool.seenData = true # blocks data exist # Import blocks (no staging) - await ctx.blocksImport(Opt.some(peer), blocks, info) + await ctx.blocksImport(Opt.some(peer), blocks, buddy.peerID, info) - # Import probably incomplete, so a partial roll back may be needed + # Import may be incomplete, so a partial roll back may be needed let lastBn = blocks[^1].header.number - if ctx.blk.topImported < lastBn: - ctx.blocksUnprocAppend(ctx.blk.topImported + 1, lastBn) + if ctx.subState.top < lastBn: + ctx.blocksUnprocAppend(ctx.subState.top + 1, lastBn) # statistics - nImported += ctx.blk.topImported - blocks[0].header.number + 1 + nImported += ctx.subState.top - blocks[0].header.number + 1 # Buddy might have been cancelled while importing blocks. if buddy.ctrl.stopped or ctx.poolMode: - break fetchBlocksBody # done, exit this function + break fetchBlocksBody # done, exit this block # End while: headersUnprocFetch() + blocksImport() - # Continue fetching blocks and queue them (if any) - if ctx.blk.staged.len + ctx.blk.reserveStaged < blocksStagedQueueLengthHwm: + # Continue fetching blocks and stage/queue them (if any) + if ctx.blk.staged.len + ctx.blk.reserveStaged < blocksStagedQueueLengthMax: # Fetch blocks and verify result ctx.blk.reserveStaged.inc # Book a slot on `staged` @@ -192,20 +193,20 @@ proc blocksStagedCollect*( ctx.blk.reserveStaged.dec # Free that slot again if rc.isErr: - break fetchBlocksBody # done, exit this function + break fetchBlocksBody # done, exit this block let - blocks = rc.value - # Insert blocks list on the `staged` queue - key = blocks[0].header.number + key = rc.value[0].header.number qItem = ctx.blk.staged.insert(key).valueOr: raiseAssert info & ": duplicate key on staged queue iv=" & - (key, 
blocks[^1].header.number).bnStr + (key, rc.value[^1].header.number).bnStr - qItem.data.blocks = blocks # store `blocks[]` list + qItem.data.blocks = rc.value # store `blocks[]` list + qItem.data.peerID = buddy.peerID - nQueued += blocks.len # statistics + nQueued += rc.value.len # statistics + # End if # End block: `fetchBlocksBody` @@ -223,7 +224,7 @@ proc blocksStagedCollect*( return info "Queued/staged or imported blocks", - topImported=ctx.blk.topImported.bnStr, + topImported=ctx.subState.top.bnStr, unprocBottom=(if ctx.blocksModeStopped(): "n/a" else: ctx.blocksUnprocAvailBottom.bnStr), nQueued, nImported, nStagedQ=ctx.blk.staged.len, @@ -252,7 +253,7 @@ proc blocksStagedReorg*(ctx: BeaconCtxRef; info: static[string]) = ctx.blocksUnprocClear() ctx.blk.staged.clear() - ctx.blk.cancelRequest = false + ctx.subState.reset # ------------------------------------------------------------------------------ # End diff --git a/execution_chain/sync/beacon/worker/blocks_staged/bodies_fetch.nim b/execution_chain/sync/beacon/worker/blocks_staged/bodies_fetch.nim index fa52c94789..c6d2a438c7 100644 --- a/execution_chain/sync/beacon/worker/blocks_staged/bodies_fetch.nim +++ b/execution_chain/sync/beacon/worker/blocks_staged/bodies_fetch.nim @@ -22,11 +22,11 @@ import # ------------------------------------------------------------------------------ func bdyErrors*(buddy: BeaconBuddyRef): string = - $buddy.only.nBdyRespErrors & "/" & $buddy.only.nBdyProcErrors + $buddy.only.nRespErrors.blk & "/" & $buddy.nBlkProcErrors() proc fetchRegisterError*(buddy: BeaconBuddyRef, slowPeer = false) = - buddy.only.nBdyRespErrors.inc - if fetchBodiesReqErrThresholdCount < buddy.only.nBdyRespErrors: + buddy.only.nRespErrors.blk.inc + if nFetchBodiesErrThreshold < buddy.only.nRespErrors.blk: if buddy.ctx.pool.nBuddies == 1 and slowPeer: # Remember that the current peer is the last one and is lablelled slow. # It would have been zombified if it were not the last one. 
This can be @@ -54,7 +54,7 @@ proc bodiesFetch*( try: resp = await peer.getBlockBodies(request) except PeerDisconnected as e: - buddy.only.nBdyRespErrors.inc + buddy.only.nRespErrors.blk.inc buddy.ctrl.zombie = true `info` info & " error", peer, nReq, elapsed=(Moment.now() - start).toStr, error=($e.name), msg=e.msg, bdyErrors=buddy.bdyErrors @@ -87,16 +87,16 @@ proc bodiesFetch*( buddy.fetchRegisterError() trace trEthRecvReceivedBlockBodies, peer, nReq, nResp=b.len, elapsed=elapsed.toStr, syncState=($buddy.syncState), - nRespErrors=buddy.only.nBdyRespErrors + nRespErrors=buddy.only.nRespErrors.blk return err() # Ban an overly slow peer for a while when seen in a row. Also there is a # mimimum share of the number of requested headers expected, typically 10%. - if fetchBodiesReqErrThresholdZombie < elapsed or - b.len.uint64 * 100 < nReq.uint64 * fetchBodiesReqMinResponsePC: + if fetchBodiesErrTimeout < elapsed or + b.len.uint64 * 100 < nReq.uint64 * fetchBodiesMinResponsePC: buddy.fetchRegisterError(slowPeer=true) else: - buddy.only.nBdyRespErrors = 0 # reset error count + buddy.only.nRespErrors.blk = 0 # reset error count buddy.ctx.pool.blkLastSlowPeer = Opt.none(Hash) # not last one or not error trace trEthRecvReceivedBlockBodies, peer, nReq, nResp=b.len, diff --git a/execution_chain/sync/beacon/worker/blocks_staged/staged_blocks.nim b/execution_chain/sync/beacon/worker/blocks_staged/staged_blocks.nim index 1d32df2dc0..99c6aab1af 100644 --- a/execution_chain/sync/beacon/worker/blocks_staged/staged_blocks.nim +++ b/execution_chain/sync/beacon/worker/blocks_staged/staged_blocks.nim @@ -59,7 +59,7 @@ proc blocksFetchCheckImpl( # There is nothing one can do here info "Block header missing (reorg triggered)", peer, iv, n, nth=(iv.minPt + n).bnStr - ctx.blk.cancelRequest = true # So require reorg + ctx.subState.cancelRequest = true # So require reorg return Opt.none(seq[EthBlock]) request.blockHashes[n - 1] = header.parentHash blocks[n].header = header @@ -67,7 +67,7 @@ 
proc blocksFetchCheckImpl( # There is nothing one can do here info "Block header missing (reorg triggered)", peer, iv, n=0, nth=iv.minPt.bnStr - ctx.blk.cancelRequest = true # So require reorg + ctx.subState.cancelRequest = true # So require reorg return Opt.none(seq[EthBlock]) request.blockHashes[^1] = blocks[^1].header.computeBlockHash @@ -98,6 +98,15 @@ proc blocksFetchCheckImpl( nTxs=bodies[n].transactions.len, nBodies, bdyErrors=buddy.bdyErrors break loop + # In order to avoid extensive checking here and also within the `FC` + # module, thourough checking is left to the `FC` module. Staging a few + # bogus blocks is not too expensive. + # + # If there is a mere block body error, all that will happen is that + # this block and the rest of the `blocks[]` list is discarded. This + # is also what will happen here if an error is detected (see above for + # erroneously empty `transactions[]`.) + # blocks[n].transactions = bodies[n].transactions blocks[n].uncles = bodies[n].uncles blocks[n].withdrawals = bodies[n].withdrawals @@ -105,7 +114,7 @@ proc blocksFetchCheckImpl( if 0 < blocks.len.uint64: return Opt.some(blocks) - buddy.only.nBdyProcErrors.inc + buddy.incBlkProcErrors() return Opt.none(seq[EthBlock]) # ------------------------------------------------------------------------------ @@ -126,16 +135,27 @@ proc blocksFetch*( ): Future[Opt[seq[EthBlock]]] {.async: (raises: []).} = ## From the p2p/ethXX network fetch as many blocks as given as argument `num`. - let - ctx = buddy.ctx + let ctx = buddy.ctx + + # Make sure that this sync peer is not banned from block processing, already. 
+ if nProcBlocksErrThreshold < buddy.nBlkProcErrors(): + buddy.ctrl.zombie = true + return Opt.none(seq[EthBlock]) # stop, exit this function - # Fetch nect available interval + let + # Fetch next available interval iv = ctx.blocksUnprocFetch(num).valueOr: return Opt.none(seq[EthBlock]) - # Fetch blocks and verify result + # Fetch blocks and pre-verify result rc = await buddy.blocksFetchCheckImpl(iv, info) + # Job might have been cancelled or completed while downloading blocks. + # If so, no more bookkeeping of blocks must take place. The *books* + # might have been reset and prepared for the next stage. + if ctx.blocksModeStopped(): + return Opt.none(seq[EthBlock]) # stop, exit this function + # Commit blocks received if rc.isErr: ctx.blocksUnprocCommit(iv, iv) @@ -149,6 +169,7 @@ proc blocksImport*( ctx: BeaconCtxRef; maybePeer: Opt[Peer]; blocks: seq[EthBlock]; + peerID: Hash; info: static[string]; ) {.async: (raises: []).} = ## Import/execute a list of argument blocks. The function sets the global @@ -162,6 +183,7 @@ proc blocksImport*( nBlocks=iv.len, base=ctx.chain.baseNumber.bnStr, head=ctx.chain.latestNumber.bnStr + var isError = false block loop: for n in 0 ..< blocks.len: let nBn = blocks[n].header.number @@ -171,38 +193,64 @@ proc blocksImport*( nthBn=nBn.bnStr, nthHash=ctx.getNthHash(blocks, n).short, B=ctx.chain.baseNumber.bnStr, L=ctx.chain.latestNumber.bnStr - ctx.blk.topImported = nBn # well, not really imported + ctx.subState.top = nBn # well, not really imported continue try: - (await ctx.chain.queueImportBlock(blocks[n])).isOkOr: - # The way out here is simply to re-compile the block queue. At any - # point, the `FC` module data area might have been moved to a new - # canonical branch. 
- # - ctx.blk.cancelRequest = true # So require reorg - warn info & ": import block error (reorg triggered)", n, iv, - nBlocks=iv.len, nthBn=nBn.bnStr, - nthHash=ctx.getNthHash(blocks, n).short, - B=ctx.chain.baseNumber.bnStr, L=ctx.chain.latestNumber.bnStr, - `error`=error - break loop + (await ctx.chain.queueImportBlock blocks[n]).isOkOr: + isError = true + + # Mark peer that produced that unusable headers list as a zombie + ctx.setBlkProcFail peerID + + # Check whether it is enough to skip the current blocks list, only + if ctx.subState.procFailNum != nBn: + ctx.subState.procFailNum = nBn # OK, this is a new block + ctx.subState.procFailCount = 1 + + else: + ctx.subState.procFailCount.inc # block num was seen, already + + # Cancel the whole download if needed + if nImportBlocksErrThreshold < ctx.subState.procFailCount: + ctx.subState.cancelRequest = true # So require queue reset + + # Proper logging .. + if ctx.subState.cancelRequest: + warn "Import error (cancel this session)", n, iv, + nBlocks=iv.len, nthBn=nBn.bnStr, + nthHash=ctx.getNthHash(blocks, n).short, + base=ctx.chain.baseNumber.bnStr, + head=ctx.chain.latestNumber.bnStr, + blkFailCount=ctx.subState.procFailCount, `error`=error + else: + info "Import error (skip remaining)", n, iv, + nBlocks=iv.len, nthBn=nBn.bnStr, + nthHash=ctx.getNthHash(blocks, n).short, + base=ctx.chain.baseNumber.bnStr, + head=ctx.chain.latestNumber.bnStr, + blkFailCount=ctx.subState.procFailCount, `error`=error + + break loop # stop # isOk => next instruction except CancelledError: break loop # shutdown? - ctx.blk.topImported = nBn # Block imported OK + ctx.subState.top = nBn # Block imported OK # Allow pseudo/async thread switch. 
(await ctx.updateAsyncTasks()).isOkOr: break loop - info "Imported blocks", iv=(if iv.minPt <= ctx.blk.topImported: - (iv.minPt, ctx.blk.topImported).bnStr else: "n/a"), - nBlocks=(ctx.blk.topImported - iv.minPt + 1), - nFailed=(iv.maxPt - ctx.blk.topImported), + if not isError: + ctx.resetBlkProcErrors peerID + + info "Imported blocks", iv=(if iv.minPt <= ctx.subState.top: + (iv.minPt, ctx.subState.top).bnStr else: "n/a"), + nBlocks=(ctx.subState.top - iv.minPt + 1), + nFailed=(iv.maxPt - ctx.subState.top), base=ctx.chain.baseNumber.bnStr, head=ctx.chain.latestNumber.bnStr, - target=ctx.head.bnStr, targetHash=ctx.headHash.short + target=ctx.subState.head.bnStr, targetHash=ctx.subState.headHash.short # ------------------------------------------------------------------------------ # End diff --git a/execution_chain/sync/beacon/worker/headers_staged.nim b/execution_chain/sync/beacon/worker/headers_staged.nim index fc1b761aed..e31bc99b81 100644 --- a/execution_chain/sync/beacon/worker/headers_staged.nim +++ b/execution_chain/sync/beacon/worker/headers_staged.nim @@ -15,7 +15,7 @@ import pkg/eth/common, pkg/stew/[interval_set, sorted_set], ../worker_desc, - ./headers_staged/[headers_fetch, staged_collect], + ./headers_staged/[headers_fetch, staged_collect, staged_headers], ./headers_unproc # ------------------------------------------------------------------------------ @@ -83,22 +83,24 @@ proc headersStagedCollect*( # # so any other peer arriving here will see a gap between `top` and # `dangling` which will lead them to fetch opportunistcally. - if top < ctx.dangling.number: + # + let dangling = ctx.hdrCache.antecedent.number + if top < dangling: break # Throw away overlap (should not happen anyway) - if ctx.dangling.number < top: - discard ctx.headersUnprocFetch(top-ctx.dangling.number).expect("iv") + if dangling < top: + discard ctx.headersUnprocFetch(top - dangling).expect("iv") let # Reserve the full range of block numbers so they can be appended in a # row. 
This avoid some fragmentation when header chains are stashed by # multiple peers, i.e. they interleave peer task-wise. - iv = ctx.headersUnprocFetch(nFetchHeadersBatch).valueOr: + iv = ctx.headersUnprocFetch(nFetchHeadersBatchListLen).valueOr: break fetchHeadersBody # done, exit this function # Get parent hash from the most senior stored header - parent = ctx.dangling.parentHash + parent = ctx.hdrCache.antecedent.parentHash # Fetch headers and store them on the header chain cache. The function # returns the last unprocessed block number @@ -125,8 +127,9 @@ proc headersStagedCollect*( ctx.headersUnprocCommit(iv) # all headers processed debug info & ": fetched headers count", peer, - unprocTop=ctx.headersUnprocAvailTop.bnStr, D=ctx.dangling.bnStr, - nStored, nStagedQ=ctx.hdr.staged.len, syncState=($buddy.syncState) + unprocTop=ctx.headersUnprocAvailTop.bnStr, + D=ctx.hdrCache.antecedent.bnStr, nStored, nStagedQ=ctx.hdr.staged.len, + syncState=($buddy.syncState) # Buddy might have been cancelled while downloading headers. if buddy.ctrl.stopped: @@ -136,11 +139,11 @@ proc headersStagedCollect*( # Continue opportunistically fetching by block number rather than hash. The # fetched headers need to be staged and checked/serialised later. - if ctx.hdr.staged.len + ctx.hdr.reserveStaged < headersStagedQueueLengthHwm: + if ctx.hdr.staged.len + ctx.hdr.reserveStaged < headersStagedQueueLengthMax: let # Comment see deterministic case - iv = ctx.headersUnprocFetch(nFetchHeadersBatch).valueOr: + iv = ctx.headersUnprocFetch(nFetchHeadersBatchListLen).valueOr: break fetchHeadersBody # done, exit this function # This record will accumulate the fetched headers. 
It must be on the @@ -225,7 +228,7 @@ proc headersStagedProcess*(buddy: BeaconBuddyRef; info: static[string]): bool = let minNum = qItem.data.revHdrs[^1].number maxNum = qItem.data.revHdrs[0].number - dangling = ctx.dangling.number + dangling = ctx.hdrCache.antecedent.number if maxNum + 1 < dangling: debug info & ": gap, serialisation postponed", peer, qItem=qItem.data.bnStr, D=dangling.bnStr, nStored, @@ -237,32 +240,29 @@ proc headersStagedProcess*(buddy: BeaconBuddyRef; info: static[string]): bool = discard ctx.hdr.staged.delete(qItem.key) # Store headers on database - ctx.hdrCache.put(qItem.data.revHdrs).isOkOr: - ctx.headersUnprocAppend(minNum, maxNum) - + if not buddy.headersStashOnDisk(qItem.data.revHdrs, info): # Error mark buddy that produced that unusable headers list - buddy.incHdrProcErrors qItem.data.peerID + ctx.incHdrProcErrors qItem.data.peerID - debug info & ": discarding staged header list", peer, - qItem=qItem.data.bnStr, D=ctx.dangling.bnStr, nStored, - nDiscarded=qItem.data.revHdrs.len, nSyncPeers=ctx.pool.nBuddies, - `error`=error + ctx.headersUnprocAppend(minNum, maxNum) switchPeer = true break # Antecedent `dangling` of the header cache might not be at `revHdrs[^1]`. 
- let revHdrsLen = maxNum - ctx.dangling.number + 1 + let revHdrsLen = maxNum - ctx.hdrCache.antecedent.number + 1 nStored += revHdrsLen.int # count headers # End while loop if 0 < nStored: - info "Headers serialised and stored", D=ctx.dangling.bnStr, nStored, - nStagedQ=ctx.hdr.staged.len, nSyncPeers=ctx.pool.nBuddies, switchPeer + info "Headers serialised and stored", D=ctx.hdrCache.antecedent.bnStr, + nStored, nStagedQ=ctx.hdr.staged.len, nSyncPeers=ctx.pool.nBuddies, + switchPeer elif 0 < ctx.hdr.staged.len and not switchPeer: - trace info & ": no headers processed", peer, D=ctx.dangling.bnStr, - nStagedQ=ctx.hdr.staged.len, nSyncPeers=ctx.pool.nBuddies + trace info & ": no headers processed", peer, + D=ctx.hdrCache.antecedent.bnStr, nStagedQ=ctx.hdr.staged.len, + nSyncPeers=ctx.pool.nBuddies not switchPeer @@ -276,6 +276,7 @@ proc headersStagedReorg*(ctx: BeaconCtxRef; info: static[string]) = ctx.headersUnprocClear() # clears `unprocessed` and `borrowed` list ctx.hdr.staged.clear() + ctx.subState.reset # ------------------------------------------------------------------------------ # End diff --git a/execution_chain/sync/beacon/worker/headers_staged/headers_fetch.nim b/execution_chain/sync/beacon/worker/headers_staged/headers_fetch.nim index 5775f94469..bfed3d065d 100644 --- a/execution_chain/sync/beacon/worker/headers_staged/headers_fetch.nim +++ b/execution_chain/sync/beacon/worker/headers_staged/headers_fetch.nim @@ -22,8 +22,8 @@ import # ------------------------------------------------------------------------------ proc registerError(buddy: BeaconBuddyRef, slowPeer = false) = - buddy.incHdrRespErrors() - if fetchHeadersReqErrThresholdCount < buddy.nHdrRespErrors: + buddy.only.nRespErrors.hdr.inc + if nFetchHeadersErrThreshold < buddy.only.nRespErrors.hdr: if 1 < buddy.ctx.pool.nBuddies or not slowPeer: buddy.ctrl.zombie = true # abandon slow peer unless last one @@ -32,7 +32,7 @@ proc registerError(buddy: BeaconBuddyRef, slowPeer = false) = # 
------------------------------------------------------------------------------ func hdrErrors*(buddy: BeaconBuddyRef): string = - $buddy.nHdrRespErrors & "/" & $buddy.nHdrProcErrors() + $buddy.only.nRespErrors.hdr & "/" & $buddy.nHdrProcErrors() # ------------------------------------------------------------------------------ # Public functions @@ -80,7 +80,7 @@ proc headersFetchReversed*( # in `rplx` with a violated `req.timeoutAt <= Moment.now()` assertion. resp = await peer.getBlockHeaders(req) except PeerDisconnected as e: - buddy.only.nBdyRespErrors.inc + buddy.only.nRespErrors.hdr.inc buddy.ctrl.zombie = true `info` info & " error", peer, ivReq, nReq=req.maxResults, hash=topHash.toStr, elapsed=(Moment.now() - start).toStr, @@ -129,11 +129,11 @@ proc headersFetchReversed*( # Ban an overly slow peer for a while when seen in a row. Also there is a # mimimum share of the number of requested headers expected, typically 10%. - if fetchHeadersReqErrThresholdZombie < elapsed or - h.len.uint64 * 100 < req.maxResults * fetchHeadersReqMinResponsePC: + if fetchHeadersErrTimeout < elapsed or + h.len.uint64 * 100 < req.maxResults * fetchHeadersMinResponsePC: buddy.registerError() else: - buddy.nHdrRespErrors = 0 # reset error count + buddy.only.nRespErrors.hdr = 0 # reset error count trace trEthRecvReceivedBlockHeaders, peer, nReq=req.maxResults, hash=topHash.toStr, ivResp=BnRange.new(h[^1].number,h[0].number), diff --git a/execution_chain/sync/beacon/worker/headers_staged/staged_collect.nim b/execution_chain/sync/beacon/worker/headers_staged/staged_collect.nim index e36d5eb9f1..6c731c5ff7 100644 --- a/execution_chain/sync/beacon/worker/headers_staged/staged_collect.nim +++ b/execution_chain/sync/beacon/worker/headers_staged/staged_collect.nim @@ -15,36 +15,12 @@ import pkg/eth/common, pkg/stew/interval_set, ../../worker_desc, - ./headers_fetch - -# ------------------------------------------------------------------------------ -# Private logging helpers -# 
------------------------------------------------------------------------------ - -func bnStr(w: seq[Header]): string = - ## Pretty print reverse sequence of headers as interval - if w.len == 0: "n/a" else: (w[^1].number,w[0].number).bnStr + ./[headers_fetch, staged_headers] # ------------------------------------------------------------------------------ # Private helpers # ------------------------------------------------------------------------------ -proc updateBuddyErrorState(buddy: BeaconBuddyRef) = - ## Helper/wrapper - if ((0 < buddy.nHdrRespErrors or - 0 < buddy.nHdrProcErrors) and buddy.ctrl.stopped) or - fetchHeadersReqErrThresholdCount < buddy.nHdrRespErrors or - fetchHeadersProcessErrThresholdCount < buddy.nHdrProcErrors: - - # Make sure that this peer does not immediately reconnect - buddy.ctrl.zombie = true - -proc updateBuddyProcError(buddy: BeaconBuddyRef) = - buddy.incHdrProcErrors() - buddy.updateBuddyErrorState() - -# ------------------ - proc fetchRev( buddy: BeaconBuddyRef; ivReq: BnRange; @@ -54,7 +30,7 @@ proc fetchRev( {.async: (raises: []).} = ## Helper/wrapper var rev = (await buddy.headersFetchReversed(ivReq, parent, info)).valueOr: - buddy.updateBuddyErrorState() + buddy.headersUpdateBuddyErrorState() debug info & ": header fetch error", peer=buddy.peer, ivReq, nReq=ivReq.len, parent=parent.toStr, syncState=($buddy.syncState), hdrErrors=buddy.hdrErrors @@ -119,18 +95,15 @@ proc collectAndStashOnDiskCache*( # Fetch headers for this range of block numbers rev = (await buddy.fetchRev(ivReq, parent, info)).valueOr: - break fetchHeadersBody # error => exit block + break fetchHeadersBody # error => exit block # Job might have been cancelled while downloading headrs if ctx.collectModeStopped(): - break fetchHeadersBody # stop => exit block + break fetchHeadersBody # stop => exit block # Store it on the header chain cache - ctx.hdrCache.put(rev).isOkOr: - buddy.updateBuddyProcError() - debug info & ": header stash error", peer, iv, ivReq, - 
syncState=($buddy.syncState), hdrErrors=buddy.hdrErrors, `error`=error - break fetchHeadersBody # error => exit block + if not buddy.headersStashOnDisk(rev, info): + break fetchHeadersBody # error => exit block # Note that `put()` might not have used all of the `rev[]` items for # updating the antecedent (aka `ctx.dangling`.) So `rev[^1]` might be @@ -149,28 +122,28 @@ proc collectAndStashOnDiskCache*( # Update remaining range to fetch and check for end-of-loop condition let newTopBefore = ivTop - BlockNumber(rev.len) if newTopBefore < iv.minPt: - break # exit while() loop + break # exit while() loop - ivTop = newTopBefore # mostly results in `ivReq.minPt-1` - parent = rev[^1].parentHash # parent hash for next fetch request + ivTop = newTopBefore # mostly results in `ivReq.minPt-1` + parent = rev[^1].parentHash # parent hash for next fetch request # End loop trace info & ": fetched and stored headers", peer, iv, nHeaders=iv.len, - D=ctx.dangling.bnStr, syncState=($buddy.syncState), + D=ctx.hdrCache.antecedent.bnStr, syncState=($buddy.syncState), hdrErrors=buddy.hdrErrors # Reset header process errors (not too many consecutive failures this time) - buddy.nHdrProcErrors = 0 # all OK, reset error count + buddy.nHdrProcErrors = 0 # all OK, reset error count return iv.minPt-1 # Start processing some error or an incomplete fetch/store result trace info & ": partially fetched/stored headers", peer, iv=(if ivTop < iv.maxPt: BnRange.new(ivTop+1,iv.maxPt).bnStr else: "n/a"), - nHeaders=(iv.maxPt-ivTop), D=ctx.dangling.bnStr, + nHeaders=(iv.maxPt-ivTop), D=ctx.hdrCache.antecedent.bnStr, syncState=($buddy.syncState), hdrErrors=buddy.hdrErrors - return ivTop # there is some left over range + return ivTop # there is some left over range proc collectAndStageOnMemQueue*( @@ -188,8 +161,8 @@ proc collectAndStageOnMemQueue*( ctx = buddy.ctx peer = buddy.peer var - ivTop = iv.maxPt # top end of the current range to fetch - parent = EMPTY_ROOT_HASH # parent hash for next fetch request 
+ ivTop = iv.maxPt # top end of the current range to fetch + parent = EMPTY_ROOT_HASH # parent hash for next fetch request block fetchHeadersBody: @@ -203,11 +176,11 @@ proc collectAndStageOnMemQueue*( # Fetch headers for this range of block numbers rev = (await buddy.fetchRev(ivReq, parent, info)).valueOr: - break fetchHeadersBody # error => exit block + break fetchHeadersBody # error => exit block # Job might have been cancelled while downloading headrs if ctx.collectModeStopped(): - break fetchHeadersBody # stop => exit block + break fetchHeadersBody # stop => exit block # While assembling a `LinkedHChainRef`, only boundary checks are used to # verify that the header lists are acceptable. A thorough check will be @@ -216,11 +189,11 @@ proc collectAndStageOnMemQueue*( # Boundary check for block numbers let ivBottom = ivTop - rev.len.uint64 + 1 if rev[0].number != ivTop or rev[^1].number != ivBottom: - buddy.updateBuddyProcError() + buddy.headersUpdateBuddyProcError() debug info & ": header queue error", peer, iv, ivReq, receivedHeaders=rev.bnStr, expected=(ivBottom,ivTop).bnStr, syncState=($buddy.syncState), hdrErrors=buddy.hdrErrors - break fetchHeadersBody # error => exit block + break fetchHeadersBody # error => exit block # Check/update hashes let hash0 = rev[0].computeBlockHash @@ -228,39 +201,40 @@ proc collectAndStageOnMemQueue*( lhc.hash = hash0 else: if lhc.revHdrs[^1].parentHash != hash0: - buddy.updateBuddyProcError() + buddy.headersUpdateBuddyProcError() debug info & ": header queue error", peer, iv, ivReq, hash=hash0.toStr, expected=lhc.revHdrs[^1].parentHash.toStr, - ctrl=buddy.ctrl.state, hdrErrors=buddy.hdrErrors - break fetchHeadersBody # error => exit block + syncState=($buddy.syncState), hdrErrors=buddy.hdrErrors + break fetchHeadersBody # error => exit block lhc.revHdrs &= rev # Update remaining range to fetch and check for end-of-loop condition if ivTop < iv.minPt + rev.len.uint64: - break # exit while loop + break # exit while loop - parent 
= rev[^1].parentHash # continue deterministically - ivTop -= rev.len.uint64 # mostly results in `ivReq.minPt-1` + parent = rev[^1].parentHash # continue deterministically + ivTop -= rev.len.uint64 # mostly results in `ivReq.minPt-1` # End loop trace info & ": fetched and staged all headers", peer, iv, - D=ctx.dangling.bnStr, nHeaders=iv.len, syncState=($buddy.syncState), - hdrErrors=buddy.hdrErrors + D=ctx.hdrCache.antecedent.bnStr, nHeaders=iv.len, + syncState=($buddy.syncState), hdrErrors=buddy.hdrErrors # Reset header process errors (not too many consecutive failures this time) - buddy.nHdrProcErrors = 0 # all OK, reset error count + buddy.nHdrProcErrors = 0 # all OK, reset error count - return iv.minPt-1 # all fetched as instructed + return iv.minPt-1 # all fetched as instructed # End block: `fetchHeadersBody` # Start processing some error or an incomplete fetch/stage result trace info & ": partially fetched and staged headers", peer, iv, - D=ctx.dangling.bnStr, stagedHeaders=lhc.bnStr, nHeaders=lhc.revHdrs.len, - syncState=($buddy.syncState), hdrErrors=buddy.hdrErrors + D=ctx.hdrCache.antecedent.bnStr, stagedHeaders=lhc.bnStr, + nHeaders=lhc.revHdrs.len, syncState=($buddy.syncState), + hdrErrors=buddy.hdrErrors - return ivTop # there is some left over range + return ivTop # there is some left over range # ------------------------------------------------------------------------------ # End diff --git a/execution_chain/sync/beacon/worker/headers_staged/staged_headers.nim b/execution_chain/sync/beacon/worker/headers_staged/staged_headers.nim new file mode 100644 index 0000000000..dcac96050f --- /dev/null +++ b/execution_chain/sync/beacon/worker/headers_staged/staged_headers.nim @@ -0,0 +1,70 @@ +# Nimbus +# Copyright (c) 2023-2025 Status Research & Development GmbH +# Licensed and distributed under either of +# * MIT license (license terms in the root directory or at +# https://opensource.org/licenses/MIT). 
+# * Apache v2 license (license terms in the root directory or at +# https://www.apache.org/licenses/LICENSE-2.0). +# at your option. This file may not be copied, modified, or distributed +# except according to those terms. + +{.push raises:[].} + +import + pkg/[chronicles, chronos], + pkg/eth/common, + pkg/stew/interval_set, + ../../worker_desc, + ./headers_fetch + +# ------------------------------------------------------------------------------ +# Public helper functions +# ------------------------------------------------------------------------------ + +proc headersUpdateBuddyErrorState*(buddy: BeaconBuddyRef) = + ## Helper/wrapper + if ((0 < buddy.only.nRespErrors.hdr or + 0 < buddy.nHdrProcErrors()) and buddy.ctrl.stopped) or + nFetchHeadersErrThreshold < buddy.only.nRespErrors.hdr or + nProcHeadersErrThreshold < buddy.nHdrProcErrors(): + + # Make sure that this peer does not immediately reconnect + buddy.ctrl.zombie = true + +proc headersUpdateBuddyProcError*(buddy: BeaconBuddyRef) = + buddy.incHdrProcErrors() + buddy.headersUpdateBuddyErrorState() + +# ----------------- + +proc headersStashOnDisk*( + buddy: BeaconBuddyRef; + revHdrs: seq[Header]; + info: static[string]; + ): bool = + ## Convenience wrapper, makes it easy to produce comparable messages + ## whenever it is called similar to `blocksImport()`. 
+ let + ctx = buddy.ctx + d9 = ctx.hdrCache.antecedent.number # for logging + rc = ctx.hdrCache.put(revHdrs) + + if rc.isErr: + buddy.headersUpdateBuddyProcError() + debug info & ": header stash error", peer=buddy.peer, iv=revHdrs.bnStr, + syncState=($buddy.syncState), hdrErrors=buddy.hdrErrors, error=rc.error + + let d0 = ctx.hdrCache.antecedent.number + info "Cached headers", iv=(if d0 < d9: (d0,d9-1).bnStr else: "n/a"), + nHeaders=(d9 - d0), + nSkipped=(if rc.isErr: 0u64 + elif revHdrs[^1].number <= d0: (d0 - revHdrs[^1].number) + else: revHdrs.len.uint64), + base=ctx.chain.baseNumber.bnStr, head=ctx.chain.latestNumber.bnStr, + target=ctx.subState.head.bnStr, targetHash=ctx.subState.headHash.short + + rc.isOk + +# ------------------------------------------------------------------------------ +# End +# ------------------------------------------------------------------------------ diff --git a/execution_chain/sync/beacon/worker/headers_staged/staged_queue.nim b/execution_chain/sync/beacon/worker/headers_staged/staged_queue.nim index 3bc48c0b85..f6a20c1614 100644 --- a/execution_chain/sync/beacon/worker/headers_staged/staged_queue.nim +++ b/execution_chain/sync/beacon/worker/headers_staged/staged_queue.nim @@ -38,6 +38,6 @@ func headersStagedQueueClear*(ctx: BeaconCtxRef) = func headersStagedQueueInit*(ctx: BeaconCtxRef) = ## Constructor - ctx.hdr.staged = LinkedHChainQueue.init() + ctx.hdr.staged = StagedHeaderQueue.init() # End diff --git a/execution_chain/sync/beacon/worker/helpers.nim b/execution_chain/sync/beacon/worker/helpers.nim index ea4fb57201..82df9be7a4 100644 --- a/execution_chain/sync/beacon/worker/helpers.nim +++ b/execution_chain/sync/beacon/worker/helpers.nim @@ -41,9 +41,14 @@ func bnStr*(w: seq[EthBlock]): string = if w.len == 0: "n/a" else: (w[0].header.number, w[^1].header.number).bnStr +func bnStr*(rev: seq[Header]): string = + ## Pretty print *reverse* sequence of headers as interval + if rev.len == 0: "n/a" else: 
(rev[^1].number,rev[0].number).bnStr + func bnStr*(w: Interval[BlockNumber,uint64]): string = (w.minPt,w.maxPt).bnStr + func toStr*(a: chronos.Duration): string = var s = a.toString 2 if s.len == 0: s="0" diff --git a/execution_chain/sync/beacon/worker/start_stop.nim b/execution_chain/sync/beacon/worker/start_stop.nim index dc225359ef..d60684d0ec 100644 --- a/execution_chain/sync/beacon/worker/start_stop.nim +++ b/execution_chain/sync/beacon/worker/start_stop.nim @@ -29,8 +29,13 @@ type proc querySyncProgress(ctx: BeaconCtxRef): SyncStateData = ## Syncer status query function (for call back closure) + if blocks <= ctx.pool.lastState: + return (ctx.hdrCache.antecedent.number, ctx.subState.top, ctx.subState.head) + if headers <= ctx.pool.lastState: - return (ctx.chain.baseNumber, ctx.dangling.number, ctx.head.number) + let b = ctx.chain.baseNumber + return (b, b, ctx.subState.head) + # (0,0,0) # ------------------------------------------------------------------------------ @@ -50,7 +55,7 @@ proc setupServices*(ctx: BeaconCtxRef; info: static[string]) = ctx.hibernate = true # Set up header cache descriptor - ctx.pool.hdrCache = HeaderChainRef.init(ctx.pool.chain) + ctx.pool.hdrCache = HeaderChainRef.init(ctx.chain) # Set up the notifier informing when a new syncer session has started. ctx.hdrCache.start proc() = @@ -58,10 +63,11 @@ proc setupServices*(ctx: BeaconCtxRef; info: static[string]) = ctx.updateFromHibernateSetTarget info # Manual first run? 
- if 0 < ctx.clReq.consHead.number: - debug info & ": pre-set target", consHead=ctx.clReq.consHead.bnStr, - finalHash=ctx.clReq.finalHash.short - ctx.hdrCache.headTargetUpdate(ctx.clReq.consHead, ctx.clReq.finalHash) + if 0 < ctx.pool.clReq.consHead.number: + debug info & ": pre-set target", consHead=ctx.pool.clReq.consHead.bnStr, + finalHash=ctx.pool.clReq.finalHash.short + ctx.hdrCache.headTargetUpdate( + ctx.pool.clReq.consHead, ctx.pool.clReq.finalHash) # Provide progress info call back handler ctx.pool.chain.com.beaconSyncerProgress = proc(): SyncStateData = @@ -89,13 +95,13 @@ proc startBuddy*(buddy: BeaconBuddyRef): bool = acceptProto(eth68): ctx.pool.nBuddies.inc ctx.pool.blkLastSlowPeer = Opt.none(Hash) - buddy.initHdrProcErrors() + buddy.initProcErrors() return true proc stopBuddy*(buddy: BeaconBuddyRef) = buddy.ctx.pool.nBuddies.dec - buddy.clearHdrProcErrors() + buddy.clearProcErrors() # ------------------------------------------------------------------------------ # End diff --git a/execution_chain/sync/beacon/worker/update.nim b/execution_chain/sync/beacon/worker/update.nim index 6458910f17..42b282f251 100644 --- a/execution_chain/sync/beacon/worker/update.nim +++ b/execution_chain/sync/beacon/worker/update.nim @@ -30,6 +30,9 @@ proc startHibernating(ctx: BeaconCtxRef; info: static[string]) = doAssert ctx.blocksStagedQueueIsEmpty() doAssert ctx.headersUnprocIsEmpty() doAssert ctx.headersStagedQueueIsEmpty() + doAssert ctx.subState.top == 0 + doAssert ctx.subState.head == 0 + doAssert not ctx.subState.cancelRequest ctx.hdrCache.clear() @@ -50,28 +53,32 @@ proc commitCollectHeaders(ctx: BeaconCtxRef; info: static[string]): bool = ctx.hdrCache.commit().isOkOr: trace info & ": cannot finalise header chain", B=ctx.chain.baseNumber.bnStr, L=ctx.chain.latestNumber.bnStr, - D=ctx.dangling.bnStr, H=ctx.head.bnStr, `error`=error + D=ctx.hdrCache.antecedent.bnStr, H=ctx.hdrCache.head.bnStr, + `error`=error return false true proc setupProcessingBlocks(ctx: 
BeaconCtxRef; info: static[string]) = + ## Prepare for blocks processing doAssert ctx.blocksUnprocIsEmpty() doAssert ctx.blocksStagedQueueIsEmpty() + doAssert ctx.subState.top == 0 + doAssert ctx.subState.head == 0 + doAssert not ctx.subState.cancelRequest # Reset for useles block download detection (to avoid deadlock) ctx.pool.failedPeers.clear() ctx.pool.seenData = false - # Prepare for blocks processing - let - d = ctx.dangling.number - h = ctx.head().number + # Re-initialise sub-state variables + ctx.subState.top = ctx.hdrCache.antecedent.number - 1 + ctx.subState.head = ctx.hdrCache.head.number + ctx.subState.headHash = ctx.hdrCache.headHash # Update list of block numbers to process - ctx.blocksUnprocSet(d, h) - ctx.blk.topImported = d - 1 + ctx.blocksUnprocSet(ctx.subState.top + 1, ctx.subState.head) # ------------------------------------------------------------------------------ # Private state transition handlers @@ -86,10 +93,10 @@ func idleNext(ctx: BeaconCtxRef; info: static[string]): SyncState = proc headersNext(ctx: BeaconCtxRef; info: static[string]): SyncState = ## State transition handler if not ctx.pool.seenData and # checks for cul-de-sac syncing - fetchHeadersFailedInitialFailPeersHwm < ctx.pool.failedPeers.len: + nFetchHeadersFailedInitialPeersThreshold < ctx.pool.failedPeers.len: debug info & ": too many failed header peers", failedPeers=ctx.pool.failedPeers.len, - limit=fetchHeadersFailedInitialFailPeersHwm + limit=nFetchHeadersFailedInitialPeersThreshold return headersCancel if ctx.hdrCache.state == collecting: @@ -121,17 +128,16 @@ proc headersFinishNext(ctx: BeaconCtxRef; info: static[string]): SyncState = proc blocksNext(ctx: BeaconCtxRef; info: static[string]): SyncState = ## State transition handler if not ctx.pool.seenData and # checks for cul-de-sac syncing - fetchBodiesFailedInitialFailPeersHwm < ctx.pool.failedPeers.len: + nFetchBodiesFailedInitialPeersThreshold < ctx.pool.failedPeers.len: debug info & ": too many failed block 
peers", failedPeers=ctx.pool.failedPeers.len, - limit=fetchBodiesFailedInitialFailPeersHwm + limit=nFetchBodiesFailedInitialPeersThreshold return blocksCancel - if ctx.blk.cancelRequest: + if ctx.subState.cancelRequest: return blocksCancel - if ctx.blocksStagedQueueIsEmpty() and - ctx.blocksUnprocIsEmpty(): + if ctx.subState.head <= ctx.subState.top: return blocksFinish SyncState.blocks @@ -200,15 +206,22 @@ proc updateSyncState*(ctx: BeaconCtxRef; info: static[string]) = let prevState = ctx.pool.lastState ctx.pool.lastState = newState - # Most states require synchronisation via `poolMode` - if newState notin {idle, SyncState.headers, SyncState.blocks}: - ctx.poolMode = true - info "State change, waiting for sync", prevState, newState, + case newState: + of idle: + info "State changed", prevState, newState, + base=ctx.chain.baseNumber.bnStr, head=ctx.chain.latestNumber.bnStr, nSyncPeers=ctx.pool.nBuddies - else: + + of SyncState.headers, SyncState.blocks: info "State changed", prevState, newState, base=ctx.chain.baseNumber.bnStr, head=ctx.chain.latestNumber.bnStr, - target=ctx.head.bnStr, targetHash=ctx.headHash.short + target=ctx.subState.head.bnStr, targetHash=ctx.subState.headHash.short + + else: + # Most states require synchronisation via `poolMode` + ctx.poolMode = true + info "State change, waiting for sync", prevState, newState, + nSyncPeers=ctx.pool.nBuddies # Final sync scrum layout reached or inconsistent/impossible state if newState == idle: @@ -231,9 +244,11 @@ proc updateFromHibernateSetTarget*( # Update range ctx.headersUnprocSet(b+1, t-1) + ctx.subState.head = t + ctx.subState.headHash = ctx.hdrCache.headHash info "Activating syncer", base=b.bnStr, head=ctx.chain.latestNumber.bnStr, - target=t.bnStr, targetHash=ctx.headHash.short, + target=t.bnStr, targetHash=ctx.subState.headHash.short, nSyncPeers=ctx.pool.nBuddies return diff --git a/execution_chain/sync/beacon/worker/update/metrics.nim b/execution_chain/sync/beacon/worker/update/metrics.nim index 
ca1fe5ca25..7362482bf7 100644 --- a/execution_chain/sync/beacon/worker/update/metrics.nim +++ b/execution_chain/sync/beacon/worker/update/metrics.nim @@ -30,6 +30,9 @@ declareGauge nec_sync_coupler, "" & declareGauge nec_sync_dangling, "" & "Least block number for header chain already fetched" +declareGauge nec_sync_last_block_imported, "" & + "last block successfully imported/executed by FC module" + declareGauge nec_sync_head, "" & "Current sync target block number (if any)" @@ -58,17 +61,18 @@ declareGauge nec_sync_non_peers_connected, "" & template updateMetricsImpl(ctx: BeaconCtxRef) = - metrics.set(nec_base, ctx.chain.baseNumber().int64) - metrics.set(nec_execution_head, ctx.chain.latestNumber().int64) + metrics.set(nec_base, ctx.chain.baseNumber.int64) + metrics.set(nec_execution_head, ctx.chain.latestNumber.int64) var coupler = ctx.headersUnprocTotalBottom() if high(int64).uint64 <= coupler: coupler = 0 metrics.set(nec_sync_coupler, coupler.int64) - metrics.set(nec_sync_dangling, ctx.dangling.number.int64) - metrics.set(nec_sync_head, ctx.head.number.int64) + metrics.set(nec_sync_dangling, ctx.hdrCache.antecedent.number.int64) + metrics.set(nec_sync_last_block_imported, ctx.subState.top.int64) + metrics.set(nec_sync_head, ctx.subState.head.int64) # Show last valid state. 
- let consHeadNumber = ctx.consHeadNumber + let consHeadNumber = ctx.hdrCache.latestConsHeadNumber if 0 < consHeadNumber: metrics.set(nec_sync_consensus_head, consHeadNumber.int64) diff --git a/execution_chain/sync/beacon/worker/update/ticker.nim b/execution_chain/sync/beacon/worker/update/ticker.nim index e9c7fb6dd0..06183b1396 100644 --- a/execution_chain/sync/beacon/worker/update/ticker.nim +++ b/execution_chain/sync/beacon/worker/update/ticker.nim @@ -33,6 +33,7 @@ type latest: BlockNumber coupler: BlockNumber dangling: BlockNumber + top: BlockNumber head: BlockNumber target: BlockNumber activeOk: bool @@ -48,7 +49,6 @@ type nBlkUnprocFragm: int nBlkStaged: int blkStagedBottom: BlockNumber - blkTopImported: BlockNumber state: SyncState nBuddies: int @@ -71,26 +71,26 @@ when enableTicker: proc updater(ctx: BeaconCtxRef): TickerStats = ## Legacy stuff, will be probably be superseded by `metrics` TickerStats( - base: ctx.chain.baseNumber(), - latest: ctx.chain.latestNumber(), + base: ctx.chain.baseNumber, + latest: ctx.chain.latestNumber, coupler: ctx.headersUnprocTotalBottom(), - dangling: ctx.dangling.number, - head: ctx.head.number, - target: ctx.consHeadNumber, + dangling: ctx.hdrCache.antecedent.number, + top: ctx.subState.top, + head: ctx.subState.head, + target: ctx.hdrCache.latestConsHeadNumber, activeOk: ctx.pool.lastState != idle, nHdrStaged: ctx.headersStagedQueueLen(), hdrStagedTop: ctx.headersStagedQueueTopKey(), hdrUnprocTop: ctx.headersUnprocTotalTop(), nHdrUnprocessed: ctx.headersUnprocTotal(), - nHdrUnprocFragm: ctx.hdr.unprocessed.chunks(), + nHdrUnprocFragm: ctx.hdr.unprocessed.chunks, nBlkStaged: ctx.blocksStagedQueueLen(), blkStagedBottom: ctx.blocksStagedQueueBottomKey(), blkUnprocBottom: ctx.blocksUnprocTotalBottom(), nBlkUnprocessed: ctx.blocksUnprocTotal(), - nBlkUnprocFragm: ctx.blk.unprocessed.chunks(), - blkTopImported: ctx.blk.topImported, + nBlkUnprocFragm: ctx.blk.unprocessed.chunks, state: ctx.pool.lastState, nBuddies: 
ctx.pool.nBuddies) @@ -108,7 +108,7 @@ when enableTicker: let B = if data.base == data.latest: "L" else: data.base.bnStr L = if data.latest == data.coupler: "C" else: data.latest.bnStr - I = if data.blkTopImported == 0: "n/a" else : data.blkTopImported.bnStr + I = if data.top == 0: "n/a" else : data.top.bnStr C = if data.coupler == data.dangling: "D" elif data.coupler < high(int64).uint64: data.coupler.bnStr else: "n/a" diff --git a/execution_chain/sync/beacon/worker_const.nim b/execution_chain/sync/beacon/worker_const.nim index 802abe9bcf..c673207e44 100644 --- a/execution_chain/sync/beacon/worker_const.nim +++ b/execution_chain/sync/beacon/worker_const.nim @@ -62,11 +62,12 @@ const # ---------------------- - fetchHeadersFailedInitialFailPeersHwm* = 30 - ## If there are more failing peers than this `hwm` right at the begining - ## of a header chain download scrum (before any data received), then this - ## scrum is discarded and the suncer is reset and suspened (waiting for - ## the next instruction to run a scrum.) + nFetchHeadersFailedInitialPeersThreshold* = 30 + ## If there are more failing peers than this threshold right at the + ## begining of a header chain download scrum (before any data received), + ## then this session (scrum or sprint) is discarded and the suncer is + ## reset and suspened (waiting for the next activation to restart a new + ## session.) nFetchHeadersRequest* = 1_024 ## Number of headers that will be requested with a single `eth/xx` message. @@ -74,56 +75,55 @@ const ## On `Geth`, responses to larger requests are all truncted to 1024 header ## entries (see `Geth` constant `maxHeadersServe`.) - fetchHeadersReqErrThresholdZombie* = chronos.seconds(2) - fetchHeadersReqErrThresholdCount* = 2 + fetchHeadersErrTimeout* = chronos.seconds(2) + nFetchHeadersErrThreshold* = 2 ## Response time allowance. 
If the response time for the set of headers - ## exceeds this threshold for more than `fetchHeadersReqThresholdCount` + ## exceeds this threshold for more than `nFetchHeadersErrThreshold` ## times in a row, then this peer will be banned for a while. - fetchHeadersProcessErrThresholdCount* = 2 - ## Similar to `fetchHeadersReqErrThresholdCount` but for the later part + nProcHeadersErrThreshold* = 2 + ## Similar to `nFetchHeadersErrThreshold` but for the later part ## when errors occur while block headers are queued and further processed. - fetchHeadersReqMinResponsePC* = 10 + fetchHeadersMinResponsePC* = 10 ## Some peers only returned one header at a time. If these peers sit on a ## farm, they might collectively slow down the download process. So this - ## constant sets a percentage of minimum headers needed to return so that - ## the peers is not treated as a slow responder (see above for slow - ## responder count.) + ## constant sets a percentage of minimum headers needed to response with + ## so that the peers is not treated as a slow responder (see also above + ## for slow responder timeout.) - nFetchHeadersBatch* = 8 * nFetchHeadersRequest - ## Length of the request/stage batch. Several headers are consecutively + nFetchHeadersBatchListLen* = 8 * nFetchHeadersRequest + ## Length of a request/stage batch list. Several headers are consecutively ## fetched and stashed together as a single record on the staged queue. - headersStagedQueueLengthHwm* = 8 - ## Limit the number of records in the staged headers queue. - ## - ## Queue entries start accumulating if one peer stalls while fetching the - ## top chain so leaving a gap. This gap must be filled first before - ## inserting the queue into a contiguous chain of headers. + headersStagedQueueLengthMax* = 8 + ## If the staged header queue reaches this many queue objects for + ## serialising and caching on disk, no further objects are added. 
# ---------------------- - fetchBodiesFailedInitialFailPeersHwm* = 50 - ## Similar to `fetchHeadersFailedInitialFailPeersHwm` + nFetchBodiesFailedInitialPeersThreshold* = 50 + ## Similar to `nFetchHeadersFailedInitialPeersThreshold`. + + nFetchBodiesRequest* = 40 + ## Similar to `nFetchHeadersRequest`. - nFetchBodiesRequest* = 64 - ## Similar to `nFetchHeadersRequest` + fetchBodiesErrTimeout* = chronos.seconds(4) + nFetchBodiesErrThreshold* = 2 + ## Similar to `nFetchHeadersErrThreshold`. - fetchBodiesReqErrThresholdZombie* = chronos.seconds(4) - fetchBodiesReqErrThresholdCount* = 2 - ## Similar to `fetchHeadersReqThreshold*` + fetchBodiesMinResponsePC* = 10 + ## Similar to ``fetchHeadersMinResponsePC`. - fetchBodiesProcessErrThresholdCount* = 2 - ## Similar to `fetchHeadersProcessErrThresholdCount`. + nProcBlocksErrThreshold* = 2 + ## Similar to `nProcHeadersErrThreshold`. - fetchBodiesReqMinResponsePC* = 10 - ## Similar to `fetchHeadersReqMinResponsePC` + nImportBlocksErrThreshold* = 2 + ## Abort block import and the whole sync session with it if too many + ## failed imports occur into `FC` module. - blocksStagedQueueLengthHwm* = 2 - ## If the staged block queue exceeds this many number of queue objects for - ## import, no further block objets are added (but the current sub-list is - ## completed.) + blocksStagedQueueLengthMax* = 2 + ## Similar to `headersStagedQueueLengthMax`. 
# ---------------------- @@ -131,10 +131,10 @@ static: doAssert 0 < runsThisManyPeersOnly doAssert 0 < nFetchHeadersRequest - doAssert nFetchHeadersRequest <= nFetchHeadersBatch - doAssert 0 < headersStagedQueueLengthHwm + doAssert nFetchHeadersRequest <= nFetchHeadersBatchListLen + doAssert 0 < headersStagedQueueLengthMax doAssert 0 < nFetchBodiesRequest - doAssert 0 < blocksStagedQueueLengthHwm + doAssert 0 < blocksStagedQueueLengthMax # End diff --git a/execution_chain/sync/beacon/worker_desc.nim b/execution_chain/sync/beacon/worker_desc.nim index 27fcec56ef..a688096d8f 100644 --- a/execution_chain/sync/beacon/worker_desc.nim +++ b/execution_chain/sync/beacon/worker_desc.nim @@ -29,11 +29,11 @@ type BnRange* = Interval[BlockNumber,uint64] ## Single block number interval - LinkedHChainQueue* = SortedSet[BlockNumber,LinkedHChain] + StagedHeaderQueue* = SortedSet[BlockNumber,LinkedHChain] ## Block intervals sorted by largest block number. LinkedHChain* = object - ## Public block items for the `LinkedHChainQueue` list, indexed by the + ## Public block items for the `StagedHeaderQueue` list, indexed by the ## largest block number. The list `revHdrs[]` is reversed, i.e. the largest ## block number has the least index `0`. This makes it easier to grow the ## sequence with parent headers, i.e. decreasing block numbers. @@ -46,8 +46,9 @@ type ## Blocks sorted by least block number. BlocksForImport* = object - ## Block request item sorted by least block number (i.e. from `blocks[0]`.) - blocks*: seq[EthBlock] ## List of blocks for import + ## Blocks list item indexed by least block number (i.e. by `blocks[0]`.) + blocks*: seq[EthBlock] ## List of blocks lineage for import + peerID*: Hash ## For comparing peers # ------------------- @@ -58,29 +59,38 @@ type # ------------------- + SyncSubState* = object + ## Bundelled state variables, easy to clear all with one `reset`. 
+ top*: BlockNumber ## For locally syncronising block import + head*: BlockNumber ## Copy of `ctx.hdrCache.head()` + headHash*: Hash32 ## Copy of `ctx.hdrCache.headHash()` + cancelRequest*: bool ## Cancel block sync via state machine + procFailNum*: BlockNumber ## Block (or header) error location + procFailCount*: uint8 ## Number of failures at location + HeaderFetchSync* = object ## Header sync staging area unprocessed*: BnRangeSet ## Block or header ranges to fetch borrowed*: BnRangeSet ## Fetched/locked ranges - staged*: LinkedHChainQueue ## Blocks fetched but not stored yet + staged*: StagedHeaderQueue ## Blocks fetched but not stored yet reserveStaged*: int ## Pre-book staged slot temporarily BlocksFetchSync* = object ## Block sync staging area unprocessed*: BnRangeSet ## Blocks download requested borrowed*: BnRangeSet ## Fetched/locked fetched ranges - topImported*: BlockNumber ## For syncronising opportunistic import staged*: StagedBlocksQueue ## Blocks ready for import reserveStaged*: int ## Pre-book staged slot temporarily - cancelRequest*: bool ## Cancel block sync via state machine # ------------------- + BuddyError* = tuple + ## Count fetching or processing errors + hdr, blk: uint8 + BeaconBuddyData* = object ## Local descriptor data extension - nHdrRespErrors*: uint8 ## Number of errors/slow responses in a row - nBdyRespErrors*: uint8 ## Ditto for bodies - nBdyProcErrors*: uint8 ## Number of body post processing errors + nRespErrors*: BuddyError ## Number of errors/slow responses in a row # Debugging and logging. 
nMultiLoop*: int ## Number of runs @@ -94,6 +104,7 @@ type lastState*: SyncState ## Last known layout state hdrSync*: HeaderFetchSync ## Syncing by linked header chains blkSync*: BlocksFetchSync ## For importing/executing blocks + subState*: SyncSubState ## Additional state variables nextMetricsUpdate*: Moment ## For updating metrics nextAsyncNanoSleep*: Moment ## Use nano-sleeps for task switch @@ -101,7 +112,7 @@ type hdrCache*: HeaderChainRef ## Currently in tandem with `chain` # Info, debugging, and error handling stuff - hdrProcError*: Table[Hash,uint8] ## Some globally accessible header errors + nProcError*: Table[Hash,BuddyError] ## Per peer processing error blkLastSlowPeer*: Opt[Hash] ## Register slow peer when last one failedPeers*: HashSet[Hash] ## Detect dead end sync by collecting peers seenData*: bool ## Set `true` is data were fetched, already @@ -120,28 +131,6 @@ type # Public helpers # ------------------------------------------------------------------------------ -func hdrCache*(ctx: BeaconCtxRef): HeaderChainRef = - ## Shortcut - ctx.pool.hdrCache - -func head*(ctx: BeaconCtxRef): Header = - ## Shortcut - ctx.hdrCache.head() - -func headHash*(ctx: BeaconCtxRef): Hash32 = - ## Shortcut - ctx.hdrCache.headHash() - -func dangling*(ctx: BeaconCtxRef): Header = - ## Shortcut - ctx.hdrCache.antecedent() - -func consHeadNumber*(ctx: BeaconCtxRef): BlockNumber = - ## Shortcut - ctx.hdrCache.latestConsHeadNumber() - -# ------------ - func hdr*(ctx: BeaconCtxRef): var HeaderFetchSync = ## Shortcut ctx.pool.hdrSync @@ -150,37 +139,20 @@ func blk*(ctx: BeaconCtxRef): var BlocksFetchSync = ## Shortcut ctx.pool.blkSync -func clReq*(ctx: BeaconCtxRef): var SyncClMesg = +func subState*(ctx: BeaconCtxRef): var SyncSubState = ## Shortcut - ctx.pool.clReq + ctx.pool.subState func chain*(ctx: BeaconCtxRef): ForkedChainRef = ## Getter ctx.pool.chain -func db*(ctx: BeaconCtxRef): CoreDbRef = - ## Getter - ctx.pool.chain.db +func hdrCache*(ctx: BeaconCtxRef): 
HeaderChainRef = + ## Shortcut + ctx.pool.hdrCache # ----- -func syncState*( - ctx: BeaconCtxRef; - ): (SyncState,HeaderChainMode,bool) = - ## Getter, triple of relevant run-time states - (ctx.pool.lastState, - ctx.hdrCache.state, - ctx.poolMode) - -func syncState*( - buddy: BeaconBuddyRef; - ): (BuddyRunState,SyncState,HeaderChainMode,bool) = - ## Getter, also includes buddy state - (buddy.ctrl.state, - buddy.ctx.pool.lastState, - buddy.ctx.hdrCache.state, - buddy.ctx.poolMode) - func hibernate*(ctx: BeaconCtxRef): bool = ## Getter, re-interpretation of the daemon flag for reduced service mode # No need for running the daemon with reduced service mode. So it is @@ -197,56 +169,89 @@ proc `hibernate=`*(ctx: BeaconCtxRef; val: bool) = # ----- -proc nHdrRespErrors*(buddy: BeaconBuddyRef): int = - ## Getter, returns the number of `resp` errors for argument `buddy` - buddy.only.nHdrRespErrors.int - -proc `nHdrRespErrors=`*(buddy: BeaconBuddyRef; count: uint8) = - ## Setter, set arbitrary `resp` error count for argument `buddy`. - buddy.only.nHdrRespErrors = count +func syncState*( + ctx: BeaconCtxRef; + ): (SyncState,HeaderChainMode,bool) = + ## Getter, triple of relevant run-time states + (ctx.pool.lastState, + ctx.hdrCache.state, + ctx.poolMode) -proc incHdrRespErrors*(buddy: BeaconBuddyRef) = - ## Increment `resp` error count for for argument `buddy`. 
- buddy.only.nHdrRespErrors.inc +func syncState*( + buddy: BeaconBuddyRef; + ): (BuddyRunState,SyncState,HeaderChainMode,bool) = + ## Getter, also includes buddy state + (buddy.ctrl.state, + buddy.ctx.pool.lastState, + buddy.ctx.hdrCache.state, + buddy.ctx.poolMode) +# ----- -proc initHdrProcErrors*(buddy: BeaconBuddyRef) = +proc initProcErrors*(buddy: BeaconBuddyRef) = ## Create error slot for argument `buddy` - buddy.ctx.pool.hdrProcError[buddy.peerID] = 0u8 + buddy.ctx.pool.nProcError[buddy.peerID] = (0u8,0u8) -proc clearHdrProcErrors*(buddy: BeaconBuddyRef) = +proc clearProcErrors*(buddy: BeaconBuddyRef) = ## Delete error slot for argument `buddy` - buddy.ctx.pool.hdrProcError.del buddy.peerID - doAssert buddy.ctx.pool.hdrProcError.len <= buddy.ctx.pool.nBuddies + buddy.ctx.pool.nProcError.del buddy.peerID + doAssert buddy.ctx.pool.nProcError.len <= buddy.ctx.pool.nBuddies + +# ----- proc nHdrProcErrors*(buddy: BeaconBuddyRef): int = ## Getter, returns the number of `proc` errors for argument `buddy` - buddy.ctx.pool.hdrProcError.withValue(buddy.peerID, val): - return val[].int + buddy.ctx.pool.nProcError.withValue(buddy.peerID, val): + return val.hdr.int proc `nHdrProcErrors=`*(buddy: BeaconBuddyRef; count: uint8) = ## Setter, set arbitrary `proc` error count for argument `buddy`. Due ## to (hypothetical) hash collisions, the error register might have ## vanished in case a new one is instantiated. - buddy.ctx.pool.hdrProcError.withValue(buddy.peerID, val): - val[] = count + buddy.ctx.pool.nProcError.withValue(buddy.peerID, val): + val.hdr = count do: - buddy.ctx.pool.hdrProcError[buddy.peerID] = count + buddy.ctx.pool.nProcError[buddy.peerID] = (count,0u8) proc incHdrProcErrors*(buddy: BeaconBuddyRef) = ## Increment `proc` error count for for argument `buddy`. Due to ## (hypothetical) hash collisions, the error register might have ## vanished in case a new one is instantiated. 
- buddy.ctx.pool.hdrProcError.withValue(buddy.peerID, val): - val[].inc + buddy.ctx.pool.nProcError.withValue(buddy.peerID, val): + val.hdr.inc do: - buddy.ctx.pool.hdrProcError[buddy.peerID] = 1u8 + buddy.ctx.pool.nProcError[buddy.peerID] = (1u8,0u8) -proc incHdrProcErrors*(buddy: BeaconBuddyRef; peerID: Hash) = +proc incHdrProcErrors*(ctx: BeaconCtxRef; peerID: Hash) = ## Increment `proc` error count for for argument `peerID` entry if it ## has a slot. Otherwise the instruction is ignored. - buddy.ctx.pool.hdrProcError.withValue(peerID, val): - val[].inc + ctx.pool.nProcError.withValue(peerID, val): + val.hdr.inc + +# ----- + +proc nBlkProcErrors*(buddy: BeaconBuddyRef): int = + ## Getter, similar to `nHdrProcErrors()` + buddy.ctx.pool.nProcError.withValue(buddy.peerID, val): + return val.blk.int + +proc incBlkProcErrors*(buddy: BeaconBuddyRef) = + ## Increment `proc` error count, similar to `incHdrProcErrors()` + buddy.ctx.pool.nProcError.withValue(buddy.peerID, val): + val.blk.inc + do: + buddy.ctx.pool.nProcError[buddy.peerID] = (0u8,1u8) + +proc setBlkProcFail*(ctx: BeaconCtxRef; peerID: Hash) = + ## Set `proc` error count high enough so that the implied sync peer will + ## be zombified on the next attempt to download data. + ctx.pool.nProcError.withValue(peerID, val): + val.blk = nProcBlocksErrThreshold + 1 + +proc resetBlkProcErrors*(ctx: BeaconCtxRef; peerID: Hash) = + ## Reset `proc` error count. 
+ ctx.pool.nProcError.withValue(peerID, val): + val.blk = 0 # ------------------------------------------------------------------------------ # End From ed78f6b3698ecfc3d4d332ba8ae5e56847399c39 Mon Sep 17 00:00:00 2001 From: andri lim Date: Fri, 30 May 2025 08:26:18 +0700 Subject: [PATCH 053/138] Move PooledTransaction from nim-eth to execution client (#3346) --- execution_chain/core/eip4844.nim | 1 + execution_chain/core/pooled_txs.nim | 32 ++ execution_chain/core/pooled_txs_rlp.nim | 84 +++++ execution_chain/core/tx_pool.nim | 3 +- execution_chain/core/tx_pool/tx_desc.nim | 1 + execution_chain/core/tx_pool/tx_item.nim | 1 + .../db/aristo/aristo_check/check_top.nim | 2 +- execution_chain/rpc/server_api.nim | 1 + .../sync/wire_protocol/handler.nim | 2 +- .../sync/wire_protocol/requester.nim | 1 + .../sync/wire_protocol/responder.nim | 1 + execution_chain/transaction.nim | 1 + execution_chain/utils/debug.nim | 1 + .../nodocker/engine/cancun/helpers.nim | 1 + .../engine/cancun/step_devp2p_pooledtx.nim | 5 +- .../engine/engine/invalid_payload.nim | 3 +- .../nodocker/engine/engine/prev_randao.nim | 5 +- .../nodocker/engine/engine/reorg.nim | 3 +- .../nodocker/engine/engine_client.nim | 1 + hive_integration/nodocker/engine/test_env.nim | 1 + .../nodocker/engine/tx_sender.nim | 3 +- hive_integration/nodocker/rpc/client.nim | 1 + hive_integration/nodocker/rpc/vault.nim | 1 + tests/all_tests.nim | 1 + tests/replay/pp.nim | 1 + tests/test_pooled_tx.nim | 300 ++++++++++++++++++ tests/test_txpool.nim | 1 + vendor/nim-eth | 2 +- 28 files changed, 449 insertions(+), 11 deletions(-) create mode 100644 execution_chain/core/pooled_txs.nim create mode 100644 execution_chain/core/pooled_txs_rlp.nim create mode 100644 tests/test_pooled_tx.nim diff --git a/execution_chain/core/eip4844.nim b/execution_chain/core/eip4844.nim index 298140fe6b..bf5f803a8b 100644 --- a/execution_chain/core/eip4844.nim +++ b/execution_chain/core/eip4844.nim @@ -14,6 +14,7 @@ import results, stint, ./eip7691, 
+ ./pooled_txs, ./lazy_kzg as kzg, ../constants, ../common/common diff --git a/execution_chain/core/pooled_txs.nim b/execution_chain/core/pooled_txs.nim new file mode 100644 index 0000000000..a88b1bf981 --- /dev/null +++ b/execution_chain/core/pooled_txs.nim @@ -0,0 +1,32 @@ +# nimbus-execution-client +# Copyright (c) 2025 Status Research & Development GmbH +# Licensed under either of +# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE)) +# * MIT license ([LICENSE-MIT](LICENSE-MIT)) +# at your option. +# This file may not be copied, modified, or distributed except according to +# those terms. + +{.push raises: [].} + +import + eth/common/transactions + +export + transactions + +type + # 32 -> UInt256 + # 4096 -> FIELD_ELEMENTS_PER_BLOB + NetworkBlob* = array[32*4096, byte] + + BlobsBundle* = object + commitments*: seq[KzgCommitment] + proofs*: seq[KzgProof] + blobs*: seq[NetworkBlob] + + NetworkPayload* = ref BlobsBundle + + PooledTransaction* = object + tx*: Transaction + networkPayload*: NetworkPayload # EIP-4844 diff --git a/execution_chain/core/pooled_txs_rlp.nim b/execution_chain/core/pooled_txs_rlp.nim new file mode 100644 index 0000000000..4e711f9932 --- /dev/null +++ b/execution_chain/core/pooled_txs_rlp.nim @@ -0,0 +1,84 @@ +# nimbus-execution-client +# Copyright (c) 2025 Status Research & Development GmbH +# Licensed under either of +# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE)) +# * MIT license ([LICENSE-MIT](LICENSE-MIT)) +# at your option. +# This file may not be copied, modified, or distributed except according to +# those terms. 
+ +{.push raises: [].} + +import + eth/common/transactions_rlp {.all.}, + ./pooled_txs + +export + transactions_rlp, + pooled_txs + +proc append(w: var RlpWriter, networkPayload: NetworkPayload) = + w.append(networkPayload.blobs) + w.append(networkPayload.commitments) + w.append(networkPayload.proofs) + +proc append*(w: var RlpWriter, tx: PooledTransaction) = + if tx.tx.txType != TxLegacy: + w.append(tx.tx.txType) + if tx.networkPayload != nil: + w.startList(4) # spec: rlp([tx_payload, blobs, commitments, proofs]) + w.appendTxPayload(tx.tx) + if tx.networkPayload != nil: + w.append(tx.networkPayload) + +proc read(rlp: var Rlp, T: type NetworkPayload): T {.raises: [RlpError].} = + result = NetworkPayload() + rlp.read(result.blobs) + rlp.read(result.commitments) + rlp.read(result.proofs) + +proc readTxTyped(rlp: var Rlp, tx: var PooledTransaction) {.raises: [RlpError].} = + let + txType = rlp.readTxType() + hasNetworkPayload = + if txType == TxEip4844: + rlp.listLen == 4 + else: + false + if hasNetworkPayload: + rlp.tryEnterList() # spec: rlp([tx_payload, blobs, commitments, proofs]) + rlp.readTxPayload(tx.tx, txType) + if hasNetworkPayload: + rlp.read(tx.networkPayload) + +proc read*(rlp: var Rlp, T: type PooledTransaction): T {.raises: [RlpError].} = + if rlp.isList: + rlp.readTxLegacy(result.tx) + else: + rlp.readTxTyped(result) + +proc read*( + rlp: var Rlp, T: (type seq[PooledTransaction]) | (type openArray[PooledTransaction]) +): seq[PooledTransaction] {.raises: [RlpError].} = + if not rlp.isList: + raise newException( + RlpTypeMismatch, "PooledTransaction list expected, but source RLP is not a list" + ) + for item in rlp: + var tx: PooledTransaction + if item.isList: + item.readTxLegacy(tx.tx) + else: + var rr = rlpFromBytes(rlp.read(seq[byte])) + rr.readTxTyped(tx) + result.add tx + +proc append*( + rlpWriter: var RlpWriter, txs: seq[PooledTransaction] | openArray[PooledTransaction] +) = + rlpWriter.startList(txs.len) + for tx in txs: + if tx.tx.txType == 
TxLegacy: + rlpWriter.append(tx) + else: + rlpWriter.append(rlp.encode(tx)) diff --git a/execution_chain/core/tx_pool.nim b/execution_chain/core/tx_pool.nim index b29299819d..705cb479ca 100644 --- a/execution_chain/core/tx_pool.nim +++ b/execution_chain/core/tx_pool.nim @@ -42,7 +42,8 @@ import ./tx_pool/tx_item, ./tx_pool/tx_desc, ./tx_pool/tx_packer, - ./chain/forked_chain + ./chain/forked_chain, + ./pooled_txs from eth/common/eth_types_rlp import rlpHash diff --git a/execution_chain/core/tx_pool/tx_desc.nim b/execution_chain/core/tx_pool/tx_desc.nim index d7ddc33b24..3aee08dcc1 100644 --- a/execution_chain/core/tx_pool/tx_desc.nim +++ b/execution_chain/core/tx_pool/tx_desc.nim @@ -27,6 +27,7 @@ import ../pow/header, ../eip4844, ../validate, + ../pooled_txs, ./tx_tabs, ./tx_item diff --git a/execution_chain/core/tx_pool/tx_item.nim b/execution_chain/core/tx_pool/tx_item.nim index 8cf56263e2..b83e532bc2 100644 --- a/execution_chain/core/tx_pool/tx_item.nim +++ b/execution_chain/core/tx_pool/tx_item.nim @@ -17,6 +17,7 @@ import std/[hashes, times], results, + ../pooled_txs, ../../utils/utils, ../../transaction diff --git a/execution_chain/db/aristo/aristo_check/check_top.nim b/execution_chain/db/aristo/aristo_check/check_top.nim index a8f5113e8e..47627a1cbe 100644 --- a/execution_chain/db/aristo/aristo_check/check_top.nim +++ b/execution_chain/db/aristo/aristo_check/check_top.nim @@ -11,7 +11,7 @@ {.push raises: [].} import - std/[sequtils, sets, typetraits], + std/[sequtils, sets], eth/common, results, ".."/[aristo_desc, aristo_get, aristo_layers, aristo_serialise, aristo_utils] diff --git a/execution_chain/rpc/server_api.nim b/execution_chain/rpc/server_api.nim index 8488875284..5d42eae8c3 100644 --- a/execution_chain/rpc/server_api.nim +++ b/execution_chain/rpc/server_api.nim @@ -26,6 +26,7 @@ import ../transaction/call_evm, ../evm/evm_errors, ../core/eip4844, + ../core/pooled_txs_rlp, ./rpc_types, ./rpc_utils, ./filters diff --git 
a/execution_chain/sync/wire_protocol/handler.nim b/execution_chain/sync/wire_protocol/handler.nim index 7047a41b4e..f266592dd2 100644 --- a/execution_chain/sync/wire_protocol/handler.nim +++ b/execution_chain/sync/wire_protocol/handler.nim @@ -15,7 +15,7 @@ import stew/endians2, ./types, ./requester, - ../../core/[chain, tx_pool], + ../../core/[chain, tx_pool, pooled_txs_rlp], ../../networking/p2p logScope: diff --git a/execution_chain/sync/wire_protocol/requester.nim b/execution_chain/sync/wire_protocol/requester.nim index ad269fd78e..270e035757 100644 --- a/execution_chain/sync/wire_protocol/requester.nim +++ b/execution_chain/sync/wire_protocol/requester.nim @@ -11,6 +11,7 @@ import chronos, eth/common, ./types, + ../../core/pooled_txs_rlp, ../../networking/rlpx, ../../networking/p2p_types diff --git a/execution_chain/sync/wire_protocol/responder.nim b/execution_chain/sync/wire_protocol/responder.nim index bd19678bbe..ad8ee14c95 100644 --- a/execution_chain/sync/wire_protocol/responder.nim +++ b/execution_chain/sync/wire_protocol/responder.nim @@ -16,6 +16,7 @@ import ./trace_config, ../../utils/utils, ../../common/logging, + ../../core/pooled_txs_rlp, ../../networking/p2p_protocol_dsl, ../../networking/p2p_types diff --git a/execution_chain/transaction.nim b/execution_chain/transaction.nim index c418c78501..af1dcd6835 100644 --- a/execution_chain/transaction.nim +++ b/execution_chain/transaction.nim @@ -7,6 +7,7 @@ import ./[constants], + ./core/pooled_txs_rlp, eth/common/[addresses, keys, transactions, transactions_rlp, transaction_utils] export addresses, keys, transactions diff --git a/execution_chain/utils/debug.nim b/execution_chain/utils/debug.nim index c3c64b86ac..5c5e6a53a3 100644 --- a/execution_chain/utils/debug.nim +++ b/execution_chain/utils/debug.nim @@ -15,6 +15,7 @@ import ../evm/state, ../evm/types, ../db/ledger, + ../core/pooled_txs, ./utils, ./state_dump diff --git a/hive_integration/nodocker/engine/cancun/helpers.nim 
b/hive_integration/nodocker/engine/cancun/helpers.nim index 2b668e7e99..0592ca23f2 100644 --- a/hive_integration/nodocker/engine/cancun/helpers.nim +++ b/hive_integration/nodocker/engine/cancun/helpers.nim @@ -19,6 +19,7 @@ import ../engine_client, ../../../../execution_chain/constants, ../../../../execution_chain/core/eip4844, + ../../../../execution_chain/core/pooled_txs, ../../../../execution_chain/core/lazy_kzg as kzg, ../../../../execution_chain/rpc/rpc_types, web3/execution_types, diff --git a/hive_integration/nodocker/engine/cancun/step_devp2p_pooledtx.nim b/hive_integration/nodocker/engine/cancun/step_devp2p_pooledtx.nim index 1188293bfb..7fefcb587c 100644 --- a/hive_integration/nodocker/engine/cancun/step_devp2p_pooledtx.nim +++ b/hive_integration/nodocker/engine/cancun/step_devp2p_pooledtx.nim @@ -17,7 +17,8 @@ import ../types, ../test_env, ../../../../execution_chain/utils/utils, - ../../../../execution_chain/sync/wire_protocol + ../../../../execution_chain/sync/wire_protocol, + ../../../../execution_chain/core/pooled_txs # A step that requests a Transaction hash via P2P and expects the correct full blob tx type @@ -90,7 +91,7 @@ method execute*(step: DevP2PRequestPooledTransactionHash, ctx: CancunTestContext return false # Send the request for the pooled transactions - let + let peer = sec.peer request = PooledTransactionsRequest(txHashes: txHashes) res = waitFor peer.getPooledTransactions(request) diff --git a/hive_integration/nodocker/engine/engine/invalid_payload.nim b/hive_integration/nodocker/engine/engine/invalid_payload.nim index 3726a43fc2..8d2fbdd2e2 100644 --- a/hive_integration/nodocker/engine/engine/invalid_payload.nim +++ b/hive_integration/nodocker/engine/engine/invalid_payload.nim @@ -13,7 +13,8 @@ import ./engine_spec, ../helper, ../cancun/customizer, - ../../../../execution_chain/common + ../../../../execution_chain/common, + ../../../../execution_chain/core/pooled_txs # Generate test cases for each field of NewPayload, where the 
payload contains a single invalid field and a valid hash. type diff --git a/hive_integration/nodocker/engine/engine/prev_randao.nim b/hive_integration/nodocker/engine/engine/prev_randao.nim index fe035dd5f0..73714757b0 100644 --- a/hive_integration/nodocker/engine/engine/prev_randao.nim +++ b/hive_integration/nodocker/engine/engine/prev_randao.nim @@ -1,5 +1,5 @@ # Nimbus -# Copyright (c) 2023-2024 Status Research & Development GmbH +# Copyright (c) 2023-2025 Status Research & Development GmbH # Licensed under either of # * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or # http://www.apache.org/licenses/LICENSE-2.0) @@ -13,7 +13,8 @@ import eth/common, chronicles, ./engine_spec, - ../helper + ../helper, + ../../../../execution_chain/core/pooled_txs type PrevRandaoTransactionTest* = ref object of EngineSpec diff --git a/hive_integration/nodocker/engine/engine/reorg.nim b/hive_integration/nodocker/engine/engine/reorg.nim index 79c5fcea6a..9de913cbaa 100644 --- a/hive_integration/nodocker/engine/engine/reorg.nim +++ b/hive_integration/nodocker/engine/engine/reorg.nim @@ -14,7 +14,8 @@ import stew/byteutils, ./engine_spec, ../cancun/customizer, - ../helper + ../helper, + ../../../../execution_chain/core/pooled_txs type SidechainReOrgTest* = ref object of EngineSpec diff --git a/hive_integration/nodocker/engine/engine_client.nim b/hive_integration/nodocker/engine/engine_client.nim index fbaafb60b7..89d8161d40 100644 --- a/hive_integration/nodocker/engine/engine_client.nim +++ b/hive_integration/nodocker/engine/engine_client.nim @@ -17,6 +17,7 @@ import eth/common/eth_types_rlp, chronos, json_rpc/[rpcclient, errors, jsonmarshal], ../../../execution_chain/beacon/web3_eth_conv, + ../../../execution_chain/core/pooled_txs_rlp, ./types import diff --git a/hive_integration/nodocker/engine/test_env.nim b/hive_integration/nodocker/engine/test_env.nim index 3a7e6e63cc..b61e8551d0 100644 --- a/hive_integration/nodocker/engine/test_env.nim +++ 
b/hive_integration/nodocker/engine/test_env.nim @@ -15,6 +15,7 @@ import json_rpc/rpcclient, ../../../execution_chain/config, ../../../execution_chain/common, + ../../../execution_chain/core/pooled_txs, ./clmock, ./engine_client, ./client_pool, diff --git a/hive_integration/nodocker/engine/tx_sender.nim b/hive_integration/nodocker/engine/tx_sender.nim index 9b8d715fb5..cf6b9674ac 100644 --- a/hive_integration/nodocker/engine/tx_sender.nim +++ b/hive_integration/nodocker/engine/tx_sender.nim @@ -18,7 +18,8 @@ import ./cancun/blobs, ../../../execution_chain/transaction, ../../../execution_chain/common, - ../../../execution_chain/utils/utils + ../../../execution_chain/utils/utils, + ../../../execution_chain/core/pooled_txs from std/sequtils import mapIt diff --git a/hive_integration/nodocker/rpc/client.nim b/hive_integration/nodocker/rpc/client.nim index 3cc2143744..ea7a9d5d8d 100644 --- a/hive_integration/nodocker/rpc/client.nim +++ b/hive_integration/nodocker/rpc/client.nim @@ -15,6 +15,7 @@ import ../../../execution_chain/transaction, ../../../execution_chain/utils/utils, ../../../execution_chain/beacon/web3_eth_conv, + ../../../execution_chain/core/pooled_txs_rlp, web3/eth_api export eth_api diff --git a/hive_integration/nodocker/rpc/vault.nim b/hive_integration/nodocker/rpc/vault.nim index a0a5d5c7ac..ec4aa1a6db 100644 --- a/hive_integration/nodocker/rpc/vault.nim +++ b/hive_integration/nodocker/rpc/vault.nim @@ -15,6 +15,7 @@ import json_rpc/[rpcclient], ../../../execution_chain/utils/utils, ../../../execution_chain/transaction, + ../../../execution_chain/core/pooled_txs_rlp, ./client when false: diff --git a/tests/all_tests.nim b/tests/all_tests.nim index 532381d897..fa754a6de4 100644 --- a/tests/all_tests.nim +++ b/tests/all_tests.nim @@ -34,6 +34,7 @@ import test_transaction_json, test_txpool, test_networking, + test_pooled_tx, # These two suites are much slower than all the rest, so run them last test_blockchain_json, test_generalstate_json, diff --git 
a/tests/replay/pp.nim b/tests/replay/pp.nim index 1a9b04603c..6e333af1a2 100644 --- a/tests/replay/pp.nim +++ b/tests/replay/pp.nim @@ -16,6 +16,7 @@ import eth/common, stew/byteutils, ../../execution_chain/common/chain_config, + ../../execution_chain/core/pooled_txs, ./pp_light export diff --git a/tests/test_pooled_tx.nim b/tests/test_pooled_tx.nim new file mode 100644 index 0000000000..e1ded95f96 --- /dev/null +++ b/tests/test_pooled_tx.nim @@ -0,0 +1,300 @@ +# nimbus-execution-client +# Copyright (c) 2023-2025 Status Research & Development GmbH +# Licensed under either of +# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or +# http://www.apache.org/licenses/LICENSE-2.0) +# * MIT license ([LICENSE-MIT](LICENSE-MIT) or +# http://opensource.org/licenses/MIT) +# at your option. This file may not be copied, modified, or distributed except +# according to those terms. +{.used.} + +import + stew/byteutils, + unittest2, + eth/common/transaction_utils, + ../execution_chain/core/pooled_txs_rlp + +const + recipient = address"095e7baea6a6c7c4c2dfeb977efac326af552d87" + zeroG1 = bytes48"0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" + source = address"0x0000000000000000000000000000000000000001" + storageKey= default(Bytes32) + accesses = @[AccessPair(address: source, storageKeys: @[storageKey])] + blob = default(NetworkBlob) + abcdef = hexToSeqByte("abcdef") + authList = @[Authorization( + chainID: chainId(1), + address: source, + nonce: 2.AccountNonce, + v: 3, + r: 4.u256, + s: 5.u256 + )] + +proc tx0(i: int): PooledTransaction = + PooledTransaction( + tx: Transaction( + txType: TxLegacy, + nonce: i.AccountNonce, + to: Opt.some recipient, + gasLimit: 1.GasInt, + gasPrice: 2.GasInt, + payload: abcdef)) + +proc tx1(i: int): PooledTransaction = + PooledTransaction( + tx: Transaction( + # Legacy tx contract creation. 
+ txType: TxLegacy, + nonce: i.AccountNonce, + gasLimit: 1.GasInt, + gasPrice: 2.GasInt, + payload: abcdef)) + +proc tx2(i: int): PooledTransaction = + PooledTransaction( + tx: Transaction( + # Tx with non-zero access list. + txType: TxEip2930, + chainId: chainId(1), + nonce: i.AccountNonce, + to: Opt.some recipient, + gasLimit: 123457.GasInt, + gasPrice: 10.GasInt, + accessList: accesses, + payload: abcdef)) + +proc tx3(i: int): PooledTransaction = + PooledTransaction( + tx: Transaction( + # Tx with empty access list. + txType: TxEip2930, + chainId: chainId(1), + nonce: i.AccountNonce, + to: Opt.some recipient, + gasLimit: 123457.GasInt, + gasPrice: 10.GasInt, + payload: abcdef)) + +proc tx4(i: int): PooledTransaction = + PooledTransaction( + tx: Transaction( + # Contract creation with access list. + txType: TxEip2930, + chainId: chainId(1), + nonce: i.AccountNonce, + gasLimit: 123457.GasInt, + gasPrice: 10.GasInt, + accessList: accesses)) + +proc tx5(i: int): PooledTransaction = + PooledTransaction( + tx: Transaction( + txType: TxEip1559, + chainId: chainId(1), + nonce: i.AccountNonce, + gasLimit: 123457.GasInt, + maxPriorityFeePerGas: 42.GasInt, + maxFeePerGas: 10.GasInt, + accessList: accesses)) + +proc tx6(i: int): PooledTransaction = + const + digest = hash32"010657f37554c781402a22917dee2f75def7ab966d7b770905398eba3c444014" + + PooledTransaction( + tx: Transaction( + txType: TxEip4844, + chainId: chainId(1), + nonce: i.AccountNonce, + gasLimit: 123457.GasInt, + maxPriorityFeePerGas:42.GasInt, + maxFeePerGas: 10.GasInt, + accessList: accesses, + versionedHashes: @[digest]), + networkPayload: NetworkPayload( + blobs: @[blob], + commitments: @[zeroG1], + proofs: @[zeroG1])) + +proc tx7(i: int): PooledTransaction = + const + digest = hash32"01624652859a6e98ffc1608e2af0147ca4e86e1ce27672d8d3f3c9d4ffd6ef7e" + + PooledTransaction( + tx: Transaction( + txType: TxEip4844, + chainID: chainId(1), + nonce: i.AccountNonce, + gasLimit: 123457.GasInt, + 
maxPriorityFeePerGas:42.GasInt, + maxFeePerGas: 10.GasInt, + accessList: accesses, + versionedHashes: @[digest], + maxFeePerBlobGas: 10000000.u256)) + +proc tx8(i: int): PooledTransaction = + const + digest = hash32"01624652859a6e98ffc1608e2af0147ca4e86e1ce27672d8d3f3c9d4ffd6ef7e" + + PooledTransaction( + tx: Transaction( + txType: TxEip4844, + chainID: chainId(1), + nonce: i.AccountNonce, + to: Opt.some(recipient), + gasLimit: 123457.GasInt, + maxPriorityFeePerGas:42.GasInt, + maxFeePerGas: 10.GasInt, + accessList: accesses, + versionedHashes: @[digest], + maxFeePerBlobGas: 10000000.u256)) + +proc txEip7702(i: int): PooledTransaction = + PooledTransaction( + tx: Transaction( + txType: TxEip7702, + chainId: chainId(1), + nonce: i.AccountNonce, + maxPriorityFeePerGas: 2.GasInt, + maxFeePerGas: 3.GasInt, + gasLimit: 4.GasInt, + to: Opt.some recipient, + value: 5.u256, + payload: abcdef, + accessList: accesses, + authorizationList: authList + ) + ) + +template roundTrip(txFunc: untyped, i: int) = + let tx = txFunc(i) + let bytes = rlp.encode(tx) + let tx2 = rlp.decode(bytes, PooledTransaction) + let bytes2 = rlp.encode(tx2) + check bytes == bytes2 + +suite "Transactions": + test "Legacy Tx Call": + roundTrip(tx0, 1) + + test "Legacy tx contract creation": + roundTrip(tx1, 2) + + test "Tx with non-zero access list": + roundTrip(tx2, 3) + + test "Tx with empty access list": + roundTrip(tx3, 4) + + test "Contract creation with access list": + roundTrip(tx4, 5) + + test "Dynamic Fee Tx": + roundTrip(tx5, 6) + + test "NetworkBlob Tx": + roundTrip(tx6, 7) + + test "Minimal Blob Tx": + roundTrip(tx7, 8) + + test "Minimal Blob Tx contract creation": + roundTrip(tx8, 9) + + test "EIP 7702": + roundTrip(txEip7702, 9) + + test "Network payload survive encode decode": + let tx = tx6(10) + let bytes = rlp.encode(tx) + let zz = rlp.decode(bytes, PooledTransaction) + check not zz.networkPayload.isNil + check zz.networkPayload.proofs == tx.networkPayload.proofs + check 
zz.networkPayload.blobs == tx.networkPayload.blobs + check zz.networkPayload.commitments == tx.networkPayload.commitments + + test "No Network payload still no network payload": + let tx = tx7(11) + let bytes = rlp.encode(tx) + let zz = rlp.decode(bytes, PooledTransaction) + check zz.networkPayload.isNil + + test "Minimal Blob tx recipient survive encode decode": + let tx = tx8(12) + let bytes = rlp.encode(tx) + let zz = rlp.decode(bytes, PooledTransaction) + check zz.tx.to.isSome + + test "Tx List 0,1,2,3,4,5,6,7,8": + let txs = @[tx0(3), tx1(3), tx2(3), tx3(3), tx4(3), + tx5(3), tx6(3), tx7(3), tx8(3)] + + let bytes = rlp.encode(txs) + let zz = rlp.decode(bytes, seq[PooledTransaction]) + let bytes2 = rlp.encode(zz) + check bytes2 == bytes + + test "Tx List 8,7,6,5,4,3,2,1,0": + let txs = @[tx8(3), tx7(3) , tx6(3), tx5(3), tx4(3), + tx3(3), tx2(3), tx1(3), tx0(3)] + + let bytes = rlp.encode(txs) + let zz = rlp.decode(bytes, seq[PooledTransaction]) + let bytes2 = rlp.encode(zz) + check bytes2 == bytes + + test "Tx List 0,5,8,7,6,4,3,2,1": + let txs = @[tx0(3), tx5(3), tx8(3), tx7(3), tx6(3), + tx4(3), tx3(3), tx2(3), tx1(3)] + + let bytes = rlp.encode(txs) + let zz = rlp.decode(bytes, seq[PooledTransaction]) + let bytes2 = rlp.encode(zz) + check bytes2 == bytes + + test "EIP-155 signature": + # https://github.com/ethereum/EIPs/blob/master/EIPS/eip-155.md#example + var + tx = Transaction( + txType: TxLegacy, + chainId: chainId(1), + nonce: 9, + gasPrice: 20000000000'u64, + gasLimit: 21000'u64, + to: Opt.some address"0x3535353535353535353535353535353535353535", + value: u256"1000000000000000000", + ) + txEnc = tx.encodeForSigning(true) + txHash = tx.rlpHashForSigning(true) + key = PrivateKey.fromHex("0x4646464646464646464646464646464646464646464646464646464646464646").expect( + "working key" + ) + + tx.signature = tx.sign(key, true) + + check: + txEnc.to0xHex == 
"0xec098504a817c800825208943535353535353535353535353535353535353535880de0b6b3a764000080018080" + txHash == hash32"0xdaf5a779ae972f972197303d7b574746c7ef83eadac0f2791ad23db92e4c8e53" + tx.V == 37 + tx.R == + u256"18515461264373351373200002665853028612451056578545711640558177340181847433846" + tx.S == + u256"46948507304638947509940763649030358759909902576025900602547168820602576006531" + + test "sign transaction": + let + txs = @[ + tx0(3).tx, tx1(3).tx, tx2(3).tx, tx3(3).tx, tx4(3).tx, + tx5(3).tx, tx6(3).tx, tx7(3).tx, tx8(3).tx, txEip7702(3).tx] + + privKey = PrivateKey.fromHex("63b508a03c3b5937ceb903af8b1b0c191012ef6eb7e9c3fb7afa94e5d214d376").expect("valid key") + sender = privKey.toPublicKey().to(Address) + + for tx in txs: + var tx = tx + tx.signature = tx.sign(privKey, true) + + check: + tx.recoverKey().expect("valid key").to(Address) == sender diff --git a/tests/test_txpool.nim b/tests/test_txpool.nim index 52321b4466..c40fe5ee13 100644 --- a/tests/test_txpool.nim +++ b/tests/test_txpool.nim @@ -22,6 +22,7 @@ import ../execution_chain/[config, transaction, constants], ../execution_chain/core/tx_pool, ../execution_chain/core/tx_pool/tx_desc, + ../execution_chain/core/pooled_txs, ../execution_chain/common/common, ../execution_chain/utils/utils, ./macro_assembler diff --git a/vendor/nim-eth b/vendor/nim-eth index addcbfa439..8f43bd6b5d 160000 --- a/vendor/nim-eth +++ b/vendor/nim-eth @@ -1 +1 @@ -Subproject commit addcbfa4394727dabacd26856beb2a1931b483f6 +Subproject commit 8f43bd6b5d8e4cbee940a9e630080e7801c3835c From dd99049abea29e70c5e25d0c66c9ad27994c7653 Mon Sep 17 00:00:00 2001 From: bhartnett <51288821+bhartnett@users.noreply.github.com> Date: Fri, 30 May 2025 11:52:33 +0800 Subject: [PATCH 054/138] Portal client: Increase the default number of content queue workers (#3347) --- portal/client/nimbus_portal_client_conf.nim | 2 +- portal/network/beacon/beacon_network.nim | 2 +- portal/network/history/history_network.nim | 2 +- 
portal/network/state/state_network.nim | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/portal/client/nimbus_portal_client_conf.nim b/portal/client/nimbus_portal_client_conf.nim index 65651a13c6..b9afa1dffc 100644 --- a/portal/client/nimbus_portal_client_conf.nim +++ b/portal/client/nimbus_portal_client_conf.nim @@ -427,7 +427,7 @@ type hidden, desc: "The number of content queue workers to create for concurrent processing of received offers", - defaultValue: 8, + defaultValue: 50, name: "debug-content-queue-workers" .}: int diff --git a/portal/network/beacon/beacon_network.nim b/portal/network/beacon/beacon_network.nim index 66e76ddd75..664d540585 100644 --- a/portal/network/beacon/beacon_network.nim +++ b/portal/network/beacon/beacon_network.nim @@ -199,7 +199,7 @@ proc new*( trustedBlockRoot: Opt[Eth2Digest], bootstrapRecords: openArray[Record] = [], portalConfig: PortalProtocolConfig = defaultPortalProtocolConfig, - contentQueueWorkers = 8, + contentQueueWorkers = 50, contentQueueSize = 50, ): T = let diff --git a/portal/network/history/history_network.nim b/portal/network/history/history_network.nim index d7f539ad67..b998f17700 100644 --- a/portal/network/history/history_network.nim +++ b/portal/network/history/history_network.nim @@ -353,7 +353,7 @@ proc new*( bootstrapRecords: openArray[Record] = [], portalConfig: PortalProtocolConfig = defaultPortalProtocolConfig, contentRequestRetries = 1, - contentQueueWorkers = 8, + contentQueueWorkers = 50, contentQueueSize = 50, ): T = let diff --git a/portal/network/state/state_network.nim b/portal/network/state/state_network.nim index efff7ece0e..b54c19dd98 100644 --- a/portal/network/state/state_network.nim +++ b/portal/network/state/state_network.nim @@ -57,7 +57,7 @@ proc new*( historyNetwork = Opt.none(HistoryNetwork), validateStateIsCanonical = true, contentRequestRetries = 1, - contentQueueWorkers = 8, + contentQueueWorkers = 50, contentQueueSize = 50, ): T = let From 
e9068f04fcbf049a75b539084cfa520575e0049a Mon Sep 17 00:00:00 2001 From: bhartnett <51288821+bhartnett@users.noreply.github.com> Date: Fri, 30 May 2025 13:23:56 +0800 Subject: [PATCH 055/138] Portal client: Improve portal testnet script (#3348) --- portal/scripts/launch_local_testnet.sh | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/portal/scripts/launch_local_testnet.sh b/portal/scripts/launch_local_testnet.sh index 8f8d6c2ace..2ca70dbf53 100755 --- a/portal/scripts/launch_local_testnet.sh +++ b/portal/scripts/launch_local_testnet.sh @@ -38,9 +38,9 @@ LONGOPTS="help,nodes:,data-dir:,run-tests,log-level:,base-port:,base-rpc-port:,w # default values -NUM_NODES="8" # With the default radius of 254 which should result in ~1/4th -# of the data set stored on each node at least 8 nodes are recommended to -# provide complete coverage of the data set with approx replication factor of 2. +NUM_NODES="16" # With the default radius of 254 which should result in ~1/4th +# of the data set stored on each node at least 16 nodes are recommended to +# provide complete coverage of the data set with approx replication factor of 4. 
RADIUS="static:254" DATA_DIR="local_testnet_data" RUN_TESTS="0" @@ -301,7 +301,10 @@ fi for NUM_NODE in $(seq 0 $(( NUM_NODES - 1 ))); do NODE_DATA_DIR="${DATA_DIR}/node${NUM_NODE}" - rm -rf "${NODE_DATA_DIR}" + + if [[ "$REUSE_EXISTING_DATA_DIR" == "0" ]]; then + rm -rf "${NODE_DATA_DIR}" + fi "${SCRIPTS_DIR}"/makedir.sh "${NODE_DATA_DIR}" 2>&1 done From f917387424fbbb57b888bdc2e86ab77280b7f6c2 Mon Sep 17 00:00:00 2001 From: andri lim Date: Fri, 30 May 2025 14:18:16 +0700 Subject: [PATCH 056/138] Fix simulator (#3349) * Fix simulator * Fix pyspec --- hive_integration/nodocker/consensus/consensus_sim.nim | 1 + hive_integration/nodocker/pyspec/test_env.nim | 3 ++- hive_integration/nodocker/rpc/test_env.nim | 2 -- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/hive_integration/nodocker/consensus/consensus_sim.nim b/hive_integration/nodocker/consensus/consensus_sim.nim index 41f210f701..6c9e10d72c 100644 --- a/hive_integration/nodocker/consensus/consensus_sim.nim +++ b/hive_integration/nodocker/consensus/consensus_sim.nim @@ -62,6 +62,7 @@ const unsupportedTests = [ # zig zag case, similar with total difficulty above "lotsOfBranchesOverrideAtTheEnd.json", "DaoTransactions.json", + "ChainAtoChainBtoChainAtoChainB.json", ] proc main() = diff --git a/hive_integration/nodocker/pyspec/test_env.nim b/hive_integration/nodocker/pyspec/test_env.nim index e2bd68f24f..68be9ea62e 100644 --- a/hive_integration/nodocker/pyspec/test_env.nim +++ b/hive_integration/nodocker/pyspec/test_env.nim @@ -51,7 +51,7 @@ proc setupELClient*(conf: ChainConfig, taskPool: Taskpool, node: JsonNode): Test memDB = newCoreDbRef DefaultDbMemory genesisHash = initializeDb(memDB, node) com = CommonRef.new(memDB, taskPool, conf) - chain = ForkedChainRef.init(com) + chain = ForkedChainRef.init(com, enableQueue=true) let headHash = chain.latestHash doAssert(headHash == genesisHash) @@ -78,3 +78,4 @@ proc setupELClient*(conf: ChainConfig, taskPool: Taskpool, node: JsonNode): Test proc 
stopELClient*(env: TestEnv) = waitFor env.rpcClient.close() waitFor env.rpcServer.closeWait() + waitFor env.chain.stopProcessingQueue() diff --git a/hive_integration/nodocker/rpc/test_env.nim b/hive_integration/nodocker/rpc/test_env.nim index d468037341..c810f408b7 100644 --- a/hive_integration/nodocker/rpc/test_env.nim +++ b/hive_integration/nodocker/rpc/test_env.nim @@ -64,11 +64,9 @@ proc stopRpcHttpServer(srv: RpcServer) = proc setupEnv*(taskPool: Taskpool): TestEnv = let conf = makeConfig(@[ - "--chaindb:archive", # "--nat:extip:0.0.0.0", "--network:7", "--import-key:" & initPath / "private-key", - "--engine-signer:658bdf435d810c91414ec09147daa6db62406379", "--custom-network:" & initPath / "genesis.json", "--rpc", "--rpc-api:eth,debug", From d443601c7ce41e1ffec8f81ce75191b81f8d60a6 Mon Sep 17 00:00:00 2001 From: andri lim Date: Fri, 30 May 2025 14:22:36 +0700 Subject: [PATCH 057/138] Align PooledTransaction to the spec (#3350) --- execution_chain/beacon/beacon_engine.nim | 4 +-- execution_chain/beacon/web3_eth_conv.nim | 3 +- execution_chain/core/eip4844.nim | 16 +++++----- execution_chain/core/pooled_txs.nim | 18 +++++------ execution_chain/core/pooled_txs_rlp.nim | 32 +++++++++++-------- execution_chain/core/tx_pool.nim | 10 +++--- execution_chain/core/tx_pool/tx_desc.nim | 4 +-- execution_chain/rpc/rpc_utils.nim | 5 +-- execution_chain/rpc/server_api.nim | 8 ++--- execution_chain/utils/debug.nim | 8 ++--- .../nodocker/engine/cancun/helpers.nim | 6 ++-- .../nodocker/engine/tx_sender.nim | 10 +++--- tests/replay/pp.nim | 2 +- tests/test_pooled_tx.nim | 14 ++++---- tests/test_txpool.nim | 4 +-- 15 files changed, 74 insertions(+), 70 deletions(-) diff --git a/execution_chain/beacon/beacon_engine.nim b/execution_chain/beacon/beacon_engine.nim index 9dd4cdbd48..cf15431036 100644 --- a/execution_chain/beacon/beacon_engine.nim +++ b/execution_chain/beacon/beacon_engine.nim @@ -8,7 +8,7 @@ # those terms. 
import - std/[sequtils, tables], + std/[tables], eth/common/[hashes, headers], chronicles, minilru, @@ -178,7 +178,7 @@ proc generateExecutionBundle*(ben: BeaconEngineRef, blobsBundle = Opt.some BlobsBundleV1( commitments: blobData.commitments, proofs: blobData.proofs, - blobs: blobData.blobs.mapIt it.Web3Blob) + blobs: blobData.blobs) ok ExecutionBundle( payload: executionPayload(bundle.blk), diff --git a/execution_chain/beacon/web3_eth_conv.nim b/execution_chain/beacon/web3_eth_conv.nim index 35ffe8c299..f2bbd4576d 100644 --- a/execution_chain/beacon/web3_eth_conv.nim +++ b/execution_chain/beacon/web3_eth_conv.nim @@ -1,5 +1,5 @@ # Nimbus -# Copyright (c) 2023-2024 Status Research & Development GmbH +# Copyright (c) 2023-2025 Status Research & Development GmbH # Licensed under either of # * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE)) # * MIT license ([LICENSE-MIT](LICENSE-MIT)) @@ -27,7 +27,6 @@ type Web3Quantity* = web3types.Quantity Web3ExtraData* = web3types.DynamicBytes[0, 32] Web3Tx* = engine_api_types.TypedTransaction - Web3Blob* = engine_api_types.Blob {.push gcsafe, raises:[].} diff --git a/execution_chain/core/eip4844.nim b/execution_chain/core/eip4844.nim index bf5f803a8b..3af8546b8e 100644 --- a/execution_chain/core/eip4844.nim +++ b/execution_chain/core/eip4844.nim @@ -177,35 +177,35 @@ func validateEip4844Header*( proc validateBlobTransactionWrapper*(tx: PooledTransaction): Result[void, string] {.raises: [].} = - if tx.networkPayload.isNil: + if tx.blobsBundle.isNil: return err("tx wrapper is none") # note: assert blobs are not malformatted let goodFormatted = tx.tx.versionedHashes.len == - tx.networkPayload.commitments.len and + tx.blobsBundle.commitments.len and tx.tx.versionedHashes.len == - tx.networkPayload.blobs.len and + tx.blobsBundle.blobs.len and tx.tx.versionedHashes.len == - tx.networkPayload.proofs.len + tx.blobsBundle.proofs.len if not goodFormatted: return err("tx wrapper is ill formatted") - let commitments = 
tx.networkPayload.commitments.mapIt( + let commitments = tx.blobsBundle.commitments.mapIt( kzg.KzgCommitment(bytes: it.data)) # Verify that commitments match the blobs by checking the KZG proof let res = kzg.verifyBlobKzgProofBatch( - tx.networkPayload.blobs.mapIt(kzg.KzgBlob(bytes: it)), + tx.blobsBundle.blobs.mapIt(kzg.KzgBlob(bytes: it.bytes)), commitments, - tx.networkPayload.proofs.mapIt(kzg.KzgProof(bytes: it.data))) + tx.blobsBundle.proofs.mapIt(kzg.KzgProof(bytes: it.data))) if res.isErr: return err(res.error) # Actual verification result if not res.get(): - return err("Failed to verify network payload of a transaction") + return err("Failed to verify blobs bundle of a transaction") # Now that all commitments have been verified, check that versionedHashes matches the commitments for i in 0 ..< tx.tx.versionedHashes.len: diff --git a/execution_chain/core/pooled_txs.nim b/execution_chain/core/pooled_txs.nim index a88b1bf981..a1f8c4fe97 100644 --- a/execution_chain/core/pooled_txs.nim +++ b/execution_chain/core/pooled_txs.nim @@ -10,23 +10,21 @@ {.push raises: [].} import - eth/common/transactions + eth/common/transactions, + web3/primitives export - transactions + transactions, + primitives type - # 32 -> UInt256 - # 4096 -> FIELD_ELEMENTS_PER_BLOB - NetworkBlob* = array[32*4096, byte] + KzgBlob* = primitives.Blob - BlobsBundle* = object + BlobsBundle* = ref object commitments*: seq[KzgCommitment] proofs*: seq[KzgProof] - blobs*: seq[NetworkBlob] - - NetworkPayload* = ref BlobsBundle + blobs*: seq[KzgBlob] PooledTransaction* = object tx*: Transaction - networkPayload*: NetworkPayload # EIP-4844 + blobsBundle*: BlobsBundle # EIP-4844 diff --git a/execution_chain/core/pooled_txs_rlp.nim b/execution_chain/core/pooled_txs_rlp.nim index 4e711f9932..41ee631434 100644 --- a/execution_chain/core/pooled_txs_rlp.nim +++ b/execution_chain/core/pooled_txs_rlp.nim @@ -17,22 +17,28 @@ export transactions_rlp, pooled_txs -proc append(w: var RlpWriter, networkPayload: 
NetworkPayload) = - w.append(networkPayload.blobs) - w.append(networkPayload.commitments) - w.append(networkPayload.proofs) +proc append(w: var RlpWriter, blob: Blob) = + w.append(blob.bytes) + +proc append(w: var RlpWriter, blobsBundle: BlobsBundle) = + w.append(blobsBundle.blobs) + w.append(blobsBundle.commitments) + w.append(blobsBundle.proofs) proc append*(w: var RlpWriter, tx: PooledTransaction) = if tx.tx.txType != TxLegacy: w.append(tx.tx.txType) - if tx.networkPayload != nil: + if tx.blobsBundle != nil: w.startList(4) # spec: rlp([tx_payload, blobs, commitments, proofs]) w.appendTxPayload(tx.tx) - if tx.networkPayload != nil: - w.append(tx.networkPayload) + if tx.blobsBundle != nil: + w.append(tx.blobsBundle) + +proc read(rlp: var Rlp, T: type Blob): T {.raises: [RlpError].} = + rlp.read(result.bytes) -proc read(rlp: var Rlp, T: type NetworkPayload): T {.raises: [RlpError].} = - result = NetworkPayload() +proc read(rlp: var Rlp, T: type BlobsBundle): T {.raises: [RlpError].} = + result = BlobsBundle() rlp.read(result.blobs) rlp.read(result.commitments) rlp.read(result.proofs) @@ -40,16 +46,16 @@ proc read(rlp: var Rlp, T: type NetworkPayload): T {.raises: [RlpError].} = proc readTxTyped(rlp: var Rlp, tx: var PooledTransaction) {.raises: [RlpError].} = let txType = rlp.readTxType() - hasNetworkPayload = + hasBlobsBundle = if txType == TxEip4844: rlp.listLen == 4 else: false - if hasNetworkPayload: + if hasBlobsBundle: rlp.tryEnterList() # spec: rlp([tx_payload, blobs, commitments, proofs]) rlp.readTxPayload(tx.tx, txType) - if hasNetworkPayload: - rlp.read(tx.networkPayload) + if hasBlobsBundle: + rlp.read(tx.blobsBundle) proc read*(rlp: var Rlp, T: type PooledTransaction): T {.raises: [RlpError].} = if rlp.isList: diff --git a/execution_chain/core/tx_pool.nim b/execution_chain/core/tx_pool.nim index 705cb479ca..276a30acfb 100644 --- a/execution_chain/core/tx_pool.nim +++ b/execution_chain/core/tx_pool.nim @@ -147,16 +147,16 @@ proc assembleBlock*( var blk = 
EthBlock( header: pst.assembleHeader(xp) ) - var blobsBundle: BlobsBundle + var blobsBundle = BlobsBundle() for item in pst.packedTxs: let tx = item.pooledTx blk.txs.add tx.tx - if tx.networkPayload != nil: - for k in tx.networkPayload.commitments: + if tx.blobsBundle != nil: + for k in tx.blobsBundle.commitments: blobsBundle.commitments.add k - for p in tx.networkPayload.proofs: + for p in tx.blobsBundle.proofs: blobsBundle.proofs.add p - for blob in tx.networkPayload.blobs: + for blob in tx.blobsBundle.blobs: blobsBundle.blobs.add blob blk.header.transactionsRoot = calcTxRoot(blk.txs) diff --git a/execution_chain/core/tx_pool/tx_desc.nim b/execution_chain/core/tx_pool/tx_desc.nim index 3aee08dcc1..57f893abfb 100644 --- a/execution_chain/core/tx_pool/tx_desc.nim +++ b/execution_chain/core/tx_pool/tx_desc.nim @@ -414,9 +414,9 @@ iterator byPriceAndNonce*(xp: TxPoolRef): TxItemRef = func getBlobAndProofV1*(xp: TxPoolRef, v: VersionedHash): Opt[BlobAndProofV1] = xp.blobTab.withValue(v, val): - let np = val.item.pooledTx.networkPayload + let np = val.item.pooledTx.blobsBundle return Opt.some(BlobAndProofV1( - blob: engine_api_types.Blob(np.blobs[val.blobIndex]), + blob: np.blobs[val.blobIndex], proof: np.proofs[val.blobIndex])) Opt.none(BlobAndProofV1) diff --git a/execution_chain/rpc/rpc_utils.nim b/execution_chain/rpc/rpc_utils.nim index 52982f2bd4..8b79b0b32e 100644 --- a/execution_chain/rpc/rpc_utils.nim +++ b/execution_chain/rpc/rpc_utils.nim @@ -149,7 +149,8 @@ proc populateBlockObject*(blockHash: Hash32, result.mixHash = Hash32 header.mixHash # discard sizeof(seq[byte]) of extraData and use actual length - let size = sizeof(eth_types.Header) - sizeof(eth_api_types.Blob) + header.extraData.len + type ExtraDataType = typeof(header.extraData) + let size = sizeof(eth_types.Header) - sizeof(ExtraDataType) + header.extraData.len result.size = Quantity(size) result.gasLimit = Quantity(header.gasLimit) @@ -181,7 +182,7 @@ proc populateBlockObject*(blockHash: Hash32, 
proc populateReceipt*(rec: StoredReceipt, gasUsed: GasInt, tx: Transaction, txIndex: uint64, header: Header, com: CommonRef): ReceiptObject = - let + let sender = tx.recoverSender() receipt = rec.to(Receipt) var res = ReceiptObject() diff --git a/execution_chain/rpc/server_api.nim b/execution_chain/rpc/server_api.nim index 5d42eae8c3..cd7a6affd4 100644 --- a/execution_chain/rpc/server_api.nim +++ b/execution_chain/rpc/server_api.nim @@ -499,7 +499,7 @@ proc setupServerAPI*(api: ServerAPIRef, server: RpcServer, ctx: EthContext) = tx = unsignedTx(data, api.chain, accDB.getNonce(address) + 1, api.com.chainId) eip155 = api.com.isEIP155(api.chain.latestNumber) signedTx = signTransaction(tx, acc.privateKey, eip155) - networkPayload = + blobsBundle = if signedTx.txType == TxEip4844: if data.blobs.isNone or data.commitments.isNone or data.proofs.isNone: raise newException(ValueError, "EIP-4844 transaction needs blobs") @@ -509,8 +509,8 @@ proc setupServerAPI*(api: ServerAPIRef, server: RpcServer, ctx: EthContext) = raise newException(ValueError, "Incorrect number of commitments") if data.proofs.get.len != signedTx.versionedHashes.len: raise newException(ValueError, "Incorrect number of proofs") - NetworkPayload( - blobs: data.blobs.get.mapIt it.NetworkBlob, + BlobsBundle( + blobs: data.blobs.get, commitments: data.commitments.get, proofs: data.proofs.get, ) @@ -518,7 +518,7 @@ proc setupServerAPI*(api: ServerAPIRef, server: RpcServer, ctx: EthContext) = if data.blobs.isSome or data.commitments.isSome or data.proofs.isSome: raise newException(ValueError, "Blobs require EIP-4844 transaction") nil - pooledTx = PooledTransaction(tx: signedTx, networkPayload: networkPayload) + pooledTx = PooledTransaction(tx: signedTx, blobsBundle: blobsBundle) api.txPool.addTx(pooledTx).isOkOr: raise newException(ValueError, $error) diff --git a/execution_chain/utils/debug.nim b/execution_chain/utils/debug.nim index 5c5e6a53a3..a7dec382b2 100644 --- a/execution_chain/utils/debug.nim +++ 
b/execution_chain/utils/debug.nim @@ -137,13 +137,13 @@ proc debug*(tx: Transaction): string = proc debug*(tx: PooledTransaction): string = result.add debug(tx.tx) - if tx.networkPayload.isNil: + if tx.blobsBundle.isNil: result.add "networkPaylod : nil\n" else: result.add "networkPaylod : \n" - result.add " - blobs : " & $tx.networkPayload.blobs.len & "\n" - result.add " - commitments : " & $tx.networkPayload.commitments.len & "\n" - result.add " - proofs : " & $tx.networkPayload.proofs.len & "\n" + result.add " - blobs : " & $tx.blobsBundle.blobs.len & "\n" + result.add " - commitments : " & $tx.blobsBundle.commitments.len & "\n" + result.add " - proofs : " & $tx.blobsBundle.proofs.len & "\n" proc debugSum*(h: Header): string = result.add "txRoot : " & $h.txRoot & "\n" diff --git a/hive_integration/nodocker/engine/cancun/helpers.nim b/hive_integration/nodocker/engine/cancun/helpers.nim index 0592ca23f2..ec82331931 100644 --- a/hive_integration/nodocker/engine/cancun/helpers.nim +++ b/hive_integration/nodocker/engine/cancun/helpers.nim @@ -168,10 +168,10 @@ proc getBlobDataInPayload*(pool: TestBlobTxPool, payload: ExecutionPayload): Res return err("could not find transaction in the pool") let blobTx = pool.transactions[txHash] - if blobTx.networkPayload.isNil: + if blobTx.blobsBundle.isNil: return err("blob data is nil") - let np = blobTx.networkPayload + let np = blobTx.blobsBundle if blobTx.tx.versionedHashes.len != np.commitments.len or np.commitments.len != np.blobs.len or np.blobs.len != np.proofs.len: @@ -181,7 +181,7 @@ proc getBlobDataInPayload*(pool: TestBlobTxPool, payload: ExecutionPayload): Res blobData.data.add BlobWrapData( versionedHash: blobTx.tx.versionedHashes[i], commitment : kzg.KzgCommitment(bytes: np.commitments[i].data), - blob : kzg.KzgBlob(bytes: np.blobs[i]), + blob : kzg.KzgBlob(bytes: np.blobs[i].bytes), proof : kzg.KzgProof(bytes: np.proofs[i].data), ) blobData.txs.add blobTx.tx diff --git 
a/hive_integration/nodocker/engine/tx_sender.nim b/hive_integration/nodocker/engine/tx_sender.nim index cf6b9674ac..c0a7ddb070 100644 --- a/hive_integration/nodocker/engine/tx_sender.nim +++ b/hive_integration/nodocker/engine/tx_sender.nim @@ -200,8 +200,8 @@ proc makeTxOfType(params: MakeTxParams, tc: BaseTx): PooledTransaction = maxFeePerBlobGas: blobFeeCap, versionedHashes: system.move(blobData.hashes), ), - networkPayload: NetworkPayload( - blobs: blobData.blobs.mapIt(it.bytes), + blobsBundle: BlobsBundle( + blobs: blobData.blobs.mapIt(KzgBlob it.bytes), commitments: blobData.commitments.mapIt(KzgCommitment it.bytes), proofs: blobData.proofs.mapIt(KzgProof it.bytes), ) @@ -229,7 +229,7 @@ proc makeTx(params: MakeTxParams, tc: BaseTx): PooledTransaction = let tx = makeTxOfType(params, tc) PooledTransaction( tx: signTransaction(tx.tx, params.key), - networkPayload: tx.networkPayload) + blobsBundle: tx.blobsBundle) proc makeTx(params: MakeTxParams, tc: BigInitcodeTx): PooledTransaction = var tx = tc @@ -359,8 +359,8 @@ proc makeTx*(params: MakeTxParams, tc: BlobTx): PooledTransaction = PooledTransaction( tx: signTransaction(unsignedTx, params.key), - networkPayload: NetworkPayload( - blobs : data.blobs.mapIt(it.bytes), + blobsBundle: BlobsBundle( + blobs : data.blobs.mapIt(KzgBlob it.bytes), commitments: data.commitments.mapIt(KzgCommitment it.bytes), proofs : data.proofs.mapIt(KzgProof it.bytes), ), diff --git a/tests/replay/pp.nim b/tests/replay/pp.nim index 6e333af1a2..aac9a5906c 100644 --- a/tests/replay/pp.nim +++ b/tests/replay/pp.nim @@ -41,7 +41,7 @@ func pp*(a: openArray[Address]): string = func pp*(a: Bytes8|Bytes32): string = a.toHex -func pp*(a: NetworkPayload): string = +func pp*(a: BlobsBundle): string = if a.isNil: "n/a" else: diff --git a/tests/test_pooled_tx.nim b/tests/test_pooled_tx.nim index e1ded95f96..d0b440e7fe 100644 --- a/tests/test_pooled_tx.nim +++ b/tests/test_pooled_tx.nim @@ -21,7 +21,7 @@ const source = 
address"0x0000000000000000000000000000000000000001" storageKey= default(Bytes32) accesses = @[AccessPair(address: source, storageKeys: @[storageKey])] - blob = default(NetworkBlob) + blob = default(KzgBlob) abcdef = hexToSeqByte("abcdef") authList = @[Authorization( chainID: chainId(1), @@ -113,7 +113,7 @@ proc tx6(i: int): PooledTransaction = maxFeePerGas: 10.GasInt, accessList: accesses, versionedHashes: @[digest]), - networkPayload: NetworkPayload( + blobsBundle: BlobsBundle( blobs: @[blob], commitments: @[zeroG1], proofs: @[zeroG1])) @@ -210,16 +210,16 @@ suite "Transactions": let tx = tx6(10) let bytes = rlp.encode(tx) let zz = rlp.decode(bytes, PooledTransaction) - check not zz.networkPayload.isNil - check zz.networkPayload.proofs == tx.networkPayload.proofs - check zz.networkPayload.blobs == tx.networkPayload.blobs - check zz.networkPayload.commitments == tx.networkPayload.commitments + check not zz.blobsBundle.isNil + check zz.blobsBundle.proofs == tx.blobsBundle.proofs + check zz.blobsBundle.blobs == tx.blobsBundle.blobs + check zz.blobsBundle.commitments == tx.blobsBundle.commitments test "No Network payload still no network payload": let tx = tx7(11) let bytes = rlp.encode(tx) let zz = rlp.decode(bytes, PooledTransaction) - check zz.networkPayload.isNil + check zz.blobsBundle.isNil test "Minimal Blob tx recipient survive encode decode": let tx = tx8(12) diff --git a/tests/test_txpool.nim b/tests/test_txpool.nim index c40fe5ee13..e88c53932e 100644 --- a/tests/test_txpool.nim +++ b/tests/test_txpool.nim @@ -215,9 +215,9 @@ proc txPoolMain*() = blobID: 0.BlobID ) var ptx = mx.makeTx(tc, 0) - var z = ptx.networkPayload.blobs[0] + var z = ptx.blobsBundle.blobs[0].bytes z[0] = not z[0] - ptx.networkPayload.blobs[0] = z + ptx.blobsBundle.blobs[0] = KzgBlob z xp.checkAddTx(ptx, txErrorInvalidBlob) test "Bad chainId": From d4442ed2f5ae2dd206929ccdaf82b95bea92837b Mon Sep 17 00:00:00 2001 From: Advaita Saha Date: Sat, 31 May 2025 19:44:49 +0530 Subject: [PATCH 
058/138] dynamic blob gas calculation (#3354) * dynamic blob gas calculation * fix tests * fix additional tests * remove constants --- execution_chain/common/common.nim | 10 +++++++++ execution_chain/constants.nim | 4 ---- execution_chain/core/eip4844.nim | 10 ++++----- execution_chain/core/eip7691.nim | 22 ++++++++++--------- .../core/executor/process_transaction.nim | 2 +- execution_chain/core/tx_pool/tx_desc.nim | 4 ++-- execution_chain/core/tx_pool/tx_packer.nim | 5 ++--- .../engine/cancun/step_newpayloads.nim | 2 +- tools/t8n/transition.nim | 4 ++-- 9 files changed, 35 insertions(+), 28 deletions(-) diff --git a/execution_chain/common/common.nim b/execution_chain/common/common.nim index b024139a87..0392be28c6 100644 --- a/execution_chain/common/common.nim +++ b/execution_chain/common/common.nim @@ -278,6 +278,16 @@ func toHardFork*( com: CommonRef, forkDeterminer: ForkDeterminationInfo): HardFork = toHardFork(com.forkTransitionTable, forkDeterminer) +func toHardFork*(com: CommonRef, timestamp: EthTime): HardFork = + for fork in countdown(com.forkTransitionTable.timeThresholds.high, Shanghai): + if com.forkTransitionTable.timeThresholds[fork].isSome and timestamp >= com.forkTransitionTable.timeThresholds[fork].get: + return fork + +func toEVMFork*(com: CommonRef, timestamp: EthTime): EVMFork = + ## similar to toHardFork, but produce EVMFork + let fork = com.toHardFork(timestamp) + ToEVMFork[fork] + func toEVMFork*(com: CommonRef, forkDeterminer: ForkDeterminationInfo): EVMFork = ## similar to toFork, but produce EVMFork let fork = com.toHardFork(forkDeterminer) diff --git a/execution_chain/constants.nim b/execution_chain/constants.nim index ed72024808..7fb60cdd66 100644 --- a/execution_chain/constants.nim +++ b/execution_chain/constants.nim @@ -89,10 +89,6 @@ const MAX_BLOB_GAS_PER_BLOCK* = 786432 MAX_BLOBS_PER_BLOCK* = int(MAX_BLOB_GAS_PER_BLOCK div GAS_PER_BLOB) - MAX_BLOB_GAS_PER_BLOCK_ELECTRA* = 1179648 - TARGET_BLOB_GAS_PER_BLOCK_ELECTRA* = 786432 - 
MAX_BLOBS_PER_BLOCK_ELECTRA* = int(MAX_BLOB_GAS_PER_BLOCK_ELECTRA div GAS_PER_BLOB) - # EIP-4788 addresses # BEACON_ROOTS_ADDRESS is the address where historical beacon roots are stored as per EIP-4788 BEACON_ROOTS_ADDRESS* = address"0x000F3df6D732807Ef1319fB7B8bB8522d0Beac02" diff --git a/execution_chain/core/eip4844.nim b/execution_chain/core/eip4844.nim index 3af8546b8e..61c8533622 100644 --- a/execution_chain/core/eip4844.nim +++ b/execution_chain/core/eip4844.nim @@ -82,11 +82,11 @@ proc pointEvaluation*(input: openArray[byte]): Result[void, string] = ok() # calcExcessBlobGas implements calc_excess_data_gas from EIP-4844 -proc calcExcessBlobGas*(parent: Header, electra: bool): uint64 = +proc calcExcessBlobGas*(com: CommonRef, parent: Header, fork: EVMFork): uint64 = let excessBlobGas = parent.excessBlobGas.get(0'u64) blobGasUsed = parent.blobGasUsed.get(0'u64) - targetBlobGasPerBlock = getTargetBlobGasPerBlock(electra) + targetBlobGasPerBlock = com.getTargetBlobsPerBlock(fork) * GAS_PER_BLOB if excessBlobGas + blobGasUsed < targetBlobGasPerBlock: 0'u64 @@ -157,12 +157,12 @@ func validateEip4844Header*( return err("expect EIP-4844 excessBlobGas in block header") let - electra = com.isPragueOrLater(header.timestamp) + fork = com.toEVMFork(header) headerBlobGasUsed = header.blobGasUsed.get() blobGasUsed = blobGasUsed(txs) headerExcessBlobGas = header.excessBlobGas.get - excessBlobGas = calcExcessBlobGas(parentHeader, electra) - maxBlobGasPerBlock = getMaxBlobGasPerBlock(electra) + excessBlobGas = calcExcessBlobGas(com, parentHeader, fork) + maxBlobGasPerBlock = com.getMaxBlobGasPerBlock(fork) if blobGasUsed > maxBlobGasPerBlock: return err("blobGasUsed " & $blobGasUsed & " exceeds maximum allowance " & $maxBlobGasPerBlock) diff --git a/execution_chain/core/eip7691.nim b/execution_chain/core/eip7691.nim index f036007674..c7f2fa1242 100644 --- a/execution_chain/core/eip7691.nim +++ b/execution_chain/core/eip7691.nim @@ -16,14 +16,6 @@ import ../common/evmforks, 
../common/common -func getMaxBlobGasPerBlock*(electra: bool): uint64 = - if electra: MAX_BLOB_GAS_PER_BLOCK_ELECTRA.uint64 - else: MAX_BLOB_GAS_PER_BLOCK.uint64 - -func getTargetBlobGasPerBlock*(electra: bool): uint64 = - if electra: TARGET_BLOB_GAS_PER_BLOCK_ELECTRA.uint64 - else: TARGET_BLOB_GAS_PER_BLOCK.uint64 - const EVMForkToFork: array[FkCancun..FkLatest, HardFork] = [ Cancun, @@ -32,9 +24,19 @@ const ] func getMaxBlobsPerBlock*(com: CommonRef, fork: EVMFork): uint64 = - doAssert(fork >= FkCancun) + if fork < FkCancun: + return 0 com.maxBlobsPerBlock(EVMForkToFork[fork]) +func getTargetBlobsPerBlock*(com: CommonRef, fork: EVMFork): uint64 = + if fork < FkCancun: + return 0 + com.targetBlobsPerBlock(EVMForkToFork[fork]) + func getBlobBaseFeeUpdateFraction*(com: CommonRef, fork: EVMFork): uint64 = - doAssert(fork >= FkCancun) + if fork < FkCancun: + return 0 com.baseFeeUpdateFraction(EVMForkToFork[fork]) + +func getMaxBlobGasPerBlock*(com: CommonRef, fork: EVMFork): uint64 = + com.getMaxBlobsPerBlock(fork) * GAS_PER_BLOB \ No newline at end of file diff --git a/execution_chain/core/executor/process_transaction.nim b/execution_chain/core/executor/process_transaction.nim index 9a7f0886c2..c4bbba828f 100644 --- a/execution_chain/core/executor/process_transaction.nim +++ b/execution_chain/core/executor/process_transaction.nim @@ -94,7 +94,7 @@ proc processTransactionImpl( # blobGasUsed will be added to vmState.blobGasUsed if the tx is ok. 
let blobGasUsed = tx.getTotalBlobGas - maxBlobGasPerBlock = getMaxBlobGasPerBlock(vmState.fork >= FkPrague) + maxBlobGasPerBlock = getMaxBlobGasPerBlock(vmState.com, vmState.fork) if vmState.blobGasUsed + blobGasUsed > maxBlobGasPerBlock: return err("blobGasUsed " & $blobGasUsed & " exceeds maximum allowance " & $maxBlobGasPerBlock) diff --git a/execution_chain/core/tx_pool/tx_desc.nim b/execution_chain/core/tx_pool/tx_desc.nim index 57f893abfb..10ac91192f 100644 --- a/execution_chain/core/tx_pool/tx_desc.nim +++ b/execution_chain/core/tx_pool/tx_desc.nim @@ -87,7 +87,7 @@ proc setupVMState(com: CommonRef; pos: PosPayloadAttr, parentFrame: CoreDbTxRef): BaseVMState = let - electra = com.isPragueOrLater(pos.timestamp) + fork = com.toEVMFork(pos.timestamp) BaseVMState.new( parent = parent, @@ -98,7 +98,7 @@ proc setupVMState(com: CommonRef; prevRandao : pos.prevRandao, difficulty : UInt256.zero(), coinbase : pos.feeRecipient, - excessBlobGas: calcExcessBlobGas(parent, electra), + excessBlobGas: com.calcExcessBlobGas(parent, fork), parentHash : parentHash, ), txFrame = parentFrame.txFrameBegin(), diff --git a/execution_chain/core/tx_pool/tx_packer.nim b/execution_chain/core/tx_pool/tx_packer.nim index cec06d6c0d..cde83aa215 100644 --- a/execution_chain/core/tx_pool/tx_packer.nim +++ b/execution_chain/core/tx_pool/tx_packer.nim @@ -71,7 +71,7 @@ proc classifyValidatePacked(vmState: BaseVMState; item: TxItemRef): bool = baseFee = vmState.blockCtx.baseFeePerGas.get(0.u256) fork = vmState.fork gasLimit = vmState.blockCtx.gasLimit - excessBlobGas = calcExcessBlobGas(vmState.parent, fork >= FkPrague) + excessBlobGas = calcExcessBlobGas(vmState.com, vmState.parent, fork) roDB.validateTransaction( item.tx, item.sender, gasLimit, baseFee, excessBlobGas, vmState.com, fork).isOk @@ -159,7 +159,6 @@ proc vmExecGrabItem(pst: var TxPacker; item: TxItemRef, xp: TxPoolRef): bool = ## values are below the maximum block size. 
let vmState = pst.vmState - electra = vmState.fork >= FkPrague # EIP-4844 if item.tx.txType == TxEip4844: @@ -169,7 +168,7 @@ proc vmExecGrabItem(pst: var TxPacker; item: TxItemRef, xp: TxPoolRef): bool = let blobGasUsed = item.tx.getTotalBlobGas - maxBlobGasPerBlock = getMaxBlobGasPerBlock(electra) + maxBlobGasPerBlock = getMaxBlobGasPerBlock(vmState.com, vmState.fork) if vmState.blobGasUsed + blobGasUsed > maxBlobGasPerBlock: return ContinueWithNextAccount diff --git a/hive_integration/nodocker/engine/cancun/step_newpayloads.nim b/hive_integration/nodocker/engine/cancun/step_newpayloads.nim index 876199af86..c8260a8e84 100644 --- a/hive_integration/nodocker/engine/cancun/step_newpayloads.nim +++ b/hive_integration/nodocker/engine/cancun/step_newpayloads.nim @@ -73,7 +73,7 @@ proc verifyPayload(step: NewPayloads, excessBlobGas: Opt.some(parentExcessBlobGas), blobGasUsed: Opt.some(parentBlobGasUsed) ) - expectedExcessBlobGas = calcExcessBlobGas(parent, com.isPragueOrLater(payload.timestamp.EthTime)) + expectedExcessBlobGas = com.calcExcessBlobGas(parent, com.toEVMFork(payload.timestamp.EthTime)) if com.isCancunOrLater(payload.timestamp.EthTime): if payload.excessBlobGas.isNone: diff --git a/tools/t8n/transition.nim b/tools/t8n/transition.nim index 540e5df930..38a21ac2ca 100644 --- a/tools/t8n/transition.nim +++ b/tools/t8n/transition.nim @@ -351,7 +351,7 @@ proc exec(ctx: TransContext, if ctx.env.currentExcessBlobGas.isSome: excessBlobGas = ctx.env.currentExcessBlobGas elif ctx.env.parentExcessBlobGas.isSome and ctx.env.parentBlobGasUsed.isSome: - excessBlobGas = Opt.some calcExcessBlobGas(vmState.parent, vmState.fork >= FkPrague) + excessBlobGas = Opt.some calcExcessBlobGas(vmState.com, vmState.parent, vmState.fork) if excessBlobGas.isSome: result.result.blobGasUsed = Opt.some vmState.blobGasUsed @@ -535,7 +535,7 @@ proc transitionAction*(ctx: var TransContext, conf: T8NConf) = # If it is not explicitly defined, but we have the parent values, we try # to calculate 
it ourselves. if parent.excessBlobGas.isSome and parent.blobGasUsed.isSome: - ctx.env.currentExcessBlobGas = Opt.some calcExcessBlobGas(parent, com.isPragueOrLater(ctx.env.currentTimestamp)) + ctx.env.currentExcessBlobGas = Opt.some com.calcExcessBlobGas(parent, com.toEVMFork(ctx.env.currentTimestamp)) let header = envToHeader(ctx.env) From 164405ce84a590576dea4b4feff75431ddc4de51 Mon Sep 17 00:00:00 2001 From: bhartnett <51288821+bhartnett@users.noreply.github.com> Date: Mon, 2 Jun 2025 11:17:43 +0800 Subject: [PATCH 059/138] Portal bridge: Use startEra and endEra parameters in history bridge backfill audit (#3351) --- portal/bridge/history/portal_history_bridge.nim | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/portal/bridge/history/portal_history_bridge.nim b/portal/bridge/history/portal_history_bridge.nim index 27e7b26738..09f0d7b19c 100644 --- a/portal/bridge/history/portal_history_bridge.nim +++ b/portal/bridge/history/portal_history_bridge.nim @@ -355,17 +355,20 @@ proc runBackfillLoop( continue proc runBackfillLoopAuditMode( - bridge: PortalHistoryBridge, era1Dir: string + bridge: PortalHistoryBridge, era1Dir: string, startEra: uint64, endEra: uint64 ) {.async: (raises: [CancelledError]).} = let rng = newRng() db = Era1DB.new(era1Dir, "mainnet", loadAccumulator()) + blockLowerBound = startEra * EPOCH_SIZE # inclusive + blockUpperBound = ((endEra + 1) * EPOCH_SIZE) - 1 # inclusive + blockRange = blockUpperBound - blockLowerBound var blockTuple: BlockTuple while true: let # Grab a random blockNumber to audit and potentially gossip - blockNumber = rng[].rand(network_metadata.mergeBlockNumber - 1).uint64 + blockNumber = blockLowerBound + rng[].rand(blockRange).uint64 db.getBlockTuple(blockNumber, blockTuple).isOkOr: error "Failed to get block tuple", error, blockNumber continue @@ -525,7 +528,9 @@ proc runHistory*(config: PortalBridgeConf) = if config.backfill: if config.audit: - asyncSpawn 
bridge.runBackfillLoopAuditMode(config.era1Dir.string) + asyncSpawn bridge.runBackfillLoopAuditMode( + config.era1Dir.string, config.startEra, config.endEra + ) else: asyncSpawn bridge.runBackfillLoop( config.era1Dir.string, config.startEra, config.endEra From 82dff06a6b09b5299a9831bf3e9b306bbfe927d7 Mon Sep 17 00:00:00 2001 From: bhartnett <51288821+bhartnett@users.noreply.github.com> Date: Mon, 2 Jun 2025 13:07:08 +0800 Subject: [PATCH 060/138] Portal client: Create metric to track successful and failed offer validations for all sub-networks (#3352) --- portal/network/beacon/beacon_network.nim | 6 ++++++ portal/network/history/history_network.nim | 7 +++++++ portal/network/state/state_network.nim | 18 +++++++++--------- portal/network/wire/portal_protocol.nim | 7 ++++++- 4 files changed, 28 insertions(+), 10 deletions(-) diff --git a/portal/network/beacon/beacon_network.nim b/portal/network/beacon/beacon_network.nim index 664d540585..49cbd81639 100644 --- a/portal/network/beacon/beacon_network.nim +++ b/portal/network/beacon/beacon_network.nim @@ -11,6 +11,7 @@ import results, chronos, chronicles, + metrics, eth/p2p/discoveryv5/[protocol, enr], beacon_chain/spec/forks, beacon_chain/gossip_processing/light_client_processor, @@ -455,8 +456,13 @@ proc contentQueueWorker(n: BeaconNetwork) {.async: (raises: []).} = # TODO: Differentiate between failures due to invalid data and failures # due to missing network data for validation. 
if await n.validateContent(srcNodeId, contentKeys, contentItems): + portal_offer_validation_successful.inc( + labelValues = [$n.portalProtocol.protocolId] + ) discard await n.portalProtocol.randomGossip(srcNodeId, contentKeys, contentItems) + else: + portal_offer_validation_failed.inc(labelValues = [$n.portalProtocol.protocolId]) except CancelledError: trace "contentQueueWorker canceled" diff --git a/portal/network/history/history_network.nim b/portal/network/history/history_network.nim index b998f17700..dfd53e85de 100644 --- a/portal/network/history/history_network.nim +++ b/portal/network/history/history_network.nim @@ -11,6 +11,7 @@ import results, chronos, chronicles, + metrics, eth/trie/ordered_trie, eth/common/[hashes, headers_rlp, blocks_rlp, receipts_rlp, transactions_rlp], eth/p2p/discoveryv5/[protocol, enr], @@ -430,9 +431,15 @@ proc contentQueueWorker(n: HistoryNetwork) {.async: (raises: []).} = # TODO: Differentiate between failures due to invalid data and failures # due to missing network data for validation. 
if await n.validateContent(srcNodeId, contentKeys, contentItems): + portal_offer_validation_successful.inc( + labelValues = [$n.portalProtocol.protocolId] + ) + discard await n.portalProtocol.neighborhoodGossip( srcNodeId, contentKeys, contentItems ) + else: + portal_offer_validation_failed.inc(labelValues = [$n.portalProtocol.protocolId]) except CancelledError: trace "contentQueueWorker canceled" diff --git a/portal/network/state/state_network.nim b/portal/network/state/state_network.nim index b54c19dd98..5e3886f722 100644 --- a/portal/network/state/state_network.nim +++ b/portal/network/state/state_network.nim @@ -26,11 +26,6 @@ export results, state_content, hashes logScope: topics = "portal_state" -declareCounter state_network_offers_success, - "Portal state network offers successfully validated", labels = ["protocol_id"] -declareCounter state_network_offers_failed, - "Portal state network offers which failed validation", labels = ["protocol_id"] - const pingExtensionCapabilities = {CapabilitiesType, BasicRadiusType} type StateNetwork* = ref object @@ -249,17 +244,22 @@ proc contentQueueWorker(n: StateNetwork) {.async: (raises: []).} = ) if offerRes.isOk(): - state_network_offers_success.inc(labelValues = [$n.portalProtocol.protocolId]) + portal_offer_validation_successful.inc( + labelValues = [$n.portalProtocol.protocolId] + ) debug "Received offered content validated successfully", srcNodeId, contentKeyBytes else: + portal_offer_validation_failed.inc( + labelValues = [$n.portalProtocol.protocolId] + ) + error "Received offered content failed validation", + srcNodeId, contentKeyBytes, error = offerRes.error() + if srcNodeId.isSome(): n.portalProtocol.banNode( srcNodeId.get(), NodeBanDurationOfferFailedValidation ) - state_network_offers_failed.inc(labelValues = [$n.portalProtocol.protocolId]) - error "Received offered content failed validation", - srcNodeId, contentKeyBytes, error = offerRes.error() # The content validation failed so drop the remaining content 
(if any) from # this offer, because the remainly content is also likely to fail validation. diff --git a/portal/network/wire/portal_protocol.nim b/portal/network/wire/portal_protocol.nim index f3ca3b35d0..249f87c15c 100644 --- a/portal/network/wire/portal_protocol.nim +++ b/portal/network/wire/portal_protocol.nim @@ -126,6 +126,11 @@ declareHistogram portal_offer_log_distance, labels = ["protocol_id"], buckets = distanceBuckets +declarePublicCounter portal_offer_validation_successful, + "Portal sub-network offers successfully validated", labels = ["protocol_id"] +declarePublicCounter portal_offer_validation_failed, + "Portal sub-network offers which failed validation", labels = ["protocol_id"] + logScope: topics = "portal_wire" @@ -328,7 +333,7 @@ proc banNode*(p: PortalProtocol, nodeId: NodeId, period: chronos.Duration) = proc isBanned*(p: PortalProtocol, nodeId: NodeId): bool = p.config.disableBanNodes == false and p.routingTable.isBanned(nodeId) -func `$`(id: PortalProtocolId): string = +func `$`*(id: PortalProtocolId): string = id.toHex() func fromNodeStatus(T: type NodeAddResult, status: NodeStatus): T = From 020475bfc886370bcaabe0a1e1c96cd5400af319 Mon Sep 17 00:00:00 2001 From: andri lim Date: Mon, 2 Jun 2025 15:36:24 +0700 Subject: [PATCH 061/138] Remove not used EVMC flags from execution_client CI (#3355) --- .github/workflows/ci.yml | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e64ebdbec8..7ea027aaaf 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -36,13 +36,10 @@ jobs: target: - os: linux cpu: amd64 - evmc: nimvm - os: windows cpu: amd64 - evmc: nimvm - os: macos cpu: arm64 - evmc: nimvm include: - target: os: linux @@ -58,7 +55,7 @@ jobs: run: shell: bash - name: '${{ matrix.target.os }}-${{ matrix.target.cpu }}-${{ matrix.target.evmc }}' + name: '${{ matrix.target.os }}-${{ matrix.target.cpu }}' runs-on: ${{ matrix.builder }} steps: - 
name: Checkout nimbus-eth1 @@ -159,7 +156,7 @@ jobs: uses: actions/cache@v4 with: path: NimBinCache - key: 'nim-${{ matrix.target.os }}-${{ matrix.target.cpu }}-${{ steps.versions.outputs.nimbus_build_system }}-${{ matrix.target.evmc }}' + key: 'nim-${{ matrix.target.os }}-${{ matrix.target.cpu }}-${{ steps.versions.outputs.nimbus_build_system }}' - name: Build Nim and Nimbus-eth1 dependencies run: | @@ -169,7 +166,7 @@ jobs: if: runner.os == 'Windows' run: | gcc --version - DEFAULT_MAKE_FLAGS="-j${ncpu} ENABLE_EVMC=${ENABLE_EVMC} ENABLE_VMLOWMEM=${ENABLE_VMLOWMEM}" + DEFAULT_MAKE_FLAGS="-j${ncpu} ENABLE_VMLOWMEM=${ENABLE_VMLOWMEM}" mingw32-make ${DEFAULT_MAKE_FLAGS} all test_import build_fuzzers build/nimbus_execution_client.exe --help # give us more space @@ -181,7 +178,7 @@ jobs: if: runner.os == 'Linux' run: | export LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:/usr/local/lib" - DEFAULT_MAKE_FLAGS="-j${ncpu} ENABLE_EVMC=${ENABLE_EVMC}" + DEFAULT_MAKE_FLAGS="-j${ncpu}" env CC=gcc make ${DEFAULT_MAKE_FLAGS} all test_import build_fuzzers build/nimbus_execution_client --help # CC, GOARCH, and CGO_ENABLED are needed to select correct compiler 32/64 bit @@ -196,7 +193,7 @@ jobs: if: runner.os == 'Macos' run: | export ZERO_AR_DATE=1 # avoid timestamps in binaries - DEFAULT_MAKE_FLAGS="-j${ncpu} ENABLE_EVMC=${ENABLE_EVMC}" + DEFAULT_MAKE_FLAGS="-j${ncpu}" make ${DEFAULT_MAKE_FLAGS} all test_import build_fuzzers build/nimbus_execution_client --help # "-static" option will not work for osx unless static system libraries are provided From 541377731e676a41055f3891bb47adf53098d706 Mon Sep 17 00:00:00 2001 From: Anton Iakimov Date: Mon, 2 Jun 2025 10:37:20 +0200 Subject: [PATCH 062/138] hive: remove ubuntu 20.04 mention (#3344) Ubuntu 20.04 is EOL since 31 May 2025 --- hive_integration/README.md | 5 ----- 1 file changed, 5 deletions(-) diff --git a/hive_integration/README.md b/hive_integration/README.md index 2c2e83aca0..6fd2ac0982 100644 --- a/hive_integration/README.md +++ 
b/hive_integration/README.md @@ -18,11 +18,6 @@ Practically, if using an Ubuntu Linux and you want to use the version of Go shipped with Ubuntu, you will need Ubuntu 21.04 or later. It's enough to run `apt-get install golang`. -If using Ubuntu 20.04 LTS (likely because it's the long-term stable version), -the shipped Go isn't recent enough, and there will be build errors. You can -either install a non-Ubuntu packaged version of Go (maybe from -[`golang.org`](https://golang.org/), or use a more recent Ubuntu. - If you want to run Hive in a Linux container, you will need Docker to work in the container because Hive calls Docker (a lot!). This is sometimes called "Docker in Docker". Inside LXD containers, Docker doesn't work by default, but From af07a5366d059f6a4da063c41ad8defd283f760d Mon Sep 17 00:00:00 2001 From: Jordan Hrycaj Date: Mon, 2 Jun 2025 12:52:52 +0000 Subject: [PATCH 063/138] Beacon sync reorg headers download processing (#3359) * Update/generalise last-slow-peer management why With slow peer management, the last remaining sync peer is never zombified if it is *slow* but delivers some data. This was implemented for the blocks download only, now extended to headers download. * Reorg headers download and stashing on header chain cache why Headers download and stashing on header chain cache is now updated after how it is done for blocks (which was originally re-modelled somehow after the headers download in PR #3306.) Apart from a code cleanup, the main change is that each queued record will now hold only a single sync peer response (previously this was a list of several concatenated responses.) * Remove restriction on the number of sync peers why This restriction is a legacy construct which was used for + allowing to run on a single peer for testing + implicitly restrict the header and block queues when the size was restricted by a high-water-mark rather than a strict upper bound. 
* Reduce number of headers requested at a time via ethXX to 800 why This reduces some need for in-memory cache space. When downloading 22.6m headers from `mainnet` with download request size 1024 one can make it in just under an hour on a well exposed site (so that enough peers are available.) Reducing the request size to 800 one gets just some minutes over an hour. * Update copyright year --- execution_chain/sync/beacon/worker.nim | 6 +- .../worker/blocks_staged/bodies_fetch.nim | 4 +- .../sync/beacon/worker/headers_staged.nim | 127 ++++----- .../worker/headers_staged/headers_fetch.nim | 13 +- .../worker/headers_staged/staged_collect.nim | 241 ------------------ .../worker/headers_staged/staged_headers.nim | 111 +++++++- .../sync/beacon/worker/start_stop.nim | 2 +- execution_chain/sync/beacon/worker/update.nim | 3 + execution_chain/sync/beacon/worker_const.nim | 27 +- execution_chain/sync/beacon/worker_desc.nim | 39 ++- 10 files changed, 186 insertions(+), 387 deletions(-) delete mode 100644 execution_chain/sync/beacon/worker/headers_staged/staged_collect.nim diff --git a/execution_chain/sync/beacon/worker.nim b/execution_chain/sync/beacon/worker.nim index 42028b1257..1967a5412a 100644 --- a/execution_chain/sync/beacon/worker.nim +++ b/execution_chain/sync/beacon/worker.nim @@ -60,10 +60,6 @@ proc start*(buddy: BeaconBuddyRef; info: static[string]): bool = peer = buddy.peer ctx = buddy.ctx - if runsThisManyPeersOnly <= buddy.ctx.pool.nBuddies: - if not ctx.hibernate: debug info & ": peers limit reached", peer - return false - if not ctx.pool.seenData and buddy.peerID in ctx.pool.failedPeers: if not ctx.hibernate: debug info & ": useless peer already tried", peer return false @@ -73,7 +69,7 @@ proc start*(buddy: BeaconBuddyRef; info: static[string]): bool = return false if not ctx.hibernate: debug info & ": new peer", - peer, nSyncPeers=buddy.ctx.pool.nBuddies + peer, nSyncPeers=ctx.pool.nBuddies true proc stop*(buddy: BeaconBuddyRef; info: static[string]) = 
diff --git a/execution_chain/sync/beacon/worker/blocks_staged/bodies_fetch.nim b/execution_chain/sync/beacon/worker/blocks_staged/bodies_fetch.nim index c6d2a438c7..0ab6cc568a 100644 --- a/execution_chain/sync/beacon/worker/blocks_staged/bodies_fetch.nim +++ b/execution_chain/sync/beacon/worker/blocks_staged/bodies_fetch.nim @@ -32,7 +32,7 @@ proc fetchRegisterError*(buddy: BeaconBuddyRef, slowPeer = false) = # It would have been zombified if it were not the last one. This can be # used in functions -- depending on context -- that will trigger if the # if the pool of available sync peers becomes empty. - buddy.ctx.pool.blkLastSlowPeer = Opt.some(buddy.peerID) + buddy.ctx.pool.lastSlowPeer = Opt.some(buddy.peerID) else: buddy.ctrl.zombie = true # abandon slow peer unless last one @@ -97,7 +97,7 @@ proc bodiesFetch*( buddy.fetchRegisterError(slowPeer=true) else: buddy.only.nRespErrors.blk = 0 # reset error count - buddy.ctx.pool.blkLastSlowPeer = Opt.none(Hash) # not last one or not error + buddy.ctx.pool.lastSlowPeer = Opt.none(Hash) # not last one or not error trace trEthRecvReceivedBlockBodies, peer, nReq, nResp=b.len, elapsed=elapsed.toStr, syncState=($buddy.syncState), diff --git a/execution_chain/sync/beacon/worker/headers_staged.nim b/execution_chain/sync/beacon/worker/headers_staged.nim index e31bc99b81..8a70a36730 100644 --- a/execution_chain/sync/beacon/worker/headers_staged.nim +++ b/execution_chain/sync/beacon/worker/headers_staged.nim @@ -15,7 +15,7 @@ import pkg/eth/common, pkg/stew/[interval_set, sorted_set], ../worker_desc, - ./headers_staged/[headers_fetch, staged_collect, staged_headers], + ./headers_staged/[headers_fetch, staged_headers], ./headers_unproc # ------------------------------------------------------------------------------ @@ -27,7 +27,7 @@ func headersStagedCollectOk*(buddy: BeaconBuddyRef): bool = if buddy.ctrl.running: let ctx = buddy.ctx if 0 < ctx.headersUnprocAvail() and - not ctx.collectModeStopped(): + not 
ctx.headersModeStopped(): return true false @@ -93,47 +93,29 @@ proc headersStagedCollect*( discard ctx.headersUnprocFetch(top - dangling).expect("iv") let - # Reserve the full range of block numbers so they can be appended in a - # row. This avoid some fragmentation when header chains are stashed by - # multiple peers, i.e. they interleave peer task-wise. - iv = ctx.headersUnprocFetch(nFetchHeadersBatchListLen).valueOr: - break fetchHeadersBody # done, exit this function - # Get parent hash from the most senior stored header parent = ctx.hdrCache.antecedent.parentHash - # Fetch headers and store them on the header chain cache. The function - # returns the last unprocessed block number - bottom = await buddy.collectAndStashOnDiskCache(iv, parent, info) - - # Check whether there were some headers fetched at all - if bottom < iv.maxPt: - nStored += (iv.maxPt - bottom) # statistics - ctx.pool.seenData = true # header data exist + # Fetch some headers + rev = (await buddy.headersFetch( + parent, nFetchHeadersRequest, info)).valueOr: + break fetchHeadersBody # error => exit block - # Job might have been cancelled or completed while downloading headers. - # If so, no more bookkeeping of headers must take place. The *books* - # might have been reset and prepared for the next stage. 
- if ctx.collectModeStopped(): - trace info & ": stopped fetching/storing headers", peer, iv, - bottom=bottom.bnStr, nStored, syncState=($buddy.syncState) - break fetchHeadersBody # done, exit this function + ctx.pool.seenData = true # header data exist - # Commit partially processed block numbers - if iv.minPt <= bottom: - ctx.headersUnprocCommit(iv,iv.minPt,bottom) # partial success only - break fetchHeadersBody # done, exit this function + # Store it on the header chain cache + let dTop = ctx.hdrCache.antecedent.number # current antecedent + if not buddy.headersStashOnDisk(rev, buddy.peerID, info): + break fetchHeadersBody # error => exit block - ctx.headersUnprocCommit(iv) # all headers processed + let dBottom = ctx.hdrCache.antecedent.number # update new antecedent + nStored += (dTop - dBottom) # statistics - debug info & ": fetched headers count", peer, - unprocTop=ctx.headersUnprocAvailTop.bnStr, - D=ctx.hdrCache.antecedent.bnStr, nStored, nStagedQ=ctx.hdr.staged.len, - syncState=($buddy.syncState) + if dBottom == dTop: + break fetchHeadersBody # nothing achieved - # Buddy might have been cancelled while downloading headers. - if buddy.ctrl.stopped: - break fetchHeadersBody + if buddy.ctrl.stopped: # peer was cancelled + break fetchHeadersBody # done, exit this block # End while: `collectAndStashOnDiskCache()` @@ -141,44 +123,26 @@ proc headersStagedCollect*( # fetched headers need to be staged and checked/serialised later. if ctx.hdr.staged.len + ctx.hdr.reserveStaged < headersStagedQueueLengthMax: - let - # Comment see deterministic case - iv = ctx.headersUnprocFetch(nFetchHeadersBatchListLen).valueOr: - break fetchHeadersBody # done, exit this function - - # This record will accumulate the fetched headers. It must be on the - # heap so that `async` can capture that properly. - lhc = (ref LinkedHChain)(peerID: buddy.peerID) - - # Fetch headers and fill up the headers list of `lhc`. The function - # returns the last unprocessed block number. 
+ # Fetch headers ctx.hdr.reserveStaged.inc # Book a slot on `staged` - let bottom = await buddy.collectAndStageOnMemQueue(iv, lhc, info) + let rc = await buddy.headersFetch( + EMPTY_ROOT_HASH, nFetchHeadersRequest, info) ctx.hdr.reserveStaged.dec # Free that slot again - nQueued = lhc.revHdrs.len # statistics + if rc.isErr: + break fetchHeadersBody # done, exit this block - # Job might have been cancelled or completed while downloading headers. - # If so, no more bookkeeping of headers must take place. The *books* - # might have been reset and prepared for the next stage. - if ctx.collectModeStopped(): - trace info & ": stopped fetching/staging headers", peer, iv, - bottom=bottom.bnStr, nStored, syncState=($buddy.syncState) - break fetchHeadersBody # done, exit this function - - # Store `lhc` chain on the `staged` queue if there is any - if 0 < lhc.revHdrs.len: - let qItem = ctx.hdr.staged.insert(iv.maxPt).valueOr: - raiseAssert info & ": duplicate key on staged queue iv=" & $iv - qItem.data = lhc[] - - # Commit processed block numbers - if iv.minPt <= bottom: - ctx.headersUnprocCommit(iv,iv.minPt,bottom) # partial success only - break fetchHeadersBody # done, exit this function + let + # Insert headers list on the `staged` queue + key = rc.value[0].number + qItem = ctx.hdr.staged.insert(key).valueOr: + raiseAssert info & ": duplicate key on staged queue" & + " iv=" & (rc.value[^1].number,key).bnStr + qItem.data.revHdrs = rc.value + qItem.data.peerID = buddy.peerID - ctx.headersUnprocCommit(iv) # all headers processed - # End inner block + nQueued = rc.value.len # statistics + # End if # End block: `fetchHeadersBody` @@ -196,9 +160,10 @@ proc headersStagedCollect*( return info "Queued/staged or DB/stored headers", - unprocTop=(if ctx.collectModeStopped(): "n/a" + unprocTop=(if ctx.headersModeStopped(): "n/a" else: ctx.headersUnprocAvailTop.bnStr), - nQueued, nStored, nStagedQ=ctx.hdr.staged.len, nSyncPeers=ctx.pool.nBuddies + nQueued, nStored, 
nStagedQ=ctx.hdr.staged.len, + nSyncPeers=ctx.pool.nBuddies proc headersStagedProcess*(buddy: BeaconBuddyRef; info: static[string]): bool = @@ -216,22 +181,22 @@ proc headersStagedProcess*(buddy: BeaconBuddyRef; info: static[string]): bool = return false # switch peer var - nStored = 0 # statistics - switchPeer = false # for return code + nStored = 0u64 # statistics + switchPeer = false # for return code while ctx.hdrCache.state == collecting: # Fetch list with largest block numbers let qItem = ctx.hdr.staged.le(high BlockNumber).valueOr: - break # all done + break # all done let minNum = qItem.data.revHdrs[^1].number maxNum = qItem.data.revHdrs[0].number dangling = ctx.hdrCache.antecedent.number if maxNum + 1 < dangling: - debug info & ": gap, serialisation postponed", peer, - qItem=qItem.data.bnStr, D=dangling.bnStr, nStored, + trace info & ": gap, serialisation postponed", peer, + qItem=qItem.data.revHdrs.bnStr, D=dangling.bnStr, nStored, nStagedQ=ctx.hdr.staged.len, nSyncPeers=ctx.pool.nBuddies switchPeer = true # there is a gap -- come back later break @@ -240,18 +205,14 @@ proc headersStagedProcess*(buddy: BeaconBuddyRef; info: static[string]): bool = discard ctx.hdr.staged.delete(qItem.key) # Store headers on database - if not buddy.headersStashOnDisk(qItem.data.revHdrs, info): - # Error mark buddy that produced that unusable headers list - ctx.incHdrProcErrors qItem.data.peerID - + if not buddy.headersStashOnDisk( + qItem.data.revHdrs, qItem.data.peerID, info): ctx.headersUnprocAppend(minNum, maxNum) switchPeer = true break - # Antecedent `dangling` of the header cache might not be at `revHdrs[^1]`. - let revHdrsLen = maxNum - ctx.hdrCache.antecedent.number + 1 - - nStored += revHdrsLen.int # count headers + # Antecedent of the header cache might not be at `revHdrs[^1]`. 
+ nStored += (maxNum - ctx.hdrCache.antecedent.number + 1) # count headers # End while loop if 0 < nStored: diff --git a/execution_chain/sync/beacon/worker/headers_staged/headers_fetch.nim b/execution_chain/sync/beacon/worker/headers_staged/headers_fetch.nim index bfed3d065d..c2891f5e57 100644 --- a/execution_chain/sync/beacon/worker/headers_staged/headers_fetch.nim +++ b/execution_chain/sync/beacon/worker/headers_staged/headers_fetch.nim @@ -24,7 +24,13 @@ import proc registerError(buddy: BeaconBuddyRef, slowPeer = false) = buddy.only.nRespErrors.hdr.inc if nFetchHeadersErrThreshold < buddy.only.nRespErrors.hdr: - if 1 < buddy.ctx.pool.nBuddies or not slowPeer: + if buddy.ctx.pool.nBuddies == 1 and slowPeer: + # Remember that the current peer is the last one and is lablelled slow. + # It would have been zombified if it were not the last one. This can be + # used in functions -- depending on context -- that will trigger if the + # if the pool of available sync peers becomes empty. + buddy.ctx.pool.lastSlowPeer = Opt.some(buddy.peerID) + else: buddy.ctrl.zombie = true # abandon slow peer unless last one # ------------------------------------------------------------------------------ @@ -131,9 +137,10 @@ proc headersFetchReversed*( # mimimum share of the number of requested headers expected, typically 10%. 
if fetchHeadersErrTimeout < elapsed or h.len.uint64 * 100 < req.maxResults * fetchHeadersMinResponsePC: - buddy.registerError() + buddy.registerError(slowPeer=true) else: - buddy.only.nRespErrors.hdr = 0 # reset error count + buddy.only.nRespErrors.hdr = 0 # reset error count + buddy.ctx.pool.lastSlowPeer = Opt.none(Hash) # not last one or not error trace trEthRecvReceivedBlockHeaders, peer, nReq=req.maxResults, hash=topHash.toStr, ivResp=BnRange.new(h[^1].number,h[0].number), diff --git a/execution_chain/sync/beacon/worker/headers_staged/staged_collect.nim b/execution_chain/sync/beacon/worker/headers_staged/staged_collect.nim deleted file mode 100644 index 6c731c5ff7..0000000000 --- a/execution_chain/sync/beacon/worker/headers_staged/staged_collect.nim +++ /dev/null @@ -1,241 +0,0 @@ -# Nimbus -# Copyright (c) 2023-2025 Status Research & Development GmbH -# Licensed and distributed under either of -# * MIT license (license terms in the root directory or at -# https://opensource.org/licenses/MIT). -# * Apache v2 license (license terms in the root directory or at -# https://www.apache.org/licenses/LICENSE-2.0). -# at your option. This file may not be copied, modified, or distributed -# except according to those terms. 
- -{.push raises:[].} - -import - pkg/[chronicles, chronos], - pkg/eth/common, - pkg/stew/interval_set, - ../../worker_desc, - ./[headers_fetch, staged_headers] - -# ------------------------------------------------------------------------------ -# Private helpers -# ------------------------------------------------------------------------------ - -proc fetchRev( - buddy: BeaconBuddyRef; - ivReq: BnRange; - parent: Hash32; - info: static[string]; - ): Future[Result[seq[Header],void]] - {.async: (raises: []).} = - ## Helper/wrapper - var rev = (await buddy.headersFetchReversed(ivReq, parent, info)).valueOr: - buddy.headersUpdateBuddyErrorState() - debug info & ": header fetch error", peer=buddy.peer, ivReq, - nReq=ivReq.len, parent=parent.toStr, syncState=($buddy.syncState), - hdrErrors=buddy.hdrErrors - return err() - ok(rev) - - -proc subRangeMinEndingAt(iv: BnRange; maxPt: BlockNumber): BlockNumber = - ## Get the left end of reasonably sized sub-interval of argument `iv` - ## ending at argument `maxPt`. - if maxPt + 1 <= iv.minPt + nFetchHeadersRequest: - iv.minPt - else: - maxPt - nFetchHeadersRequest + 1 - -# ------------------------------------------------------------------------------ -# Public logging helpers -# ------------------------------------------------------------------------------ - -func bnStr*(w: LinkedHChain | ref LinkedHChain): string = - w.revHdrs.bnStr - -# ------------------------------------------------------------------------------ -# Public helper functions -# ------------------------------------------------------------------------------ - -func collectModeStopped*(ctx: BeaconCtxRef): bool = - ## Helper, checks whether there is a general stop conditions based on - ## state settings (not on sync peer ctrl as `buddy.ctrl.running`.) 
- ctx.poolMode or - ctx.pool.lastState != headers or - ctx.hdrCache.state != collecting - - -proc collectAndStashOnDiskCache*( - buddy: BeaconBuddyRef; - iv: BnRange; - topLink: Hash32; - info: static[string]; - ): Future[BlockNumber] {.async: (raises: []).} = - ## Fetch header interval deterministically by hash and store it directly - ## on the header chain cache. - ## - ## The function returns the largest block number not fetched/stored. - ## - let - ctx = buddy.ctx - peer = buddy.peer - var - ivTop = iv.maxPt # top end of the current range to fetch - parent = topLink # parent hash for the next fetch request - - block fetchHeadersBody: - - while ctx.hdrCache.state == collecting: - let - # Figure out base point for top-most sub-range of argument `iv` - ivReqMin = iv.subRangeMinEndingAt ivTop - - # Request interval - ivReq = BnRange.new(ivReqMin, ivTop) - - # Fetch headers for this range of block numbers - rev = (await buddy.fetchRev(ivReq, parent, info)).valueOr: - break fetchHeadersBody # error => exit block - - # Job might have been cancelled while downloading headrs - if ctx.collectModeStopped(): - break fetchHeadersBody # stop => exit block - - # Store it on the header chain cache - if not buddy.headersStashOnDisk(rev, info): - break fetchHeadersBody # error => exit block - - # Note that `put()` might not have used all of the `rev[]` items for - # updating the antecedent (aka `ctx.dangling`.) So `rev[^1]` might be - # an ancestor of the antecedent. - # - # By design, the unused items from `rev[]` list may savely be considered - # as consumed so that the next cycle can continue. In practice, if there - # are used `rev[]` items then the `state` will have changed which is - # handled in the `while` loop header clause. - # - # Other possibilities would imply that `put()` was called from several - # instances with oberlapping `rev[]` argument lists which is included - # by the administration of the `iv` argument for this function - # `collectAndStashOnDiskCache()`. 
- - # Update remaining range to fetch and check for end-of-loop condition - let newTopBefore = ivTop - BlockNumber(rev.len) - if newTopBefore < iv.minPt: - break # exit while() loop - - ivTop = newTopBefore # mostly results in `ivReq.minPt-1` - parent = rev[^1].parentHash # parent hash for next fetch request - # End loop - - trace info & ": fetched and stored headers", peer, iv, nHeaders=iv.len, - D=ctx.hdrCache.antecedent.bnStr, syncState=($buddy.syncState), - hdrErrors=buddy.hdrErrors - - # Reset header process errors (not too many consecutive failures this time) - buddy.nHdrProcErrors = 0 # all OK, reset error count - return iv.minPt-1 - - # Start processing some error or an incomplete fetch/store result - - trace info & ": partially fetched/stored headers", peer, - iv=(if ivTop < iv.maxPt: BnRange.new(ivTop+1,iv.maxPt).bnStr else: "n/a"), - nHeaders=(iv.maxPt-ivTop), D=ctx.hdrCache.antecedent.bnStr, - syncState=($buddy.syncState), hdrErrors=buddy.hdrErrors - - return ivTop # there is some left over range - - -proc collectAndStageOnMemQueue*( - buddy: BeaconBuddyRef; - iv: BnRange; - lhc: ref LinkedHChain; - info: static[string]; - ): Future[BlockNumber] {.async: (raises: []).} = - ## Fetch header interval opportunistically by hash and append it on the - ## `lhc` argument. - ## - ## The function returns the largest block number not fetched/stored. 
- ## - let - ctx = buddy.ctx - peer = buddy.peer - var - ivTop = iv.maxPt # top end of the current range to fetch - parent = EMPTY_ROOT_HASH # parent hash for next fetch request - - block fetchHeadersBody: - - while true: - let - # Figure out base point for top-most sub-range of argument `iv` - ivReqMin = iv.subRangeMinEndingAt ivTop - - # Request interval - ivReq = BnRange.new(ivReqMin, ivTop) - - # Fetch headers for this range of block numbers - rev = (await buddy.fetchRev(ivReq, parent, info)).valueOr: - break fetchHeadersBody # error => exit block - - # Job might have been cancelled while downloading headrs - if ctx.collectModeStopped(): - break fetchHeadersBody # stop => exit block - - # While assembling a `LinkedHChainRef`, only boundary checks are used to - # verify that the header lists are acceptable. A thorough check will be - # performed later when storing this list on the header chain cache. - - # Boundary check for block numbers - let ivBottom = ivTop - rev.len.uint64 + 1 - if rev[0].number != ivTop or rev[^1].number != ivBottom: - buddy.headersUpdateBuddyProcError() - debug info & ": header queue error", peer, iv, ivReq, - receivedHeaders=rev.bnStr, expected=(ivBottom,ivTop).bnStr, - syncState=($buddy.syncState), hdrErrors=buddy.hdrErrors - break fetchHeadersBody # error => exit block - - # Check/update hashes - let hash0 = rev[0].computeBlockHash - if lhc.revHdrs.len == 0: - lhc.hash = hash0 - else: - if lhc.revHdrs[^1].parentHash != hash0: - buddy.headersUpdateBuddyProcError() - debug info & ": header queue error", peer, iv, ivReq, - hash=hash0.toStr, expected=lhc.revHdrs[^1].parentHash.toStr, - syncState=($buddy.syncState), hdrErrors=buddy.hdrErrors - break fetchHeadersBody # error => exit block - - lhc.revHdrs &= rev - - # Update remaining range to fetch and check for end-of-loop condition - if ivTop < iv.minPt + rev.len.uint64: - break # exit while loop - - parent = rev[^1].parentHash # continue deterministically - ivTop -= rev.len.uint64 # 
mostly results in `ivReq.minPt-1` - # End loop - - trace info & ": fetched and staged all headers", peer, iv, - D=ctx.hdrCache.antecedent.bnStr, nHeaders=iv.len, - syncState=($buddy.syncState), hdrErrors=buddy.hdrErrors - - # Reset header process errors (not too many consecutive failures this time) - buddy.nHdrProcErrors = 0 # all OK, reset error count - - return iv.minPt-1 # all fetched as instructed - # End block: `fetchHeadersBody` - - # Start processing some error or an incomplete fetch/stage result - - trace info & ": partially fetched and staged headers", peer, iv, - D=ctx.hdrCache.antecedent.bnStr, stagedHeaders=lhc.bnStr, - nHeaders=lhc.revHdrs.len, syncState=($buddy.syncState), - hdrErrors=buddy.hdrErrors - - return ivTop # there is some left over range - -# ------------------------------------------------------------------------------ -# End -# ------------------------------------------------------------------------------ diff --git a/execution_chain/sync/beacon/worker/headers_staged/staged_headers.nim b/execution_chain/sync/beacon/worker/headers_staged/staged_headers.nim index dcac96050f..26adb065fb 100644 --- a/execution_chain/sync/beacon/worker/headers_staged/staged_headers.nim +++ b/execution_chain/sync/beacon/worker/headers_staged/staged_headers.nim @@ -15,6 +15,7 @@ import pkg/eth/common, pkg/stew/interval_set, ../../worker_desc, + ../headers_unproc, ./headers_fetch # ------------------------------------------------------------------------------ @@ -37,33 +38,123 @@ proc headersUpdateBuddyProcError*(buddy: BeaconBuddyRef) = # ----------------- +func headersModeStopped*(ctx: BeaconCtxRef): bool = + ## Helper, checks whether there is a general stop conditions based on + ## state settings (not on sync peer ctrl as `buddy.ctrl.running`.) 
+ ctx.poolMode or + ctx.pool.lastState != headers or + ctx.hdrCache.state != collecting + + +proc headersFetch*( + buddy: BeaconBuddyRef; + parent: Hash32; + num: uint; + info: static[string]; + ): Future[Opt[seq[Header]]] + {.async: (raises: []).} = + ## From the p2p/ethXX network fetch as many headers as given as argument + ## `num`. The returned list will be in reverse order, i.e. the first header + ## is the most recent and the last one the most senior. + let + ctx = buddy.ctx + peer = buddy.peer + + # Make share that this sync peer is not banned from header processing, + # already + if nStashHeadersErrThreshold < buddy.nHdrProcErrors(): + buddy.ctrl.zombie = true + return Opt.none(seq[Header]) + + let + # Fetch next available interval + iv = ctx.headersUnprocFetch(num).valueOr: + return Opt.none(seq[Header]) # stop, exit function + + # Fetch headers for this range of block numbers + rc = await buddy.headersFetchReversed(iv, parent, info) + + # Job might have been cancelled or completed while downloading headers. + # If so, no more bookkeeping of headers must take place. The *books* + # might have been reset and prepared for the next stage. 
+ if ctx.headersModeStopped(): + return Opt.none(seq[Header]) # stop, exit function + + if rc.isErr: + ctx.headersUnprocCommit(iv, iv) # clean up, revert `iv` + return Opt.none(seq[Header]) # stop, exit function + + # Boundary check for header block numbers + let + nHeaders = rc.value.len.uint64 + ivBottom = iv.maxPt - nHeaders + 1 + if rc.value[0].number != iv.maxPt or rc.value[^1].number != ivBottom: + buddy.headersUpdateBuddyProcError() + ctx.headersUnprocCommit(iv, iv) # clean up, revert `iv` + debug info & ": garbled header list", peer, iv, headers=rc.value.bnStr, + expected=(ivBottom,iv.maxPt).bnStr, syncState=($buddy.syncState), + hdrErrors=buddy.hdrErrors + return Opt.none(seq[Header]) # stop, exit function + + # Commit blocks received (and revert lower unused block numbers) + ctx.headersUnprocCommit(iv, iv.minPt, iv.maxPt - nHeaders) + return rc + + proc headersStashOnDisk*( buddy: BeaconBuddyRef; revHdrs: seq[Header]; + peerID: Hash; info: static[string]; ): bool = ## Convenience wrapper, makes it easy to produce comparable messages ## whenever it is called similar to `blocksImport()`. 
let ctx = buddy.ctx - d9 = ctx.hdrCache.antecedent.number # for logging - rc = ctx.hdrCache.put(revHdrs) + peer = buddy.peer + dTop = ctx.hdrCache.antecedent.number # current antecedent + rc = ctx.hdrCache.put(revHdrs) # verify and save headers if rc.isErr: - buddy.headersUpdateBuddyProcError() - debug info & ": header stash error", peer=buddy.peer, iv=revHdrs.bnStr, - syncState=($buddy.syncState), hdrErrors=buddy.hdrErrors, error=rc.error + # Mark peer that produced that unusable headers list as a zombie + ctx.setHdrProcFail peerID + + # Check whether it is enough to skip the current headers list, only + if ctx.subState.procFailNum != dTop: + ctx.subState.procFailNum = dTop # OK, this is a new block + ctx.subState.procFailCount = 1 + + else: + ctx.subState.procFailCount.inc # block num was seen, already + + # Cancel the whole download if needed + if nStashHeadersErrThreshold < ctx.subState.procFailCount: + ctx.subState.cancelRequest = true # So require queue reset + + # Proper logging .. 
+ if ctx.subState.cancelRequest: + warn "Header stash error (cancel this session)", iv=revHdrs.bnStr, + syncState=($buddy.syncState), hdrErrors=buddy.hdrErrors, + hdrFailCount=ctx.subState.procFailCount, error=rc.error + else: + info "Header stash error (skip remaining)", iv=revHdrs.bnStr, + syncState=($buddy.syncState), hdrErrors=buddy.hdrErrors, + hdrFailCount=ctx.subState.procFailCount, error=rc.error + + return false # stop - let d0 = ctx.hdrCache.antecedent.number - info "Cached headers", iv=(if d0 < d9: (d0,d9-1).bnStr else: "n/a"), - nHeaders=(d9 - d0), + let dBottom = ctx.hdrCache.antecedent.number # new antecedent + trace info & ": Serialised headers stashed", peer, + iv=(if dBottom < dTop: (dBottom,dTop-1).bnStr else: "n/a"), + nHeaders=(dTop - dBottom), nSkipped=(if rc.isErr: 0u64 - elif revHdrs[^1].number <= d0: (d0 - revHdrs[^1].number) + elif revHdrs[^1].number <= dBottom: (dBottom - revHdrs[^1].number) else: revHdrs.len.uint64), base=ctx.chain.baseNumber.bnStr, head=ctx.chain.latestNumber.bnStr, target=ctx.subState.head.bnStr, targetHash=ctx.subState.headHash.short - rc.isOk + ctx.resetHdrProcErrors peerID # reset error count + true # ------------------------------------------------------------------------------ # End diff --git a/execution_chain/sync/beacon/worker/start_stop.nim b/execution_chain/sync/beacon/worker/start_stop.nim index d60684d0ec..37a5925f9e 100644 --- a/execution_chain/sync/beacon/worker/start_stop.nim +++ b/execution_chain/sync/beacon/worker/start_stop.nim @@ -94,7 +94,7 @@ proc startBuddy*(buddy: BeaconBuddyRef): bool = if acceptProto(eth69) or acceptProto(eth68): ctx.pool.nBuddies.inc - ctx.pool.blkLastSlowPeer = Opt.none(Hash) + ctx.pool.lastSlowPeer = Opt.none(Hash) buddy.initProcErrors() return true diff --git a/execution_chain/sync/beacon/worker/update.nim b/execution_chain/sync/beacon/worker/update.nim index 42b282f251..d21db5c0af 100644 --- a/execution_chain/sync/beacon/worker/update.nim +++ 
b/execution_chain/sync/beacon/worker/update.nim @@ -99,6 +99,9 @@ proc headersNext(ctx: BeaconCtxRef; info: static[string]): SyncState = limit=nFetchHeadersFailedInitialPeersThreshold return headersCancel + if ctx.subState.cancelRequest: + return headersCancel + if ctx.hdrCache.state == collecting: return SyncState.headers diff --git a/execution_chain/sync/beacon/worker_const.nim b/execution_chain/sync/beacon/worker_const.nim index c673207e44..63bf86f22a 100644 --- a/execution_chain/sync/beacon/worker_const.nim +++ b/execution_chain/sync/beacon/worker_const.nim @@ -1,5 +1,5 @@ # Nimbus -# Copyright (c) 2021-2025 Status Research & Development GmbH +# Copyright (c) 2024-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at # https://opensource.org/licenses/MIT). @@ -26,20 +26,6 @@ const enableTicker* = false ## Log regular status updates similar to metrics. Great for debugging. - runsThisManyPeersOnly* = 8 - ## Set to `1` for running a single peer only at a time. Great for debugging. - ## - ## Otherwise, this setting limits the number of peers accepted by the - ## `runStart()` peer initialiser. When testing with an unlimited number of - ## peers with some double digit number of connected peers, the observed - ## response times when fetching headers seemed to degrade considerable into - ## seconds (rather than ms.) This will be further looked at to be confirmed - ## or rejected as insignificant. - ## - ## Note: - ## This setting has priority over the `maxPeers` setting of the - ## `BeaconSyncRef.init()` initaliser. - # ---------------------- metricsUpdateInterval* = chronos.seconds(10) @@ -69,7 +55,7 @@ const ## reset and suspened (waiting for the next activation to restart a new ## session.) - nFetchHeadersRequest* = 1_024 + nFetchHeadersRequest* = 800 ## Number of headers that will be requested with a single `eth/xx` message. 
## ## On `Geth`, responses to larger requests are all truncted to 1024 header @@ -92,9 +78,9 @@ const ## so that the peers is not treated as a slow responder (see also above ## for slow responder timeout.) - nFetchHeadersBatchListLen* = 8 * nFetchHeadersRequest - ## Length of a request/stage batch list. Several headers are consecutively - ## fetched and stashed together as a single record on the staged queue. + nStashHeadersErrThreshold* = 2 + ## Abort headers download and the whole sync session with it if too many + ## failed header chain cache storage requests occur. headersStagedQueueLengthMax* = 8 ## If the staged header queue reaches this many queue objects for @@ -128,10 +114,7 @@ const # ---------------------- static: - doAssert 0 < runsThisManyPeersOnly - doAssert 0 < nFetchHeadersRequest - doAssert nFetchHeadersRequest <= nFetchHeadersBatchListLen doAssert 0 < headersStagedQueueLengthMax doAssert 0 < nFetchBodiesRequest diff --git a/execution_chain/sync/beacon/worker_desc.nim b/execution_chain/sync/beacon/worker_desc.nim index a688096d8f..95eda61e4f 100644 --- a/execution_chain/sync/beacon/worker_desc.nim +++ b/execution_chain/sync/beacon/worker_desc.nim @@ -1,5 +1,5 @@ # Nimbus -# Copyright (c) 2021-2025 Status Research & Development GmbH +# Copyright (c) 2024-2025 Status Research & Development GmbH # Licensed and distributed under either of # * MIT license (license terms in the root directory or at # https://opensource.org/licenses/MIT). @@ -33,12 +33,15 @@ type ## Block intervals sorted by largest block number. LinkedHChain* = object - ## Public block items for the `StagedHeaderQueue` list, indexed by the - ## largest block number. The list `revHdrs[]` is reversed, i.e. the largest - ## block number has the least index `0`. This makes it easier to grow the - ## sequence with parent headers, i.e. decreasing block numbers. + ## Headers list item. + ## + ## The list `revHdrs[]` is reversed, i.e. the largest block number has + ## the least index `0`. 
This makes it easier to grow the sequence with + ## parent headers, i.e. decreasing block numbers. + ## + ## The headers list item indexed by the greatest block number (i.e. by + ## `revHdrs[0]`.) ## - hash*: Hash32 ## Hash of `headers[0]` revHdrs*: seq[Header] ## Linked header chain, reversed peerID*: Hash ## For comparing peers @@ -113,7 +116,7 @@ type # Info, debugging, and error handling stuff nProcError*: Table[Hash,BuddyError] ## Per peer processing error - blkLastSlowPeer*: Opt[Hash] ## Register slow peer when last one + lastSlowPeer*: Opt[Hash] ## Register slow peer when the last one failedPeers*: HashSet[Hash] ## Detect dead end sync by collecting peers seenData*: bool ## Set `true` is data were fetched, already @@ -204,15 +207,6 @@ proc nHdrProcErrors*(buddy: BeaconBuddyRef): int = buddy.ctx.pool.nProcError.withValue(buddy.peerID, val): return val.hdr.int -proc `nHdrProcErrors=`*(buddy: BeaconBuddyRef; count: uint8) = - ## Setter, set arbitrary `proc` error count for argument `buddy`. Due - ## to (hypothetical) hash collisions, the error register might have - ## vanished in case a new one is instantiated. - buddy.ctx.pool.nProcError.withValue(buddy.peerID, val): - val.hdr = count - do: - buddy.ctx.pool.nProcError[buddy.peerID] = (count,0u8) - proc incHdrProcErrors*(buddy: BeaconBuddyRef) = ## Increment `proc` error count for for argument `buddy`. Due to ## (hypothetical) hash collisions, the error register might have @@ -222,11 +216,16 @@ proc incHdrProcErrors*(buddy: BeaconBuddyRef) = do: buddy.ctx.pool.nProcError[buddy.peerID] = (1u8,0u8) -proc incHdrProcErrors*(ctx: BeaconCtxRef; peerID: Hash) = - ## Increment `proc` error count for for argument `peerID` entry if it - ## has a slot. Otherwise the instruction is ignored. +proc setHdrProcFail*(ctx: BeaconCtxRef; peerID: Hash) = + ## Set `proc` error count high enough so that the implied sync peer will + ## be zombified on the next attempt to download data. 
ctx.pool.nProcError.withValue(peerID, val): - val.hdr.inc + val.hdr = nProcHeadersErrThreshold + 1 + +proc resetHdrProcErrors*(ctx: BeaconCtxRef; peerID: Hash) = + ## Reset `proc` error count. + ctx.pool.nProcError.withValue(peerID, val): + val.hdr = 0 # ----- From f5db4e4b5ed34ba6a5ed37da848caf422735cad4 Mon Sep 17 00:00:00 2001 From: Jacek Sieka Date: Mon, 2 Jun 2025 21:44:06 +0200 Subject: [PATCH 064/138] ariso: assorted cleanups (#3360) Add some pretty printers here and there and remove the ones that are no longer used --- execution_chain/db/aristo/aristo_compute.nim | 5 +- execution_chain/db/aristo/aristo_debug.nim | 717 ------------------ execution_chain/db/aristo/aristo_desc.nim | 3 - .../aristo/aristo_desc/desc_identifiers.nim | 8 +- .../db/aristo/aristo_desc/desc_structural.nim | 52 +- execution_chain/db/aristo/aristo_merge.nim | 6 +- execution_chain/db/kvt/kvt_layers.nim | 20 - tests/test_aristo/test_blobify.nim | 25 +- 8 files changed, 65 insertions(+), 771 deletions(-) delete mode 100644 execution_chain/db/aristo/aristo_debug.nim diff --git a/execution_chain/db/aristo/aristo_compute.nim b/execution_chain/db/aristo/aristo_compute.nim index 4d32003f4a..16828e33b3 100644 --- a/execution_chain/db/aristo/aristo_compute.nim +++ b/execution_chain/db/aristo/aristo_compute.nim @@ -15,7 +15,7 @@ import chronicles, eth/common/[accounts_rlp, base_rlp, hashes_rlp], results, - "."/[aristo_desc, aristo_get], + "."/[aristo_desc, aristo_get, aristo_layers], ./aristo_desc/desc_backend type WriteBatch = tuple[writer: PutHdlRef, count: int, depth: int, prefix: uint64] @@ -85,8 +85,7 @@ proc putKeyAtLevel( else: debug "Writing computeKey cache", keys = batch.count, accounts = batch.progress else: - db.deltaAtLevel(level).sTab[rvid] = vtx - db.deltaAtLevel(level).kMap[rvid] = key + db.deltaAtLevel(level).layersPutKey(rvid, vtx, key) ok() diff --git a/execution_chain/db/aristo/aristo_debug.nim b/execution_chain/db/aristo/aristo_debug.nim deleted file mode 100644 index 
4ff48385a2..0000000000 --- a/execution_chain/db/aristo/aristo_debug.nim +++ /dev/null @@ -1,717 +0,0 @@ -# nimbus-eth1 -# Copyright (c) 2023-2025 Status Research & Development GmbH -# Licensed under either of -# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or -# http://www.apache.org/licenses/LICENSE-2.0) -# * MIT license ([LICENSE-MIT](LICENSE-MIT) or -# http://opensource.org/licenses/MIT) -# at your option. This file may not be copied, modified, or distributed -# except according to those terms. - -{.push raises: [].} - -import - std/[algorithm, sequtils, sets, strutils, tables], - eth/common, - results, - stew/[byteutils, interval_set], - ./aristo_desc/desc_backend, - ./aristo_init/[memory_db, memory_only, rocks_db], - "."/[aristo_desc, aristo_get, aristo_layers, - aristo_serialise, aristo_utils] - -# ------------------------------------------------------------------------------ -# Private functions -# ------------------------------------------------------------------------------ - -func orDefault(db: AristoTxRef): AristoTxRef = - if db.isNil: AristoTxRef() else: db - -# -------------------------- - -func toHex(w: VertexID): string = - w.uint64.toHex - -func toHexLsb(w: int8): string = - $"0123456789abcdef"[w and 15] - -func sortedKeys(tab: Table): seq = - tab.keys.toSeq.sorted - -func sortedKeys(pPrf: HashSet): seq = - pPrf.toSeq.sorted - -func toPfx(indent: int; offset = 0): string = - if 0 < indent+offset: "\n" & " ".repeat(indent+offset) else: "" - -func squeeze(s: string; hex = false; ignLen = false): string = - ## For long strings print `begin..end` only - if hex: - let n = (s.len + 1) div 2 - result = if s.len < 20: s else: s[0 .. 5] & ".." & s[s.len-8 .. ^1] - if not ignLen: - result &= "[" & (if 0 < n: "#" & $n else: "") & "]" - elif s.len <= 30: - result = s - else: - result = if (s.len and 1) == 0: s[0 ..< 8] else: "0" & s[0 ..< 7] - if not ignLen: - result &= "..(" & $s.len & ")" - result &= ".." & s[s.len-16 .. 
^1] - -func stripZeros(a: string; toExp = false): string = - if 0 < a.len: - result = a.strip(leading=true, trailing=false, chars={'0'}) - if result.len == 0: - result = "0" - elif result[^1] == '0' and toExp: - var n = 0 - while result[^1] == '0': - let w = result.len - result.setLen(w-1) - n.inc - if n == 1: - result &= "0" - elif n == 2: - result &= "00" - elif 2 < n: - result &= "↑" & $n - -# --------------------- - -func ppKeyOk( - db: AristoTxRef; - key: HashKey; - rvid: RootedVertexID; - ): string = - if key.isValid and rvid.isValid: - let rv = db.db.xMap.getOrVoid key - if rv.isValid: - if rvid != rv: - result = "(!)" - return - db.db.xMap[key] = rvid - -func ppVid(vid: VertexID; pfx = true): string = - if pfx: - result = "$" - if vid.isValid: - result &= vid.toHex.stripZeros.toLowerAscii - else: - result &= "ø" - -func ppVid(sid: StorageID; pfx = true): string = - if sid.isValid or not sid.vid.isValid: - sid.vid.ppVid(pfx) - else: - (if pfx: "$" else: "") & "®" & sid.vid.ppVid(false) - -func ppVid(rvid: RootedVertexID; pfx = true): string = - if pfx: - result = "$" - result &= ppVid(rvid.root, pfx=false) & ":" & ppVid(rvid.vid, pfx=false) - -func ppCodeHash(h: Hash32): string = - result = "¢" - if h == default(Hash32): - result &= "©" - elif h == EMPTY_CODE_HASH: - result &= "ø" - else: - result &= h.data.toHex.squeeze(hex=true,ignLen=true) - -func ppVidList(vLst: openArray[VertexID]): string = - result = "[" - if vLst.len <= 250: - result &= vLst.mapIt(it.ppVid).join(",") - else: - result &= vLst[0 .. 99].mapIt(it.ppVid).join(",") - result &= ",.." - result &= vLst[^100 .. 
^1].mapIt(it.ppVid).join(",") - result &= "]" - -proc ppKey(key: HashKey; db: AristoTxRef; pfx = true): string = - if pfx: - result = "£" - if key.to(Hash32) == default(Hash32): - result &= "©" - elif not key.isValid: - result &= "ø" - else: - # Reverse lookup - let rvid = db.db.xMap.getOrVoid key - if rvid.isValid: - result &= rvid.ppVid(pfx=false) - let vtx = db.getVtx rvid - if vtx.isValid: - let rc = vtx.toNode(rvid.root, db) - if rc.isErr or key != rc.value.digestTo(HashKey): - result &= "≠" - else: - result &= "∞" - else: - let tag = if key.len < 32: "[#" & $key.len & "]" else: "" - result &= @(key.data).toHex.squeeze(hex=true,ignLen=true) & tag - -func ppPathPfx(pfx: NibblesBuf): string = - let s = $pfx - if s.len < 20: s else: s[0 .. 5] & ".." & s[s.len-8 .. ^1] & ":" & $s.len - -func ppNibble(n: int8): string = - if n < 0: "ø" elif n < 10: $n else: n.toHexLsb - -proc ppEthAccount(a: Account, db: AristoTxRef): string = - result = "(" - result &= ($a.nonce).stripZeros(toExp=true) & "," - result &= ($a.balance).stripZeros(toExp=true) & "," - result &= a.codeHash.ppCodeHash & "," - result &= a.storageRoot.to(HashKey).ppKey(db) & ")" - -func ppAriAccount(a: AristoAccount): string = - result = "(" - result &= ($a.nonce).stripZeros(toExp=true) & "," - result &= ($a.balance).stripZeros(toExp=true) & "," - result &= a.codeHash.ppCodeHash & ")" - -func ppPayload(p: LeafPayload, db: AristoTxRef): string = - case p.pType: - of AccountData: - result = "(" & p.account.ppAriAccount() & "," & p.stoID.ppVid & ")" - of StoData: - result = ($p.stoData).squeeze - -func ppVtx(nd: VertexRef, db: AristoTxRef, rvid: RootedVertexID): string = - if not nd.isValid: - result = "ø" - else: - if not rvid.isValid: - result = ["L(", "B("][nd.vType.ord] - elif db.layersGetKey(rvid).isOk: - result = ["l(", "b("][nd.vType.ord] - else: - result = ["ł(", "þ("][nd.vType.ord] - case nd.vType: - of Leaf: - result &= nd.pfx.ppPathPfx & "," & nd.lData.ppPayload(db) - of Branch: - result &= 
nd.pfx.ppPathPfx & ":" - for n in 0'u8..15'u8: - if nd.bVid(n).isValid: - result &= nd.bVid(n).ppVid - if n < 15: - result &= "," - result &= ")" - - -proc ppNode( - nd: NodeRef; - db: AristoTxRef; - rvid = default(RootedVertexID); - ): string = - if not nd.isValid: - result = "ø" - else: - if not rvid.isValid: - result = ["L(", "B("][nd.vtx.vType.ord] - elif db.layersGetKey(rvid).isOk: - result = ["l(", "b("][nd.vtx.vType.ord] - else: - result = ["ł(", "þ("][nd.vtx.vType.ord] - case nd.vtx.vType: - of Leaf: - result &= nd.vtx.pfx.ppPathPfx & "," - if nd.vtx.lData.pType == AccountData: - result &= "(" & nd.vtx.lData.account.ppAriAccount() & "," - if nd.vtx.lData.stoID.isValid: - let tag = db.ppKeyOk(nd.key[0],(rvid.root,nd.vtx.lData.stoID.vid)) - result &= nd.vtx.lData.stoID.ppVid & tag - else: - result &= nd.vtx.lData.stoID.ppVid - if nd.key[0].isValid: - result &= nd.key[0].ppKey(db) - result &= ")" - else: - result &= nd.vtx.lData.ppPayload(db) - of Branch: - let keyOnly = nd.vtx.subVids.toSeq.filterIt(it.isValid).len == 0 - result &= nd.vtx.pfx.ppPathPfx & ":" - for n in 0'u8..15'u8: - if nd.vtx.bVid(n).isValid: - let tag = db.ppKeyOk(nd.key[n],(rvid.root,nd.vtx.bVid(n))) - result &= nd.vtx.bVid(n).ppVid & tag - elif keyOnly and nd.key[n].isValid: - result &= nd.key[n].ppKey(db) - if n < 15: - result &= "," - result &= ")" - - -func ppXTab[T: VertexRef|NodeRef]( - tab: Table[RootedVertexID,T]; - db: AristoTxRef; - indent = 4; - ): string = - proc ppT(v: T; r: RootedVertexID): string = - when T is VertexRef: - v.ppVtx(db, r) - elif T is NodeRef: - v.ppNode(db, r) - "{" & tab.sortedKeys - .mapIt((it, tab.getOrDefault it)) - .mapIt("(" & it[0].ppVid & "," & it[1].ppT(it[0]) & ")") - .join(indent.toPfx(1)) & "}" - - -proc ppXMap*( - db: AristoTxRef; - kMap: Table[RootedVertexID,HashKey]; - indent: int; - ): string = - let pfx = indent.toPfx(1) - - # Sort keys by root, - # entry int: 0=no-key 1=no-vertex 2=cant-compile 3=key-mistmatch 4=key-ok - var keyLst: 
seq[(VertexID,seq[(VertexID,HashKey,int)])] - block: - var root = VertexID(0) - for w in kMap.sortedKeys: - if w.root != root: - keyLst.add (w.root,newSeq[typeof keyLst[0][1][0]](0)) - root = w.root - let - key = kMap.getOrVoid w - mode = block: - if key == VOID_HASH_KEY: - 0 - else: - db.db.xMap[key] = w - let vtx = db.getVtx(w) - if not vtx.isValid: - 1 - else: - let rc = vtx.toNode(w.root, db) - if rc.isErr: - 2 - elif key != rc.value.digestTo(HashKey): - 3 - else: - 4 - keyLst[^1][1].add (w.vid,key,mode) - - # Join increasing sequences for pretty printing - var keySubLst: seq[(VertexID,seq[seq[(VertexID,HashKey,int)]])] - for (root,rootQ) in keyLst: - var - q: seq[(VertexID,HashKey,int)] - subQ: seq[typeof q] - for (vid,key,state) in rootQ: - if q.len == 0: - q.add (vid,key,state) - continue - if q[^1][0]+1 == vid and q[^1][2] == state: - q.add (vid,key,state) - continue - # Otherwise new sub queue - subQ.add q - q = @[(vid,key,state)] - if 0 < q.len: - subQ.add q - keySubLst.add (root,subQ) - - proc pp(w: (VertexID,HashKey,int)): string = - proc pp(k: HashKey): string = - result = w[1].data.toHex.squeeze(hex=true,ignLen=true) - if k.len < 32: - result &= "[#" & $k.len & "]" - w[0].ppVid(pfx=false) & ( - case w[2]: - of 0: "=ø" - of 1: "∞" - of 2: "=" & w[1].pp() - of 3: "≠" & w[1].pp() - else: "") - - result &= "{" - - var qfx = "" - for (root,subQ) in keySubLst: - result &= qfx & "£" & root.ppVid(pfx=false) & ":" - qfx = pfx - var closeBracket = "" - if 1 < subQ.len or 1 < subQ[0].len: - result &= "[" - closeBracket = "]" - for q in subQ: - if q.len < 3: - result &= q.mapIt(it.pp).join(",") - else: - result &= q[0].pp & ".." 
& q[^1].pp - result &= "," - result.setLen(result.len - 1) - result &= closeBracket - - result &= "}" - - -proc ppBalancer( - fl: AristoTxRef; - db: AristoTxRef; - indent: int; - ): string = - ## Walk over filter tables - let - pfx = indent.toPfx - pfx1 = indent.toPfx(1) - pfx2 = indent.toPfx(2) - result = "" - if fl.isNil: - result &= " n/a" - return - result &= pfx & "vTop=" & fl.vTop.ppVid - result &= pfx & "sTab" & pfx1 & "{" - for n,vid in fl.sTab.sortedKeys: - let vtx = fl.sTab.getOrVoid vid - if 0 < n: result &= pfx2 - result &= $(1+n) & "(" & vid.ppVid & "," & vtx.ppVtx(db,vid) & ")" - result &= "}" & pfx & "kMap" & pfx1 & "{" - for n,vid in fl.kMap.sortedKeys: - let key = fl.kMap.getOrVoid vid - if 0 < n: result &= pfx2 - result &= $(1+n) & "(" & vid.ppVid & "," & key.ppKey(db) & ")" - result &= "}" - -proc ppBe[T](be: T; db: AristoTxRef; limit: int; indent: int): string = - ## Walk over backend tables - let - pfx = indent.toPfx - pfx1 = indent.toPfx(1) - pfx2 = indent.toPfx(2) - result = "<" & $be.kind & ">" - var (dump,dataOk) = ("",false) - block: - let rc = be.getTuvFn() - if rc.isOk: - dump &= pfx & "vTop=" & rc.value.ppVid - dataOk = true - block: - dump &= pfx & "sTab" - var (n, data) = (0, "") - for (vid,vtx) in be.walkVtx: - n.inc - if n < limit: - if 1 < n: data &= pfx2 - data &= $n & "(" & vid.ppVid & "," & vtx.ppVtx(db,vid) & ")" - elif n == limit: - data &= pfx2 & ".." - dump &= "(" & $n & ")" - if 0 < n: - dataOk = true - dump &= pfx1 - dump &= "{" & data & "}" - block: - dump &= pfx & "kMap" - var (n, data) = (0, "") - for (vid,key) in be.walkKey: - n.inc - if n < limit: - if 1 < n: data &= pfx2 - data &= $n & "(" & vid.ppVid & "," & key.ppKey(db) & ")" - elif n == limit: - data &= pfx2 & ".." 
- dump &= "(" & $n & ")" - if 0 < n: - dataOk = true - dump &= pfx1 - dump &= "{" & data & "}" - if dataOk: - result &= dump - else: - result &= "[]" - -proc ppLayer( - layer: AristoTxRef; - db: AristoTxRef; - vTopOk: bool; - sTabOk: bool; - kMapOk: bool; - indent = 4; - ): string = - let - pfx1 = indent.toPfx(1) - pfx2 = indent.toPfx(2) - nOKs = vTopOk.ord + sTabOk.ord + kMapOk.ord - tagOk = 1 < nOKs - var - pfy = "" - - proc doPrefix(s: string; dataOk: bool): string = - var rc: string - if tagOk: - rc = pfy - if 0 < s.len: - rc &= s & (if dataOk: pfx2 else: "") - pfy = pfx1 - else: - rc = pfy - pfy = pfx2 - rc - - if not layer.isNil: - if 2 < nOKs: - result &= "".doPrefix(false) - if vTopOk: - result &= "".doPrefix(true) & "vTop=" & layer.vTop.ppVid - if sTabOk: - let - tLen = layer.sTab.len - info = "sTab(" & $tLen & ")" - result &= info.doPrefix(0 < tLen) & layer.sTab.ppXTab(db,indent+2) - if kMapOk: - let - tLen = layer.kMap.len - info = "kMap(" & $tLen & ")" - result &= info.doPrefix(0 < tLen) - result &= db.ppXMap(layer.kMap, indent+2) - -# ------------------------------------------------------------------------------ -# Public functions -# ------------------------------------------------------------------------------ - -func pp*(w: Hash32; codeHashOk: bool): string = - if codeHashOk: - w.ppCodeHash - elif w == EMPTY_ROOT_HASH: - "EMPTY_ROOT_HASH" - elif w == default(Hash32): - "default(Hash32)" - else: - w.data.toHex.squeeze(hex=true,ignLen=true) - -func pp*(n: NibblesBuf): string = - n.ppPathPfx() - -proc pp*(w: HashKey; db = AristoTxRef(nil)): string = - w.ppKey(db.orDefault) - -proc pp*(w: Hash32; db = AristoTxRef(nil)): string = - w.to(HashKey).ppKey(db.orDefault) - -proc pp*(w: openArray[HashKey]; db = AristoTxRef(nil)): string = - "[" & @w.mapIt(it.ppKey(db.orDefault)).join(",") & "]" - -proc pp*(a: Account, db = AristoTxRef(nil)): string = - a.ppEthAccount(db.orDefault) - -func pp*(vid: VertexID): string = - vid.ppVid - -func pp*(rvid: 
RootedVertexID): string = - rvid.ppVid - -func pp*(vLst: openArray[VertexID]): string = - vLst.ppVidList - -func pp*(p: LeafPayload, db = AristoTxRef(nil)): string = - p.ppPayload(db.orDefault) - -func pp*(nd: VertexRef, db = AristoTxRef(nil)): string = - nd.ppVtx(db.orDefault, default(RootedVertexID)) - -proc pp*(nd: NodeRef, db = AristoTxRef(nil)): string = - nd.ppNode(db.orDefault, default(RootedVertexID)) - -func pp*(e: (VertexID,AristoError)): string = - "(" & e[0].pp & "," & $e[1] & ")" - -func pp*[T](rc: Result[T,(VertexID,AristoError)]): string = - if rc.isOk: - result = "ok(" - when T isnot void: - result &= ".." - result &= ")" - else: - result = "err(" & rc.error.pp & ")" - -func pp*( - sTab: Table[RootedVertexID,VertexRef]; - db = AristoTxRef(nil); - indent = 4; - ): string = - sTab.ppXTab(db.orDefault) - -proc pp*(leg: Leg; root: VertexID; db = AristoTxRef(nil)): string = - let db = db.orDefault() - result = "(" & leg.wp.vid.ppVid & "," - block: - let key = db.layersGetKeyOrVoid (root, leg.wp.vid) - if not key.isValid: - result &= "ø" - elif (root, leg.wp.vid) != db.db.xMap.getOrVoid key: - result &= key.ppKey(db) - result &= "," - if 0 <= leg.nibble: - result &= $leg.nibble.ppNibble - result &= "," & leg.wp.vtx.pp(db) & ")" - -proc pp*(hike: Hike; db = AristoTxRef(nil); indent = 4): string = - let - db = db.orDefault() - pfx = indent.toPfx(1) - result = "[" - if hike.legs.len == 0: - result &= "(" & hike.root.ppVid & ")" - else: - if hike.legs[0].wp.vid != hike.root: - result &= "(" & hike.root.ppVid & ")" & pfx - result &= hike.legs.mapIt(it.pp(hike.root, db)).join(pfx) - result &= pfx & "(" & hike.tail.ppPathPfx & ")" - result &= "]" - -func pp*[T: NodeRef|VertexRef|HashKey]( - q: seq[(HashKey,T)]; - db = AristoTxRef(nil); - indent = 4; - ): string = - let db = db.orDefault - proc ppT(v: T): string = - when T is VertexID or T is RootedVertexID: - v.pp() - else: - v.pp(db) - "{" & q.mapIt("(" & it[0].ppKey(db) & "," & it[1].ppT & ")") - .join("," & 
indent.toPfx(1)) & "}" - -func pp*[T: NodeRef|VertexRef|HashKey]( - t: Table[HashKey,T]; - db = AristoTxRef(nil); - indent = 4; - ): string = - ## Sort hash keys by associated vertex ID were possible - let db = db.orDefault - var - t0: Table[RootedVertexID,(HashKey,T)] - t1: Table[HashKey,T] - for (key,val) in t.pairs: - db.xMap.withValue(key,rv): - t0[rv[]] = (key,val) - do: - t1[key] = val - let - q0 = t0.sortedKeys.mapIt(t0.getOrDefault it) - q1 = t1.sortedKeys.mapIt((it, t1.getOrDefault it)) - (q0 & q1).pp(db,indent) - -proc pp*[T: HashKey]( - t: Table[T,RootedVertexID]; - db = AristoTxRef(nil); - indent = 4; - ): string = - ## Sort by second tab item vertex ID - let db = db.orDefault - proc ppT(v: T): string = - when T is VertexID or T is RootedVertexID: - v.pp() - else: - v.pp(db) - var rev: Table[RootedVertexID,seq[T]] - for (key,rvid) in t.pairs: - rev.withValue(rvid,val): - val[].add key - do: - rev[rvid] = @[key] - var flat: seq[(HashKey,RootedVertexID)] - for rvid in rev.keys.toSeq.sorted: - rev.withValue(rvid,keysPtr): - for key in keysPtr[]: - flat.add (key,rvid) - # Now sorted vy values - "{" & flat.mapIt("(" & it[0].ppT & "," & it[1].pp & ")") - .join("," & indent.toPfx(1)) & "}" - -func pp*[T: HashKey]( - t: TableRef[HashKey,T]; - db = AristoTxRef(nil); - indent = 4; - ): string = - pp(t[],db,indent) - -proc pp*( - kMap: Table[RootedVertexID,HashKey]; - db: AristoTxRef; - indent = 4; - ): string = - db.ppXMap(kMap, indent) - -# --------------------- - -func pp*(tx: AristoTxRef): string = - result = "(" & repr(pointer(addr(tx[]))) - if not tx.parent.isNil: - result &= ", par=" & pp(tx.parent) - result &= ")" - -func pp*(wp: VidVtxPair; db: AristoTxRef): string = - "(" & wp.vid.pp & "," & wp.vtx.pp(db) & ")" - - -proc pp*( - layer: AristoTxRef; - db: AristoTxRef; - indent = 4; - sTabOk = true, - kMapOk = true, - vTopOk = true, - ): string = - layer.ppLayer( - db.orDefault(), vTopOk=vTopOk, sTabOk=sTabOk, kMapOk=kMapOk, indent=indent) - -#proc pp*( -# 
be: BackendRef; -# db: AristoTxRef; -# limit = 100; -# indent = 4; -# ): string = -# result = db.ppBalancer(db, indent+1) & indent.toPfx -# case be.kind: -# of BackendMemory: -# result &= be.MemBackendRef.ppBe(db, limit, indent+1) -# of BackendRocksDB: -# result &= be.RdbBackendRef.ppBe(db, limit, indent+1) - -proc pp*( - db: AristoTxRef; - indent = 4; - backendOk = false; - balancerOk = true; - topOk = true; - stackOk = true; - kMapOk = true; - sTabOk = true; - limit = 100; - ): string = - # if topOk: - # result = db.layersCc.ppLayer( - # db, sTabOk=sTabOk, kMapOk=kMapOk, vTopOk=true, indent=indent) - # let stackOnlyOk = stackOk and not (topOk or balancerOk or backendOk) - # if not stackOnlyOk: - # result &= indent.toPfx(1) & "level=" & $db.stack.len - # if (stackOk and 0 < db.stack.len) or stackOnlyOk: - # let layers = @[db.top] & db.stack.reversed - # var lStr = "" - # for n,w in layers: - # let - # m = layers.len - n - 1 - # l = db.layersCc m - # a = w.kMap.values.toSeq.filterIt(not it.isValid).len - # c = l.kMap.values.toSeq.filterIt(not it.isValid).len - # result &= "(" & $(w.kMap.len - a) & "," & $a & ")" - # lStr &= " " & $m & "=(" & $(l.kMap.len - c) & "," & $c & ")" - # result &= " =>" & lStr - # if backendOk: - # result &= indent.toPfx & db.backend.pp(db, limit=limit, indent) - # elif balancerOk: - # result &= indent.toPfx & db.balancer.ppBalancer(db, indent+1) - discard #TODO -# ------------------------------------------------------------------------------ -# End -# ------------------------------------------------------------------------------ diff --git a/execution_chain/db/aristo/aristo_desc.nim b/execution_chain/db/aristo/aristo_desc.nim index c27ff3ad47..12a7508426 100644 --- a/execution_chain/db/aristo/aristo_desc.nim +++ b/execution_chain/db/aristo/aristo_desc.nim @@ -122,9 +122,6 @@ type ## Mixed account/storage path to payload cache - same as above but caches ## the full lookup of storage slots - # Debugging data below, might go away in future - 
xMap*: Table[HashKey,RootedVertexID] ## For pretty printing/debugging - staticLevel*: int ## MPT level where "most" leaves can be found, for static vid lookups lookups*: tuple[lower, hits, higher: int] diff --git a/execution_chain/db/aristo/aristo_desc/desc_identifiers.nim b/execution_chain/db/aristo/aristo_desc/desc_identifiers.nim index 233dd131b0..83a5095f0f 100644 --- a/execution_chain/db/aristo/aristo_desc/desc_identifiers.nim +++ b/execution_chain/db/aristo/aristo_desc/desc_identifiers.nim @@ -227,14 +227,10 @@ func append*(w: var RlpWriter; key: HashKey) = # ------------------------------------------------------------------------------ func `$`*(vids: seq[VertexID]): string = - "[" & vids.toSeq.mapIt( - "$" & it.uint64.toHex.strip(trailing=false,chars={'0'}) - ).join(",") & "]" + "[" & vids.mapIt($it).join(",") & "]" func `$`*(vids: HashSet[VertexID]): string = - "{" & vids.toSeq.sorted.mapIt( - "$" & it.uint64.toHex.strip(trailing=false,chars={'0'}) - ).join(",") & "}" + "{" & vids.toSeq.sorted.mapIt($it).join(",") & "}" func `$`*(key: HashKey): string = toHex(key.data) diff --git a/execution_chain/db/aristo/aristo_desc/desc_structural.nim b/execution_chain/db/aristo/aristo_desc/desc_structural.nim index 0ff7d2bbdb..4cd75bef35 100644 --- a/execution_chain/db/aristo/aristo_desc/desc_structural.nim +++ b/execution_chain/db/aristo/aristo_desc/desc_structural.nim @@ -15,7 +15,7 @@ {.push raises: [].} import - std/[hashes as std_hashes, tables], + std/[hashes as std_hashes, strutils, tables], stint, eth/common/[accounts, base, hashes], ./desc_identifiers @@ -252,6 +252,56 @@ template dup*(vtx: BranchRef): BranchRef = template dup*(vtx: ExtBranchRef): ExtBranchRef = ExtBranchRef(VertexRef(vtx).dup()) +func `$`*(aa: AristoAccount): string = + $aa.nonce & "," & $aa.balance & "," & + (if aa.codeHash == EMPTY_CODE_HASH: "" + else: $aa.codeHash) + +func `$`*(stoID: StorageID): string = + if stoID.isValid: + $stoID.vid + else: + $default(VertexID) + +func `$`*(vtx: 
AccLeafRef): string = + if vtx == nil: + "A(nil)" + else: + "A(" & $vtx.pfx & ":" & $vtx.account & "," & $vtx.stoID & ")" + +func `$`*(vtx: StoLeafRef): string = + if vtx == nil: + "S(nil)" + else: + "S(" & $vtx.pfx & ":" & $vtx.stoData & ")" + +func `$`*(vtx: BranchRef): string = + if vtx == nil: + "B(nil)" + else: + "B(" & $vtx.startVid & "+" & toBin(BiggestInt(vtx.used), 16) & ")" + +func `$`*(vtx: ExtBranchRef): string = + if vtx == nil: + "E(nil)" + else: + "E(" & $vtx.pfx & ":" & $vtx.startVid & "+" & toBin(BiggestInt(vtx.used), 16) & ")" + +func `$`*(vtx: VertexRef): string = + if vtx == nil: + "V(nil)" + else: + case vtx.vType + of AccLeaf: + $(AccLeafRef(vtx)[]) + of StoLeaf: + $(StoLeafRef(vtx)[]) + of Branch: + $(BranchRef(vtx)[]) + of ExtBranch: + $(ExtBranchRef(vtx)[]) + + # ------------------------------------------------------------------------------ # End # ------------------------------------------------------------------------------ diff --git a/execution_chain/db/aristo/aristo_merge.nim b/execution_chain/db/aristo/aristo_merge.nim index 16a015d9bf..42b42cc32b 100644 --- a/execution_chain/db/aristo/aristo_merge.nim +++ b/execution_chain/db/aristo/aristo_merge.nim @@ -69,13 +69,11 @@ proc mergePayloadImpl[LeafType, T]( template resetKeys() = # Reset cached hashes of touched verticies - for i in 2..vids.len: + for i in 1..vids.len: db.layersResKey((root, vids[^i]), vtxs[^i]) while pos < path.len: # Clear existing merkle keys along the traversal path - vids.add cur - vtxs.add vtx var psuffix = path.slice(pos) let n = psuffix.sharedPrefixLen(vtx.pfx) case vtx.vType @@ -141,6 +139,8 @@ proc mergePayloadImpl[LeafType, T]( next = BranchRef(vtx).bVid(nibble) if next.isValid: + vids.add cur + vtxs.add vtx cur = next psuffix = psuffix.slice(n + 1) pos += n + 1 diff --git a/execution_chain/db/kvt/kvt_layers.nim b/execution_chain/db/kvt/kvt_layers.nim index 6b7dff461d..19e850efb6 100644 --- a/execution_chain/db/kvt/kvt_layers.nim +++ 
b/execution_chain/db/kvt/kvt_layers.nim @@ -65,26 +65,6 @@ func layersPut*(db: KvtTxRef; key: openArray[byte]; data: openArray[byte]) = proc mergeAndReset*(trg, src: KvtTxRef) = mergeAndReset(trg.sTab, src.sTab) -# ------------------------------------------------------------------------------ -# Public functions -# ------------------------------------------------------------------------------ - -func layersCc*(db: KvtDbRef; level = high(int)): KvtTxRef = - ## Provide a collapsed copy of layers up to a particular transaction level. - ## If the `level` argument is too large, the maximum transaction level is - ## returned. For the result layer, the `txUid` value set to `0`. - # let layers = if db.stack.len <= level: db.stack & @[db.top] - # else: db.stack[0 .. level] - - # # Set up initial layer (bottom layer) - # result = LayerRef(sTab: layers[0].sTab) - - # # Consecutively merge other layers on top - # for n in 1 ..< layers.len: - # for (key,val) in layers[n].sTab.pairs: - # result.sTab[key] = val - discard # TODO - # ------------------------------------------------------------------------------ # Public iterators # ------------------------------------------------------------------------------ diff --git a/tests/test_aristo/test_blobify.nim b/tests/test_aristo/test_blobify.nim index 3f1a570854..63d53a446e 100644 --- a/tests/test_aristo/test_blobify.nim +++ b/tests/test_aristo/test_blobify.nim @@ -15,25 +15,14 @@ import unittest2, std/sequtils, ../../execution_chain/db/aristo/aristo_blobify suite "Aristo blobify": test "VertexRef roundtrip": let - leafAccount = AccLeafRef( - vType: AccLeaf, - pfx: NibblesBuf.nibble(1), - account: AristoAccount(nonce: 100, balance: 123.u256), - stoID: (isValid: true, vid: VertexID(5)) - ) - leafStoData = StoLeafRef( - vType: StoLeaf, - pfx: NibblesBuf.nibble(3), - stoData: 42.u256, - ) - branch = BranchRef(vType: Branch, startVid: VertexID(0x334452), used: 0x43'u16) - - extension = ExtBranchRef( - vType: ExtBranch, - pfx: 
NibblesBuf.nibble(2), - startVid: VertexID(0x55), - used: 0x12'u16, + leafAccount = AccLeafRef.init( + NibblesBuf.nibble(1), + AristoAccount(nonce: 100, balance: 123.u256), + (isValid: true, vid: VertexID(5)), ) + leafStoData = StoLeafRef.init(NibblesBuf.nibble(3), 42.u256) + branch = BranchRef.init(VertexID(0x334452), 0x43'u16) + extension = ExtBranchRef.init(NibblesBuf.nibble(2), VertexID(0x55), 0x12'u16) key = HashKey.fromBytes(repeat(0x34'u8, 32))[] From ee990259c0f8491ab285a102774aab5f53c308e9 Mon Sep 17 00:00:00 2001 From: andri lim Date: Tue, 3 Jun 2025 18:07:55 +0700 Subject: [PATCH 065/138] fusaka-devnet-0: EIP-7594 related changes (cell proof) (#3353) * fusaka-devnet-0: EIP-7594 related changes (cell proof) * More changes to txpool, beacon_engine, and engine_getPayloadV2/3/4 * Add test cases * Attempt to fix test * Raise ulimit to 2048Kb * Avoid blowing up the stack by converting blobs on heap * Oops * dang blob Okay, turn out the problem is with the handling of blobs. Don't iterate the blob using convenience iterator. Avoid blob conversion on the stack etc. 
--- .../beacon/api_handler/api_getpayload.nim | 10 +- execution_chain/beacon/beacon_engine.nim | 16 +- execution_chain/common/common.nim | 11 +- execution_chain/core/eip4844.nim | 18 +- execution_chain/core/eip7594.nim | 88 ++ execution_chain/core/pooled_txs.nim | 32 +- execution_chain/core/pooled_txs_rlp.nim | 42 +- execution_chain/core/tx_pool.nim | 36 +- execution_chain/core/tx_pool/tx_desc.nim | 18 +- .../nodocker/engine/cancun/blobs.nim | 18 +- .../nodocker/engine/tx_sender.nim | 42 + tests/config.nims | 22 + tests/nim.cfg | 17 - tests/pooled_tx/invalid_blobsbundle_type.rlp | 1 + tests/pooled_tx/ptx0.rlp | 1 + tests/pooled_tx/ptx1.rlp | 1 + tests/pooled_tx/ptx2.rlp | 1 + tests/pooled_tx/ptx3.rlp | 1 + tests/pooled_tx/ptx4.rlp | 1 + tests/pooled_tx/ptx5.rlp | 1 + tests/pooled_tx/ptx6.rlp | 1 + tests/pooled_tx/ptx7.rlp | 1 + tests/pooled_tx/ptx8.rlp | 1 + tests/test_pooled_tx.nim | 55 +- tests/test_txpool.nim | 1034 +++++++++-------- 25 files changed, 909 insertions(+), 560 deletions(-) create mode 100644 execution_chain/core/eip7594.nim create mode 100644 tests/config.nims delete mode 100644 tests/nim.cfg create mode 100644 tests/pooled_tx/invalid_blobsbundle_type.rlp create mode 100644 tests/pooled_tx/ptx0.rlp create mode 100644 tests/pooled_tx/ptx1.rlp create mode 100644 tests/pooled_tx/ptx2.rlp create mode 100644 tests/pooled_tx/ptx3.rlp create mode 100644 tests/pooled_tx/ptx4.rlp create mode 100644 tests/pooled_tx/ptx5.rlp create mode 100644 tests/pooled_tx/ptx6.rlp create mode 100644 tests/pooled_tx/ptx7.rlp create mode 100644 tests/pooled_tx/ptx8.rlp diff --git a/execution_chain/beacon/api_handler/api_getpayload.nim b/execution_chain/beacon/api_handler/api_getpayload.nim index 4acdc9b672..8066bd9b42 100644 --- a/execution_chain/beacon/api_handler/api_getpayload.nim +++ b/execution_chain/beacon/api_handler/api_getpayload.nim @@ -31,7 +31,7 @@ proc getPayload*(ben: BeaconEngineRef, raise unsupportedFork("getPayload" & $expectedVersion & " expect payload" & 
$expectedVersion & " but get payload" & $version) - if bundle.blobsBundle.isSome: + if bundle.blobsBundle.isNil.not: raise unsupportedFork("getPayload" & $expectedVersion & " contains unsupported BlobsBundleV1") @@ -50,7 +50,7 @@ proc getPayloadV3*(ben: BeaconEngineRef, id: Bytes8): GetPayloadV3Response = let version = bundle.payload.version if version != Version.V3: raise unsupportedFork("getPayloadV3 expect payloadV3 but get payload" & $version) - if bundle.blobsBundle.isNone: + if bundle.blobsBundle.isNil: raise unsupportedFork("getPayloadV3 is missing BlobsBundleV1") let com = ben.com @@ -60,7 +60,7 @@ proc getPayloadV3*(ben: BeaconEngineRef, id: Bytes8): GetPayloadV3Response = GetPayloadV3Response( executionPayload: bundle.payload.V3, blockValue: bundle.blockValue, - blobsBundle: bundle.blobsBundle.value, + blobsBundle: bundle.blobsBundle.V1, shouldOverrideBuilder: false ) @@ -74,7 +74,7 @@ proc getPayloadV4*(ben: BeaconEngineRef, id: Bytes8): GetPayloadV4Response = let version = bundle.payload.version if version != Version.V3: raise unsupportedFork("getPayloadV4 expect payloadV3 but get payload" & $version) - if bundle.blobsBundle.isNone: + if bundle.blobsBundle.isNil: raise unsupportedFork("getPayloadV4 is missing BlobsBundleV1") if bundle.executionRequests.isNone: raise unsupportedFork("getPayloadV4 is missing executionRequests") @@ -86,7 +86,7 @@ proc getPayloadV4*(ben: BeaconEngineRef, id: Bytes8): GetPayloadV4Response = GetPayloadV4Response( executionPayload: bundle.payload.V3, blockValue: bundle.blockValue, - blobsBundle: bundle.blobsBundle.value, + blobsBundle: bundle.blobsBundle.V1, shouldOverrideBuilder: false, executionRequests: bundle.executionRequests.get, ) diff --git a/execution_chain/beacon/beacon_engine.nim b/execution_chain/beacon/beacon_engine.nim index cf15431036..13c36720a7 100644 --- a/execution_chain/beacon/beacon_engine.nim +++ b/execution_chain/beacon/beacon_engine.nim @@ -17,18 +17,20 @@ import ./payload_conv, ./api_handler/api_utils, 
../core/tx_pool, + ../core/pooled_txs, ../core/chain/forked_chain, ../core/chain/forked_chain/block_quarantine export forked_chain, - block_quarantine + block_quarantine, + pooled_txs type ExecutionBundle* = object payload*: ExecutionPayload blockValue*: UInt256 - blobsBundle*: Opt[BlobsBundleV1] + blobsBundle*: BlobsBundle executionRequests*: Opt[seq[seq[byte]]] BeaconEngineRef* = ref object @@ -172,17 +174,9 @@ proc generateExecutionBundle*(ben: BeaconEngineRef, if bundle.blk.header.extraData.len > 32: return err "extraData length should not exceed 32 bytes" - var blobsBundle: Opt[BlobsBundleV1] - if bundle.blobsBundle.isSome: - template blobData: untyped = bundle.blobsBundle.get - blobsBundle = Opt.some BlobsBundleV1( - commitments: blobData.commitments, - proofs: blobData.proofs, - blobs: blobData.blobs) - ok ExecutionBundle( payload: executionPayload(bundle.blk), - blobsBundle: blobsBundle, + blobsBundle: bundle.blobsBundle, blockValue: bundle.blockValue, executionRequests: bundle.executionRequests) diff --git a/execution_chain/common/common.nim b/execution_chain/common/common.nim index 0392be28c6..3d7c1be01d 100644 --- a/execution_chain/common/common.nim +++ b/execution_chain/common/common.nim @@ -315,16 +315,19 @@ func forkId*(com: CommonRef, head: BlockNumber, time: EthTime): ForkID {.gcsafe. 
com.forkIdCalculator.newID(head, time.uint64) func isEIP155*(com: CommonRef, number: BlockNumber): bool = - com.config.eip155Block.isSome and number >= com.config.eip155Block.get + com.config.eip155Block.isSome and number >= com.config.eip155Block.value func isShanghaiOrLater*(com: CommonRef, t: EthTime): bool = - com.config.shanghaiTime.isSome and t >= com.config.shanghaiTime.get + com.config.shanghaiTime.isSome and t >= com.config.shanghaiTime.value func isCancunOrLater*(com: CommonRef, t: EthTime): bool = - com.config.cancunTime.isSome and t >= com.config.cancunTime.get + com.config.cancunTime.isSome and t >= com.config.cancunTime.value func isPragueOrLater*(com: CommonRef, t: EthTime): bool = - com.config.pragueTime.isSome and t >= com.config.pragueTime.get + com.config.pragueTime.isSome and t >= com.config.pragueTime.value + +func isOsakaOrLater*(com: CommonRef, t: EthTime): bool = + com.config.osakaTime.isSome and t >= com.config.osakaTime.value proc proofOfStake*(com: CommonRef, header: Header, txFrame: CoreDbTxRef): bool = if com.config.posBlock.isSome: diff --git a/execution_chain/core/eip4844.nim b/execution_chain/core/eip4844.nim index 61c8533622..77ebc91b5a 100644 --- a/execution_chain/core/eip4844.nim +++ b/execution_chain/core/eip4844.nim @@ -41,8 +41,8 @@ const # kzgToVersionedHash implements kzg_to_versioned_hash from EIP-4844 -proc kzgToVersionedHash*(kzg: kzg.KzgCommitment): VersionedHash = - result = sha256.digest(kzg.bytes).to(Hash32) +proc kzgToVersionedHash*(commitment: array[48, byte]): VersionedHash = + result = sha256.digest(commitment).to(Hash32) result.data[0] = VERSIONED_HASH_VERSION_KZG # pointEvaluation implements point_evaluation_precompile from EIP-4844 @@ -67,7 +67,7 @@ proc pointEvaluation*(input: openArray[byte]): Result[void, string] = commitment = KzgBytes48.copyFrom(input, 96, 143) kzgProof = KzgBytes48.copyFrom(input, 144, 191) - if kzgToVersionedHash(commitment).data != versionedHash.bytes: + if 
kzgToVersionedHash(commitment.bytes).data != versionedHash.bytes: return err("versionedHash should equal to kzgToVersionedHash(commitment)") # Verify KZG proof @@ -139,7 +139,7 @@ func blobGasUsed(txs: openArray[Transaction]): uint64 = # https://eips.ethereum.org/EIPS/eip-4844 func validateEip4844Header*( com: CommonRef, header, parentHeader: Header, - txs: openArray[Transaction]): Result[void, string] {.raises: [].} = + txs: openArray[Transaction]): Result[void, string] = if not com.isCancunOrLater(header.timestamp): if header.blobGasUsed.isSome: @@ -175,10 +175,10 @@ func validateEip4844Header*( return ok() -proc validateBlobTransactionWrapper*(tx: PooledTransaction): - Result[void, string] {.raises: [].} = - if tx.blobsBundle.isNil: - return err("tx wrapper is none") +proc validateBlobTransactionWrapper4844*(tx: PooledTransaction): + Result[void, string] = + doAssert(tx.blobsBundle.isNil.not) + doAssert(tx.blobsBundle.wrapperVersion == WrapperVersionEIP4844) # note: assert blobs are not malformatted let goodFormatted = tx.tx.versionedHashes.len == @@ -213,7 +213,7 @@ proc validateBlobTransactionWrapper*(tx: PooledTransaction): if tx.tx.versionedHashes[i].data[0] != VERSIONED_HASH_VERSION_KZG: return err("wrong kzg version in versioned hash at index " & $i) - if tx.tx.versionedHashes[i] != kzgToVersionedHash(commitments[i]): + if tx.tx.versionedHashes[i] != kzgToVersionedHash(commitments[i].bytes): return err("tx versioned hash not match commitments at index " & $i) ok() diff --git a/execution_chain/core/eip7594.nim b/execution_chain/core/eip7594.nim new file mode 100644 index 0000000000..7ff8b6e54e --- /dev/null +++ b/execution_chain/core/eip7594.nim @@ -0,0 +1,88 @@ +# nimbus-execution-client +# Copyright (c) 2025 Status Research & Development GmbH +# Licensed under either of +# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE)) +# * MIT license ([LICENSE-MIT](LICENSE-MIT)) +# at your option. 
+# This file may not be copied, modified, or distributed except according to +# those terms. + +{.push raises: [].} + +import + ../constants, + ./eip4844, + ./pooled_txs, + /lazy_kzg as kzg + +from std/sequtils import mapIt + +proc validateBlobTransactionWrapper7594*(tx: PooledTransaction): + Result[void, string] = + doAssert(tx.blobsBundle.isNil.not) + doAssert(tx.blobsBundle.wrapperVersion == WrapperVersionEIP7594) + + # note: assert blobs are not malformatted + let goodFormatted = tx.tx.versionedHashes.len == + tx.blobsBundle.commitments.len and + tx.tx.versionedHashes.len == + tx.blobsBundle.blobs.len + + if not goodFormatted: + return err("tx wrapper is ill formatted") + + let + expectedProofsLen = CELLS_PER_EXT_BLOB * tx.blobsBundle.blobs.len + getProofsLen = tx.blobsBundle.proofs.len + + if not getProofsLen == expectedProofsLen: + return err("cell proofs len mismatch, expect: " & + $expectedProofsLen & + ", get: " & $getProofsLen) + + for i in 0 ..< tx.tx.versionedHashes.len: + # this additional check also done in tx validation + if tx.tx.versionedHashes[i].data[0] != VERSIONED_HASH_VERSION_KZG: + return err("wrong kzg version in versioned hash at index " & $i) + + if tx.tx.versionedHashes[i] != kzgToVersionedHash(tx.blobsBundle.commitments[i].data): + return err("tx versioned hash not match commitments at index " & $i) + + let + # Instead of converting blobs on stack, we put it on the heap. + # Even a single blob on stack will crash the program when we call + # e.g. `let cf = ?kzg.computeCellsAndKzgProofs(kzg.KzgBlob(bytes: blob.data))` + blobs = tx.blobsBundle.blobs.mapIt(kzg.KzgBlob(bytes: it.data)) + + var + cells = newSeqOfCap[KzgCell](getProofsLen) + cellIndices = newSeqOfCap[uint64](getProofsLen) + commitments = newSeqOfCap[kzg.KzgCommitment](getProofsLen) + + # https://github.com/ethereum/execution-apis/blob/5d634063ccfd897a6974ea589c00e2c1d889abc9/src/engine/osaka.md#specification + for k in 0.. 
0: return err("PooledTransaction contains blobs prior to Cancun") let blobsBundleOpt = - if com.isCancunOrLater(blk.header.timestamp): + if com.isOsakaOrLater(blk.header.timestamp): + doAssert blobsBundle.commitments.len == blobsBundle.blobs.len + doAssert blobsBundle.proofs.len == blobsBundle.blobs.len * CELLS_PER_EXT_BLOB + blobsBundle + elif com.isCancunOrLater(blk.header.timestamp): doAssert blobsBundle.commitments.len == blobsBundle.blobs.len doAssert blobsBundle.proofs.len == blobsBundle.blobs.len - Opt.some blobsBundle + blobsBundle else: - Opt.none BlobsBundle + BlobsBundle(nil) if someBaseFee: # make sure baseFee always has something diff --git a/execution_chain/core/tx_pool/tx_desc.nim b/execution_chain/core/tx_pool/tx_desc.nim index 10ac91192f..81916bbb7c 100644 --- a/execution_chain/core/tx_pool/tx_desc.nim +++ b/execution_chain/core/tx_pool/tx_desc.nim @@ -26,6 +26,7 @@ import ../chain/forked_chain, ../pow/header, ../eip4844, + ../eip7594, ../validate, ../pooled_txs, ./tx_tabs, @@ -242,6 +243,21 @@ proc classifyValid(xp: TxPoolRef; tx: Transaction, sender: Address): bool = value = tx.value true +proc validateBlobTransactionWrapper(tx: PooledTransaction, fork: EVMFork): + Result[void, string] {.raises: [].} = + if tx.blobsBundle.isNil: + return err("tx wrapper is none") + + case tx.blobsBundle.wrapperVersion + of WrapperVersionEIP4844: + if fork >= FkOsaka: + return err("Blobsbundle version expect fork before Osaka") + validateBlobTransactionWrapper4844(tx) + of WrapperVersionEIP7594: + if fork < FkOsaka: + return err("Blobsbundle version expect Osaka or later") + validateBlobTransactionWrapper7594(tx) + # ------------------------------------------------------------------------------ # Public functions, constructor # ------------------------------------------------------------------------------ @@ -331,7 +347,7 @@ proc addTx*(xp: TxPoolRef, ptx: PooledTransaction): Result[void, TxError] = if size > BLOB_TX_MAX_SIZE: return err(txErrorOversized) - 
ptx.validateBlobTransactionWrapper().isOkOr: + ptx.validateBlobTransactionWrapper(xp.nextFork).isOkOr: debug "Invalid transaction: Blob transaction wrapper validation failed", tx = ptx.tx, error = error diff --git a/hive_integration/nodocker/engine/cancun/blobs.nim b/hive_integration/nodocker/engine/cancun/blobs.nim index 5c1e6186f9..dec6b9f115 100644 --- a/hive_integration/nodocker/engine/cancun/blobs.nim +++ b/hive_integration/nodocker/engine/cancun/blobs.nim @@ -154,8 +154,24 @@ proc blobDataGenerator*(startBlobId: BlobID, blobCount: int): BlobTxWrapData = let res = generateBlob(startBlobId + BlobID(i)) result.blobs[i] = res.blob result.commitments[i] = res.commitment - result.hashes[i] = kzgToVersionedHash(result.commitments[i]) + result.hashes[i] = kzgToVersionedHash(result.commitments[i].bytes) let z = computeBlobKzgProof(result.blobs[i], result.commitments[i]) if z.isErr: doAssert(false, z.error) result.proofs[i] = z.get() + +proc blobDataGenerator7594*(startBlobId: BlobID, blobCount: int): BlobTxWrapData = + result.blobs = newSeq[kzg.KzgBlob](blobCount) + result.commitments = newSeq[kzg.KzgCommitment](blobCount) + result.hashes = newSeq[Hash32](blobCount) + result.proofs = newSeqOfCap[kzg.KzgProof](blobCount * CELLS_PER_EXT_BLOB) + + for i in 0..= 1000.u256 + + test "Test TxPool with blobhash block": + let + acc = mx.getAccount(21) + tx1 = mx.createPooledTransactionWithBlob(acc, recipient, amount, 0) + tx2 = mx.createPooledTransactionWithBlob(acc, recipient, amount, 1) - test "Test TxPool with PoS block": - let - acc = mx.getAccount(20) - tc = BaseTx( - txType: Opt.some(TxLegacy), - recipient: Opt.some(recipient), - gasLimit: 75000, - amount: amount, - ) - ptx = mx.makeTx(tc, acc, 0) + xp.checkAddTx(tx1) + xp.checkAddTx(tx2) - xp.checkAddTx(ptx) - let bundle = xp.checkAssembleBlock(1) - xp.checkImportBlock(bundle, 0) + template header(): Header = + bundle.blk.header - let - sdb = LedgerRef.init(chain.latestTxFrame) - val = sdb.getStorage(recipient, slot) - 
randao = Bytes32(val.toBytesBE) - fee = sdb.getBalance(feeRecipient) - bal = sdb.getBalance(recipient) - - check randao == prevRandao - check bundle.blk.header.coinbase == feeRecipient - check not fee.isZero - check bal >= 1000.u256 - - test "Test TxPool with blobhash block": - let - acc = mx.getAccount(21) - tx1 = mx.createPooledTransactionWithBlob(acc, recipient, amount, 0) - tx2 = mx.createPooledTransactionWithBlob(acc, recipient, amount, 1) + let + bundle = xp.checkAssembleBlock(2) + gasUsed1 = xp.vmState.receipts[0].cumulativeGasUsed + gasUsed2 = xp.vmState.receipts[1].cumulativeGasUsed - gasUsed1 + totalBlobGasUsed = tx1.tx.getTotalBlobGas + tx2.tx.getTotalBlobGas + blockValue = + gasUsed1.u256 * tx1.tx.effectiveGasTip(header.baseFeePerGas).u256 + + gasUsed2.u256 * tx2.tx.effectiveGasTip(header.baseFeePerGas).u256 - xp.checkAddTx(tx1) - xp.checkAddTx(tx2) + check blockValue == bundle.blockValue + check totalBlobGasUsed == header.blobGasUsed.get() - template header(): Header = - bundle.blk.header + xp.checkImportBlock(bundle, 0) - let - bundle = xp.checkAssembleBlock(2) - gasUsed1 = xp.vmState.receipts[0].cumulativeGasUsed - gasUsed2 = xp.vmState.receipts[1].cumulativeGasUsed - gasUsed1 - totalBlobGasUsed = tx1.tx.getTotalBlobGas + tx2.tx.getTotalBlobGas - blockValue = - gasUsed1.u256 * tx1.tx.effectiveGasTip(header.baseFeePerGas).u256 + - gasUsed2.u256 * tx2.tx.effectiveGasTip(header.baseFeePerGas).u256 + let + sdb = LedgerRef.init(chain.latestTxFrame) + val = sdb.getStorage(recipient, slot) + randao = Bytes32(val.toBytesBE) + bal = sdb.getBalance(feeRecipient) - check blockValue == bundle.blockValue - check totalBlobGasUsed == header.blobGasUsed.get() + check randao == prevRandao + check header.coinbase == feeRecipient + check not bal.isZero - xp.checkImportBlock(bundle, 0) + ## see github.com/status-im/nimbus-eth1/issues/1031 + test "TxPool: Synthesising blocks (covers issue #1031)": + const + txPerblock = 20 + numBlocks = 10 - let - sdb 
= LedgerRef.init(chain.latestTxFrame) - val = sdb.getStorage(recipient, slot) - randao = Bytes32(val.toBytesBE) - bal = sdb.getBalance(feeRecipient) + let + lastNumber = chain.latestNumber + tc = BaseTx( + gasLimit: 75000, + recipient: Opt.some(recipient214), + amount: amount, + ) - check randao == prevRandao - check header.coinbase == feeRecipient - check not bal.isZero + for n in 0 ..< numBlocks: + for tn in 0 ..< txPerblock: + let tx = mx.makeNextTx(tc) + xp.checkAddTx(tx) - ## see github.com/status-im/nimbus-eth1/issues/1031 - test "TxPool: Synthesising blocks (covers issue #1031)": - const - txPerblock = 20 - numBlocks = 10 + xp.checkImportBlock(txPerblock, 0) - let - lastNumber = chain.latestNumber - tc = BaseTx( - gasLimit: 75000, - recipient: Opt.some(recipient214), - amount: amount, - ) + let syncCurrent = lastNumber + numBlocks + let + head = chain.headerByNumber(syncCurrent).expect("block header exists") + sdb = LedgerRef.init(chain.latestTxFrame) + expected = u256(txPerblock * numBlocks) * amount + balance = sdb.getBalance(recipient214) + check balance == expected + discard head + + test "Test get parent transactions after persistBlock": + let + acc = mx.getAccount(22) + tx1 = mx.makeTx(acc, recipient, 1.u256, 0) + tx2 = mx.makeTx(acc, recipient, 2.u256, 1) - for n in 0 ..< numBlocks: - for tn in 0 ..< txPerblock: - let tx = mx.makeNextTx(tc) - xp.checkAddTx(tx) + xp.checkAddTx(tx1) + xp.checkAddTx(tx2) - xp.checkImportBlock(txPerblock, 0) + xp.checkImportBlock(2, 0) - let syncCurrent = lastNumber + numBlocks - let - head = chain.headerByNumber(syncCurrent).expect("block header exists") - sdb = LedgerRef.init(chain.latestTxFrame) - expected = u256(txPerblock * numBlocks) * amount - balance = sdb.getBalance(recipient214) - check balance == expected - discard head - - test "Test get parent transactions after persistBlock": - let - acc = mx.getAccount(22) - tx1 = mx.makeTx(acc, recipient, 1.u256, 0) - tx2 = mx.makeTx(acc, recipient, 
2.u256, 1) + let + tx3 = mx.makeTx(acc, recipient, 3.u256, 2) + tx4 = mx.makeTx(acc, recipient, 4.u256, 3) + tx5 = mx.makeTx(acc, recipient, 5.u256, 4) + + xp.checkAddTx(tx3) + xp.checkAddTx(tx4) + xp.checkAddTx(tx5) + + xp.checkImportBlock(3, 0) + let latestHash = chain.latestHash + check (waitFor env.chain.forkChoice(latestHash, latestHash)).isOk + + let hs = [ + computeRlpHash(tx1), + computeRlpHash(tx2), + computeRlpHash(tx3), + computeRlpHash(tx4), + computeRlpHash(tx5), + ] + + let res = chain.blockByNumber(chain.latestHeader.number - 1) + if res.isErr: + debugEcho res.error + check false + + let parent = res.get + var count = 0 + for txh in chain.txHashInRange(latestHash, parent.header.parentHash): + check txh in hs + inc count + check count == hs.len + + test "EIP-7702 transaction before Prague": + let + acc = mx.getAccount(24) + auth = mx.makeAuth(acc, 0) + tc = BaseTx( + txType: Opt.some(TxEip7702), + gasLimit: 75000, + recipient: Opt.some(recipient214), + amount: amount, + authorizationList: @[auth], + ) + tx = mx.makeTx(tc, 0) - xp.checkAddTx(tx1) - xp.checkAddTx(tx2) + xp.checkAddTx(tx, txErrorBasicValidation) - xp.checkImportBlock(2, 0) + test "EIP-7702 transaction invalid auth signature": + let + env = initEnv(Prague) + xp = env.xp + mx = env.sender + acc = mx.getAccount(25) + auth = mx.makeAuth(acc, 0) + tc = BaseTx( + txType: Opt.some(TxEip7702), + gasLimit: 75000, + recipient: Opt.some(recipient214), + amount: amount, + authorizationList: @[auth], + ) + ptx = mx.makeTx(tc, 0) - let - tx3 = mx.makeTx(acc, recipient, 3.u256, 2) - tx4 = mx.makeTx(acc, recipient, 4.u256, 3) - tx5 = mx.makeTx(acc, recipient, 5.u256, 4) - - xp.checkAddTx(tx3) - xp.checkAddTx(tx4) - xp.checkAddTx(tx5) - - xp.checkImportBlock(3, 0) - let latestHash = chain.latestHash - check (waitFor env.chain.forkChoice(latestHash, latestHash)).isOk - - let hs = [ - computeRlpHash(tx1), - computeRlpHash(tx2), - computeRlpHash(tx3), - computeRlpHash(tx4), - computeRlpHash(tx5), - ] - - 
let res = chain.blockByNumber(chain.latestHeader.number - 1) - if res.isErr: - debugEcho res.error - check false + # invalid auth + var invauth = auth + invauth.v = 3.uint64 + let + ctx = CustomTx(auth: Opt.some(invauth)) + tx = mx.customizeTransaction(acc, ptx.tx, ctx) - let parent = res.get - var count = 0 - for txh in chain.txHashInRange(latestHash, parent.header.parentHash): - check txh in hs - inc count - check count == hs.len + xp.checkAddTx(tx) + # invalid auth, but the tx itself still valid + xp.checkImportBlock(1, 0) + + test "Blobschedule": + let + cc = env.conf.networkParams.config + acc = mx.getAccount(26) + tc = BlobTx( + txType: Opt.some(TxEip4844), + gasLimit: 75000, + recipient: Opt.some(acc.address), + blobID: 0.BlobID, + blobCount: 1 + ) + tx1 = mx.makeTx(tc, acc, 0) + tx2 = mx.makeTx(tc, acc, 1) + tx3 = mx.makeTx(tc, acc, 2) + tx4 = mx.makeTx(tc, acc, 3) + + xp.checkAddTx(tx1) + xp.checkAddTx(tx2) + xp.checkAddTx(tx3) + xp.checkAddTx(tx4) + + # override current blobSchedule + let bs = cc.blobSchedule[Cancun] + cc.blobSchedule[Cancun] = Opt.some( + BlobSchedule(target: 2, max: 3, baseFeeUpdateFraction: 3338477) + ) + + # allow 3 blobs + xp.checkImportBlock(3, 1) + + # consume the rest of blobs + xp.checkImportBlock(1, 0) + + # restore blobSchedule + cc.blobSchedule[Cancun] = bs + + test "non blob tx size limit": + proc dataTx(nonce: AccountNonce, env: TestEnv, btx: BaseTx): (PooledTransaction, PooledTransaction) = + const largeDataLength = TX_MAX_SIZE - 200 # enough to have a 5 bytes RLP encoding of the data length number + var tc = btx + tc.payload = newSeq[byte](largeDataLength) - test "EIP-7702 transaction before Prague": - let - acc = mx.getAccount(24) - auth = mx.makeAuth(acc, 0) - tc = BaseTx( - txType: Opt.some(TxEip7702), - gasLimit: 75000, - recipient: Opt.some(recipient214), - amount: amount, - authorizationList: @[auth], - ) - tx = mx.makeTx(tc, 0) - - xp.checkAddTx(tx, txErrorBasicValidation) - - test "EIP-7702 transaction invalid auth 
signature": let - env = initEnv(Prague) - xp = env.xp mx = env.sender - acc = mx.getAccount(25) - auth = mx.makeAuth(acc, 0) - tc = BaseTx( - txType: Opt.some(TxEip7702), - gasLimit: 75000, - recipient: Opt.some(recipient214), - amount: amount, - authorizationList: @[auth], - ) + acc = mx.getAccount(27) ptx = mx.makeTx(tc, 0) + txSize = getEncodedLength(ptx.tx) + maxTxLengthWithoutData = txSize - largeDataLength + maxTxDataLength = TX_MAX_SIZE - maxTxLengthWithoutData - # invalid auth - var invauth = auth - invauth.v = 3.uint64 - let - ctx = CustomTx(auth: Opt.some(invauth)) - tx = mx.customizeTransaction(acc, ptx.tx, ctx) + tc.payload = newSeq[byte](maxTxDataLength) + let ptx1 = mx.makeTx(tc, acc, nonce) + tc.payload = newSeq[byte](maxTxDataLength + 1) + let ptx2 = mx.makeTx(tc, acc, nonce) + (ptx1, ptx2) - xp.checkAddTx(tx) - # invalid auth, but the tx itself still valid - xp.checkImportBlock(1, 0) + let + env = initEnv(Prague) + xp = env.xp + mx = env.sender + acc = mx.getAccount(27) - test "Blobschedule": - let - cc = env.conf.networkParams.config - acc = mx.getAccount(26) - tc = BlobTx( - txType: Opt.some(TxEip4844), - gasLimit: 75000, - recipient: Opt.some(acc.address), - blobID: 0.BlobID, - blobCount: 1 - ) - tx1 = mx.makeTx(tc, acc, 0) - tx2 = mx.makeTx(tc, acc, 1) - tx3 = mx.makeTx(tc, acc, 2) - tx4 = mx.makeTx(tc, acc, 3) - - xp.checkAddTx(tx1) - xp.checkAddTx(tx2) - xp.checkAddTx(tx3) - xp.checkAddTx(tx4) - - # override current blobSchedule - let bs = cc.blobSchedule[Cancun] - cc.blobSchedule[Cancun] = Opt.some( - BlobSchedule(target: 2, max: 3, baseFeeUpdateFraction: 3338477) - ) + let + auth = mx.makeAuth(acc, 0) + + (tx1ok, tx1bad) = dataTx(0, env, BaseTx( + txType: Opt.some(TxEip7702), + gasLimit: 3_000_000, + recipient: Opt.some(recipient214), + amount: amount, + authorizationList: @[auth], + )) - # allow 3 blobs - xp.checkImportBlock(3, 1) + (tx2ok, tx2bad) = dataTx(1, env, BaseTx( + txType: Opt.some(TxLegacy), + recipient: Opt.some(recipient214), 
+ gasLimit: 3_000_000 + )) - # consume the rest of blobs - xp.checkImportBlock(1, 0) + (tx3ok, tx3bad) = dataTx(2, env, BaseTx( + txType: Opt.some(TxEip1559), + recipient: Opt.some(recipient214), + gasLimit: 3_000_000 + )) - # restore blobSchedule - cc.blobSchedule[Cancun] = bs + (tx4init, tx4bad) = dataTx(3, env, BaseTx( + txType: Opt.some(TxLegacy), + gasLimit: 3_000_000 + )) - test "non blob tx size limit": - proc dataTx(nonce: AccountNonce, env: TestEnv, btx: BaseTx): (PooledTransaction, PooledTransaction) = - const largeDataLength = TX_MAX_SIZE - 200 # enough to have a 5 bytes RLP encoding of the data length number - var tc = btx - tc.payload = newSeq[byte](largeDataLength) + xp.checkAddTx(tx1ok) + xp.checkAddTx(tx2ok) + xp.checkAddTx(tx3ok) - let - mx = env.sender - acc = mx.getAccount(27) - ptx = mx.makeTx(tc, 0) - txSize = getEncodedLength(ptx.tx) - maxTxLengthWithoutData = txSize - largeDataLength - maxTxDataLength = TX_MAX_SIZE - maxTxLengthWithoutData + xp.checkAddTx(tx1bad, txErrorOversized) + xp.checkAddTx(tx2bad, txErrorOversized) + xp.checkAddTx(tx3bad, txErrorOversized) - tc.payload = newSeq[byte](maxTxDataLength) - let ptx1 = mx.makeTx(tc, acc, nonce) - tc.payload = newSeq[byte](maxTxDataLength + 1) - let ptx2 = mx.makeTx(tc, acc, nonce) - (ptx1, ptx2) + # exceeds max init code size + xp.checkAddTx(tx4init, txErrorBasicValidation) + xp.checkAddTx(tx4bad, txErrorOversized) - let - env = initEnv(Prague) - xp = env.xp - mx = env.sender - acc = mx.getAccount(27) + xp.checkImportBlock(1, 2) + xp.checkImportBlock(1, 1) + xp.checkImportBlock(1, 0) - let - auth = mx.makeAuth(acc, 0) - - (tx1ok, tx1bad) = dataTx(0, env, BaseTx( - txType: Opt.some(TxEip7702), - gasLimit: 3_000_000, - recipient: Opt.some(recipient214), - amount: amount, - authorizationList: @[auth], - )) - - (tx2ok, tx2bad) = dataTx(1, env, BaseTx( - txType: Opt.some(TxLegacy), - recipient: Opt.some(recipient214), - gasLimit: 3_000_000 - )) - - (tx3ok, tx3bad) = dataTx(2, env, BaseTx( - 
txType: Opt.some(TxEip1559), - recipient: Opt.some(recipient214), - gasLimit: 3_000_000 - )) - - (tx4init, tx4bad) = dataTx(3, env, BaseTx( - txType: Opt.some(TxLegacy), - gasLimit: 3_000_000 - )) - - xp.checkAddTx(tx1ok) - xp.checkAddTx(tx2ok) - xp.checkAddTx(tx3ok) - - xp.checkAddTx(tx1bad, txErrorOversized) - xp.checkAddTx(tx2bad, txErrorOversized) - xp.checkAddTx(tx3bad, txErrorOversized) - - # exceeds max init code size - xp.checkAddTx(tx4init, txErrorBasicValidation) - xp.checkAddTx(tx4bad, txErrorOversized) - - xp.checkImportBlock(1, 2) - xp.checkImportBlock(1, 1) - xp.checkImportBlock(1, 0) - - test "EIP-7702 transaction invalid zero auth": - let - env = initEnv(Prague) - xp = env.xp - mx = env.sender - acc = mx.getAccount(29) - tc = BaseTx( - txType: Opt.some(TxEip7702), - gasLimit: 75000, - recipient: Opt.some(recipient214), - amount: amount, - ) - tx = mx.makeTx(tc, acc, 0) - - xp.checkAddTx(tx, txErrorBasicValidation) - -txPoolMain() + test "EIP-7702 transaction invalid zero auth": + let + env = initEnv(Prague) + xp = env.xp + mx = env.sender + acc = mx.getAccount(29) + tc = BaseTx( + txType: Opt.some(TxEip7702), + gasLimit: 75000, + recipient: Opt.some(recipient214), + amount: amount, + ) + tx = mx.makeTx(tc, acc, 0) + + xp.checkAddTx(tx, txErrorBasicValidation) + + test "EIP-7594 BlobsBundle on Prague": + let + env = initEnv(Prague) + xp = env.xp + mx = env.sender + acc = mx.getAccount(30) + tx = mx.createPooledTransactionWithBlob7594(acc, recipient, amount, 0) + + check tx.blobsBundle.wrapperVersion == WrapperVersionEIP7594 + xp.checkAddTx(tx, txErrorInvalidBlob) + + test "EIP-4844 BlobsBundle on Osaka": + let + env = initEnv(Osaka) + xp = env.xp + mx = env.sender + acc = mx.getAccount(30) + tx = mx.createPooledTransactionWithBlob(acc, recipient, amount, 0) + + check tx.blobsBundle.wrapperVersion == WrapperVersionEIP4844 + xp.checkAddTx(tx, txErrorInvalidBlob) + + test "EIP-7594 BlobsBundle on Osaka": + let + env = initEnv(Osaka) + xp = env.xp + mx = 
env.sender + acc = mx.getAccount(30) + tx = mx.createPooledTransactionWithBlob7594(acc, recipient, amount, 0) + + check tx.blobsBundle.wrapperVersion == WrapperVersionEIP7594 + xp.checkAddTx(tx) + xp.checkImportBlock(1, 0) From 73dd1205765c3eb0fc4291d4bdc9818e44d39d2c Mon Sep 17 00:00:00 2001 From: Advaita Saha Date: Wed, 4 Jun 2025 13:44:55 +0530 Subject: [PATCH 066/138] Fusaka: add getPayloadV5 to engineAPI (#3363) * add getPayloadV5 to engineAPI * older fork validation * fix: error message --- execution_chain/beacon/api_handler.nim | 1 + .../beacon/api_handler/api_getpayload.nim | 41 ++++++++++++++++++- execution_chain/rpc/engine_api.nim | 4 ++ 3 files changed, 45 insertions(+), 1 deletion(-) diff --git a/execution_chain/beacon/api_handler.nim b/execution_chain/beacon/api_handler.nim index 636cd8cb4e..8c1a3412f4 100644 --- a/execution_chain/beacon/api_handler.nim +++ b/execution_chain/beacon/api_handler.nim @@ -24,6 +24,7 @@ export getPayload, getPayloadV3, getPayloadV4, + getPayloadV5, getPayloadBodiesByHash, getPayloadBodiesByRange, newPayload, diff --git a/execution_chain/beacon/api_handler/api_getpayload.nim b/execution_chain/beacon/api_handler/api_getpayload.nim index 8066bd9b42..976d52fb2f 100644 --- a/execution_chain/beacon/api_handler/api_getpayload.nim +++ b/execution_chain/beacon/api_handler/api_getpayload.nim @@ -26,7 +26,10 @@ proc getPayload*(ben: BeaconEngineRef, let bundle = ben.getPayloadBundle(id).valueOr: raise unknownPayload("Unknown bundle") - let version = bundle.payload.version + let + version = bundle.payload.version + com = ben.com + if version > expectedVersion: raise unsupportedFork("getPayload" & $expectedVersion & " expect payload" & $expectedVersion & @@ -35,6 +38,9 @@ proc getPayload*(ben: BeaconEngineRef, raise unsupportedFork("getPayload" & $expectedVersion & " contains unsupported BlobsBundleV1") + if com.isOsakaOrLater(ethTime bundle.payload.timestamp): + raise unsupportedFork("bundle timestamp greater than Osaka must use 
getPayloadV5") + GetPayloadV2Response( executionPayload: bundle.payload.V1V2, blockValue: bundle.blockValue @@ -57,6 +63,9 @@ proc getPayloadV3*(ben: BeaconEngineRef, id: Bytes8): GetPayloadV3Response = if not com.isCancunOrLater(ethTime bundle.payload.timestamp): raise unsupportedFork("bundle timestamp is less than Cancun activation") + if com.isOsakaOrLater(ethTime bundle.payload.timestamp): + raise unsupportedFork("bundle timestamp greater than Osaka must use getPayloadV5") + GetPayloadV3Response( executionPayload: bundle.payload.V3, blockValue: bundle.blockValue, @@ -83,6 +92,9 @@ proc getPayloadV4*(ben: BeaconEngineRef, id: Bytes8): GetPayloadV4Response = if not com.isPragueOrLater(ethTime bundle.payload.timestamp): raise unsupportedFork("bundle timestamp is less than Prague activation") + if com.isOsakaOrLater(ethTime bundle.payload.timestamp): + raise unsupportedFork("bundle timestamp greater than Osaka must use getPayloadV5") + GetPayloadV4Response( executionPayload: bundle.payload.V3, blockValue: bundle.blockValue, @@ -90,3 +102,30 @@ proc getPayloadV4*(ben: BeaconEngineRef, id: Bytes8): GetPayloadV4Response = shouldOverrideBuilder: false, executionRequests: bundle.executionRequests.get, ) + +proc getPayloadV5*(ben: BeaconEngineRef, id: Bytes8): GetPayloadV5Response = + trace "Engine API request received", + meth = "GetPayload", id + + let bundle = ben.getPayloadBundle(id).valueOr: + raise unknownPayload("Unknown bundle") + + let version = bundle.payload.version + if version != Version.V3: + raise unsupportedFork("getPayloadV5 expect payloadV3 but get payload" & $version) + if bundle.blobsBundle.isNil: + raise unsupportedFork("getPayloadV5 is missing BlobsBundleV2") + if bundle.executionRequests.isNone: + raise unsupportedFork("getPayloadV5 is missing executionRequests") + + let com = ben.com + if not com.isOsakaOrLater(ethTime bundle.payload.timestamp): + raise unsupportedFork("bundle timestamp is less than Osaka activation") + + GetPayloadV5Response( + 
executionPayload: bundle.payload.V3, + blockValue: bundle.blockValue, + blobsBundle: bundle.blobsBundle.V2, + shouldOverrideBuilder: false, + executionRequests: bundle.executionRequests.get, + ) \ No newline at end of file diff --git a/execution_chain/rpc/engine_api.nim b/execution_chain/rpc/engine_api.nim index 1c1a70b03b..fe3a0382b6 100644 --- a/execution_chain/rpc/engine_api.nim +++ b/execution_chain/rpc/engine_api.nim @@ -29,6 +29,7 @@ const supportedMethods: HashSet[string] = "engine_getPayloadV2", "engine_getPayloadV3", "engine_getPayloadV4", + "engine_getPayloadV5", "engine_forkchoiceUpdatedV1", "engine_forkchoiceUpdatedV2", "engine_forkchoiceUpdatedV3", @@ -76,6 +77,9 @@ proc setupEngineAPI*(engine: BeaconEngineRef, server: RpcServer) = server.rpc("engine_getPayloadV4") do(payloadId: Bytes8) -> GetPayloadV4Response: return engine.getPayloadV4(payloadId) + server.rpc("engine_getPayloadV5") do(payloadId: Bytes8) -> GetPayloadV5Response: + return engine.getPayloadV5(payloadId) + server.rpc("engine_forkchoiceUpdatedV1") do(update: ForkchoiceStateV1, attrs: Opt[PayloadAttributesV1]) -> ForkchoiceUpdatedResponse: await engine.forkchoiceUpdated(Version.V1, update, attrs.payloadAttributes) From d3797ebf3f306c5d7c71dfe2accd830a73620e1d Mon Sep 17 00:00:00 2001 From: andri lim Date: Wed, 4 Jun 2025 15:18:49 +0700 Subject: [PATCH 067/138] Prevent txpool from crashing because of EIP-7594 blobs (#3361) * Prevent txpool from crashing because of EIP-7594 blobs * Fix comment --- execution_chain/core/tx_pool/tx_desc.nim | 11 +++-- execution_chain/core/tx_pool/tx_item.nim | 7 +++ execution_chain/core/tx_pool/tx_tabs.nim | 40 +++++++++++++-- tests/test_txpool.nim | 63 +++++++++++++++++++++--- 4 files changed, 108 insertions(+), 13 deletions(-) diff --git a/execution_chain/core/tx_pool/tx_desc.nim b/execution_chain/core/tx_pool/tx_desc.nim index 81916bbb7c..da66bb4832 100644 --- a/execution_chain/core/tx_pool/tx_desc.nim +++ b/execution_chain/core/tx_pool/tx_desc.nim @@ 
-251,11 +251,14 @@ proc validateBlobTransactionWrapper(tx: PooledTransaction, fork: EVMFork): case tx.blobsBundle.wrapperVersion of WrapperVersionEIP4844: if fork >= FkOsaka: - return err("Blobsbundle version expect fork before Osaka") + return err("Blobsbundle version 0 expect fork before Osaka") validateBlobTransactionWrapper4844(tx) of WrapperVersionEIP7594: - if fork < FkOsaka: - return err("Blobsbundle version expect Osaka or later") + # Allow this kind of Blob when Prague still active. + # Because after transitioned to Osaka or later, + # it can be included in the next fork + if fork < FkPrague: + return err("Blobsbundle version 1 expect Prague or later") validateBlobTransactionWrapper7594(tx) # ------------------------------------------------------------------------------ @@ -425,7 +428,7 @@ proc addTx*(xp: TxPoolRef, tx: Transaction): Result[void, TxError] = iterator byPriceAndNonce*(xp: TxPoolRef): TxItemRef = for item in byPriceAndNonce(xp.senderTab, xp.idTab, - xp.blobTab, xp.vmState.ledger, xp.baseFee): + xp.blobTab, xp.vmState.ledger, xp.baseFee, xp.nextFork): yield item func getBlobAndProofV1*(xp: TxPoolRef, v: VersionedHash): Opt[BlobAndProofV1] = diff --git a/execution_chain/core/tx_pool/tx_item.nim b/execution_chain/core/tx_pool/tx_item.nim index b83e532bc2..fb10b01632 100644 --- a/execution_chain/core/tx_pool/tx_item.nim +++ b/execution_chain/core/tx_pool/tx_item.nim @@ -123,6 +123,13 @@ template price*(item: TxItemRef): GasInt = ## Getter item.price +func wrapperVersion*(item: TxItemRef): Opt[WrapperVersion] = + ## Getter + if item.ptx.blobsBundle.isNil: + return Opt.none(WrapperVersion) + else: + return Opt.some(item.ptx.blobsBundle.wrapperVersion) + func calculatePrice*(item: TxItemRef; baseFee: GasInt) = ## Profit calculator item.price = item.tx.gasLimit * item.tx.tip(baseFee) + item.tx.getTotalBlobGas diff --git a/execution_chain/core/tx_pool/tx_tabs.nim b/execution_chain/core/tx_pool/tx_tabs.nim index 66976f8e76..7692024423 100644 --- 
a/execution_chain/core/tx_pool/tx_tabs.nim +++ b/execution_chain/core/tx_pool/tx_tabs.nim @@ -17,6 +17,8 @@ import eth/common/hashes, stew/sorted_set, ../../db/ledger, + ../../common/evmforks, + ../pooled_txs, ./tx_item type @@ -55,11 +57,42 @@ func removeLookup*(blobTab: var BlobLookupTab, item: TxItemRef) = for v in item.tx.versionedHashes: blobTab.del(v) +proc validBlobItem(item: TxItemRef; + fork: EVMFork; + sn: TxSenderNonceRef; + idTab: var TxIdTab; + blobTab: var BlobLookupTab; + ): bool = + let wrapperVersion = item.wrapperVersion.valueOr: + # Without blobs is ok + return true + + if fork < FkCancun: + # No blobs allowed + return false + + case wrapperVersion + of WrapperVersionEIP4844: + if fork >= FkOsaka: + # Should not exist anymore + idTab.del(item.id) + blobTab.removeLookup(item) + discard sn.list.delete(item.nonce) + return false + + of WrapperVersionEIP7594: + if fork < FkOsaka: + # Not participate in block building but maybe eligible for next fork + return false + + true + iterator byPriceAndNonce*(senderTab: TxSenderTab, idTab: var TxIdTab, blobTab: var BlobLookupTab, ledger: LedgerRef, - baseFee: GasInt): TxItemRef = + baseFee: GasInt, + fork: EVMFork): TxItemRef = ## This algorithm and comment is taken from ethereumjs but modified. ## @@ -86,8 +119,9 @@ iterator byPriceAndNonce*(senderTab: TxSenderTab, let rc = sn.list.ge(nonce) if rc.isOk: let item = rc.get.data - item.calculatePrice(baseFee) - byPrice.push(item) + if item.validBlobItem(fork, sn, idTab, blobTab): + item.calculatePrice(baseFee) + byPrice.push(item) # HeapQueue needs `<` to be overloaded for custom object # and in this case, we want to pop highest price first. 
diff --git a/tests/test_txpool.nim b/tests/test_txpool.nim index bb9a979b63..118e84d112 100644 --- a/tests/test_txpool.nim +++ b/tests/test_txpool.nim @@ -56,7 +56,7 @@ type CustomTx = CustomTransactionData -proc initEnv(envFork: HardFork): TestEnv = +proc initConf(envFork: HardFork): NimbusConf = var conf = makeConfig( @["--custom-network:" & genesisFile] ) @@ -88,10 +88,12 @@ proc initEnv(envFork: HardFork): TestEnv = cc.osakaTime = Opt.some(0.EthTime) conf.networkParams.genesis.alloc[recipient] = GenesisAccount(code: contractCode) + conf +proc initEnv(conf: NimbusConf): TestEnv = let # create the sender first, because it will modify networkParams - sender = TxSender.new(conf.networkParams, 35) + sender = TxSender.new(conf.networkParams, 30) com = CommonRef.new(newCoreDbRef DefaultDbMemory, nil, conf.networkId, conf.networkParams) chain = ForkedChainRef.init(com) @@ -104,6 +106,10 @@ proc initEnv(envFork: HardFork): TestEnv = sender: sender ) +proc initEnv(envFork: HardFork): TestEnv = + let conf = initConf(envFork) + initEnv(conf) + template checkAddTx(xp, tx, errorCode) = let prevCount = xp.len let rc = xp.addTx(tx) @@ -790,18 +796,19 @@ suite "TxPool test suite": env = initEnv(Prague) xp = env.xp mx = env.sender - acc = mx.getAccount(30) + acc = mx.getAccount(0) tx = mx.createPooledTransactionWithBlob7594(acc, recipient, amount, 0) check tx.blobsBundle.wrapperVersion == WrapperVersionEIP7594 - xp.checkAddTx(tx, txErrorInvalidBlob) + xp.checkAddTx(tx) + xp.checkImportBlock(0, 1) test "EIP-4844 BlobsBundle on Osaka": let env = initEnv(Osaka) xp = env.xp mx = env.sender - acc = mx.getAccount(30) + acc = mx.getAccount(0) tx = mx.createPooledTransactionWithBlob(acc, recipient, amount, 0) check tx.blobsBundle.wrapperVersion == WrapperVersionEIP4844 @@ -812,9 +819,53 @@ suite "TxPool test suite": env = initEnv(Osaka) xp = env.xp mx = env.sender - acc = mx.getAccount(30) + acc = mx.getAccount(0) tx = mx.createPooledTransactionWithBlob7594(acc, recipient, amount, 0) 
check tx.blobsBundle.wrapperVersion == WrapperVersionEIP7594 xp.checkAddTx(tx) xp.checkImportBlock(1, 0) + + test "EIP-7594 BlobsBundle transition from Prague to Osaka": + let + conf = initConf(Prague) + cc = conf.networkParams.config + timestamp = EthTime.now() + + # set osaka transition time + cc.osakaTime = Opt.some(timestamp + 2) + + let + env = initEnv(conf) + xp = env.xp + mx = env.sender + acc = mx.getAccount(0) + acc1 = mx.getAccount(1) + tx0 = mx.createPooledTransactionWithBlob(acc, recipient, amount, 0) + tx1 = mx.createPooledTransactionWithBlob(acc, recipient, amount, 1) + + let bs = cc.blobSchedule[Prague] + cc.blobSchedule[Prague] = Opt.some( + BlobSchedule(target: 1, max: 1, baseFeeUpdateFraction: 5_007_716'u64) + ) + + xp.timestamp = timestamp + xp.checkAddTx(tx0) + xp.checkAddTx(tx1) + + # allow 1 blob tx, remaining 1 + xp.checkImportBlock(1, 1) + + let tx2 = mx.createPooledTransactionWithBlob7594(acc1, recipient, amount, 0) + xp.checkAddTx(tx2) + + # still 2 txs in pool + check xp.len == 2 + + # only allow 1 blob tx, the other one removed automatically + xp.checkImportBlock(1, 0) + + check xp.com.isOsakaOrLater(xp.timestamp) + + # restore blobSchedule + cc.blobSchedule[Prague] = bs From 012b85d29aa1429ccc38e73a40a0f955cee98a5b Mon Sep 17 00:00:00 2001 From: andri lim Date: Wed, 4 Jun 2025 15:19:04 +0700 Subject: [PATCH 068/138] Fusaka: Add engine_getBlobsV2 (#3362) * Fusaka: Add engine_getBlobsV2 * Return null in case of missing blob --- execution_chain/beacon/api_handler.nim | 3 ++- .../beacon/api_handler/api_getblobs.nim | 24 +++++++++++++++++ execution_chain/beacon/beacon_engine.nim | 4 +++ execution_chain/core/tx_pool.nim | 6 +++-- execution_chain/core/tx_pool/tx_desc.nim | 26 ++++++++++++++++--- execution_chain/rpc/engine_api.nim | 5 ++++ 6 files changed, 62 insertions(+), 6 deletions(-) diff --git a/execution_chain/beacon/api_handler.nim b/execution_chain/beacon/api_handler.nim index 8c1a3412f4..5a45378b40 100644 --- 
a/execution_chain/beacon/api_handler.nim +++ b/execution_chain/beacon/api_handler.nim @@ -29,4 +29,5 @@ export getPayloadBodiesByRange, newPayload, forkchoiceUpdated, - getBlobsV1 + getBlobsV1, + getBlobsV2 diff --git a/execution_chain/beacon/api_handler/api_getblobs.nim b/execution_chain/beacon/api_handler/api_getblobs.nim index e3326c2be1..145ac1c78e 100644 --- a/execution_chain/beacon/api_handler/api_getblobs.nim +++ b/execution_chain/beacon/api_handler/api_getblobs.nim @@ -24,5 +24,29 @@ proc getBlobsV1*(ben: BeaconEngineRef, if versionedHashes.len > 128: raise tooLargeRequest("the number of requested blobs is too large") + # https://github.com/ethereum/execution-apis/blob/de87e24e0f2fbdbaee0fa36ab61b8ec25d3013d0/src/engine/osaka.md#cancun-api + if ben.latestFork >= Osaka: + raise unsupportedFork( + "getBlobsV1 called after Osaka has been activated") + for v in versionedHashes: result.add ben.txPool.getBlobAndProofV1(v) + +proc getBlobsV2*(ben: BeaconEngineRef, + versionedHashes: openArray[VersionedHash]): + Opt[seq[BlobAndProofV2]] = + # https://github.com/ethereum/execution-apis/blob/de87e24e0f2fbdbaee0fa36ab61b8ec25d3013d0/src/engine/osaka.md#engine_getblobsv2 + if versionedHashes.len > 128: + raise tooLargeRequest("the number of requested blobs is too large") + + if ben.latestFork < Osaka: + raise unsupportedFork( + "getBlobsV2 called before Osaka has been activated") + + var list = newSeqOfCap[BlobAndProofV2](versionedHashes.len) + for v in versionedHashes: + let blobAndProof = ben.txPool.getBlobAndProofV2(v).valueOr: + return Opt.none(seq[BlobAndProofV2]) + list.add blobAndProof + + ok(list) diff --git a/execution_chain/beacon/beacon_engine.nim b/execution_chain/beacon/beacon_engine.nim index 13c36720a7..01868aba7b 100644 --- a/execution_chain/beacon/beacon_engine.nim +++ b/execution_chain/beacon/beacon_engine.nim @@ -253,3 +253,7 @@ proc delayPayloadImport*(ben: BeaconEngineRef, blockHash: Hash32, blk: Block): P # at 
a later time. ben.chain.quarantine.addOrphan(blockHash, blk) return PayloadStatusV1(status: PayloadExecutionStatus.syncing) + +func latestFork*(ben: BeaconEngineRef): HardFork = + let timestamp = max(ben.txPool.timestamp, ben.chain.latestHeader.timestamp) + ben.com.toHardFork(timestamp) diff --git a/execution_chain/core/tx_pool.nim b/execution_chain/core/tx_pool.nim index 6ee312f54f..efba068c3f 100644 --- a/execution_chain/core/tx_pool.nim +++ b/execution_chain/core/tx_pool.nim @@ -63,7 +63,8 @@ export tx, # : Transaction pooledTx, # : PooledTransaction id, # : Hash32 - sender # : Address + sender, # : Address + nextFork # : EVMFork # ------------------------------------------------------------------------------ # TxPoolRef constructor @@ -96,7 +97,8 @@ export getItem, removeTx, removeExpiredTxs, - getBlobAndProofV1 + getBlobAndProofV1, + getBlobAndProofV2 # addTx(xp: TxPoolRef, ptx: PooledTransaction): Result[void, TxError] # addTx(xp: TxPoolRef, tx: Transaction): Result[void, TxError] diff --git a/execution_chain/core/tx_pool/tx_desc.nim b/execution_chain/core/tx_pool/tx_desc.nim index da66bb4832..6d476e5771 100644 --- a/execution_chain/core/tx_pool/tx_desc.nim +++ b/execution_chain/core/tx_pool/tx_desc.nim @@ -434,12 +434,32 @@ iterator byPriceAndNonce*(xp: TxPoolRef): TxItemRef = func getBlobAndProofV1*(xp: TxPoolRef, v: VersionedHash): Opt[BlobAndProofV1] = xp.blobTab.withValue(v, val): let np = val.item.pooledTx.blobsBundle - return Opt.some(BlobAndProofV1( - blob: np.blobs[val.blobIndex], - proof: np.proofs[val.blobIndex])) + if np.wrapperVersion == WrapperVersionEIP4844: + return Opt.some(BlobAndProofV1( + blob: np.blobs[val.blobIndex], + proof: np.proofs[val.blobIndex])) Opt.none(BlobAndProofV1) +func getBlobAndProofV2*(xp: TxPoolRef, v: VersionedHash): Opt[BlobAndProofV2] = + func getProofs(list: openArray[KzgProof], index: int): array[CELLS_PER_EXT_BLOB, KzgProof] = + let + startIndex = index * CELLS_PER_EXT_BLOB + endIndex = startIndex + 
CELLS_PER_EXT_BLOB + doAssert(list.len >= endIndex) + + for i in 0.. seq[Opt[BlobAndProofV1]]: return engine.getBlobsV1(versionedHashes) + + server.rpc("engine_getBlobsV2") do(versionedHashes: seq[VersionedHash]) -> + Opt[seq[BlobAndProofV2]]: + return engine.getBlobsV2(versionedHashes) From 31c143f218f7ea8e2c69f7a60034b7e4001fda6c Mon Sep 17 00:00:00 2001 From: Advaita Saha Date: Wed, 4 Jun 2025 15:30:25 +0530 Subject: [PATCH 069/138] bpo for fusaka devnet (#3340) * meaningless bpo * increate EVM fork * fix: tests * fix: suggestion --- execution_chain/common/chain_config.nim | 12 +++- execution_chain/common/evmforks.nim | 7 +- execution_chain/common/hardforks.nim | 27 ++++++- execution_chain/core/eip7691.nim | 5 ++ .../core/executor/calculate_reward.nim | 5 ++ execution_chain/evm/interpreter/gas_costs.nim | 5 ++ tests/customgenesis/blobschedule_bpo.json | 70 +++++++++++++++++++ tests/test_genesis.nim | 10 +++ tests/test_helpers.nim | 5 ++ 9 files changed, 143 insertions(+), 3 deletions(-) create mode 100644 tests/customgenesis/blobschedule_bpo.json diff --git a/execution_chain/common/chain_config.nim b/execution_chain/common/chain_config.nim index 4a9116f0e7..333a39ce7f 100644 --- a/execution_chain/common/chain_config.nim +++ b/execution_chain/common/chain_config.nim @@ -276,7 +276,12 @@ const BlobScheduleTable: array[Cancun..HardFork.high, string] = [ "cancun", "prague", - "osaka" + "osaka", + "bpo1", + "bpo2", + "bpo3", + "bpo4", + "bpo5" ] func ofStmt(fork: HardFork, keyName: string, reader: NimNode, value: NimNode): NimNode = @@ -493,6 +498,11 @@ func defaultBlobSchedule*(): array[Cancun..HardFork.high, Opt[BlobSchedule]] = Cancun: Opt.some(BlobSchedule(target: 3'u64, max: 6'u64, baseFeeUpdateFraction: 3_338_477'u64)), Prague: Opt.some(BlobSchedule(target: 6'u64, max: 9'u64, baseFeeUpdateFraction: 5_007_716'u64)), Osaka : Opt.some(BlobSchedule(target: 9'u64, max: 12'u64, baseFeeUpdateFraction: 5_007_716'u64)), + Bpo1 : Opt.none(BlobSchedule), + Bpo2 : 
Opt.none(BlobSchedule), + Bpo3 : Opt.none(BlobSchedule), + Bpo4 : Opt.none(BlobSchedule), + Bpo5 : Opt.none(BlobSchedule) ] func chainConfigForNetwork*(id: NetworkId): ChainConfig = diff --git a/execution_chain/common/evmforks.nim b/execution_chain/common/evmforks.nim index f46eda6b29..ae70f9bcdb 100644 --- a/execution_chain/common/evmforks.nim +++ b/execution_chain/common/evmforks.nim @@ -24,7 +24,12 @@ type FkShanghai FkCancun FkPrague - FkOsaka + FkOsaka + FkBpo1 + FkBpo2 + FkBpo3 + FkBpo4 + FkBpo5 const FkLatest* = EVMFork.high diff --git a/execution_chain/common/hardforks.nim b/execution_chain/common/hardforks.nim index 29a7f592ad..96976633f0 100644 --- a/execution_chain/common/hardforks.nim +++ b/execution_chain/common/hardforks.nim @@ -38,6 +38,11 @@ type Cancun Prague Osaka + Bpo1 + Bpo2 + Bpo3 + Bpo4 + Bpo5 const lastPurelyBlockNumberBasedFork* = GrayGlacier # MergeFork is special because of TTD. @@ -52,7 +57,7 @@ type ForkTransitionTable* = object blockNumberThresholds*: array[Frontier..GrayGlacier, Opt[BlockNumber]] mergeForkTransitionThreshold*: MergeForkTransitionThreshold - timeThresholds*: array[Shanghai..Osaka, Opt[EthTime]] + timeThresholds*: array[Shanghai..HardFork.high, Opt[EthTime]] # Starting with Shanghai, forking is based on timestamp # rather than block number. 
@@ -174,6 +179,11 @@ type cancunTime* : Opt[EthTime] pragueTime* : Opt[EthTime] osakaTime* : Opt[EthTime] + bpo1Time* : Opt[EthTime] + bpo2Time* : Opt[EthTime] + bpo3Time* : Opt[EthTime] + bpo4Time* : Opt[EthTime] + bpo5Time* : Opt[EthTime] terminalTotalDifficulty*: Opt[UInt256] depositContractAddress*: Opt[Address] @@ -258,6 +268,11 @@ func toForkTransitionTable*(conf: ChainConfig): ForkTransitionTable = result.timeThresholds[Cancun] = conf.cancunTime result.timeThresholds[Prague] = conf.pragueTime result.timeThresholds[Osaka] = conf.osakaTime + result.timeThresholds[Bpo1] = conf.bpo1Time + result.timeThresholds[Bpo2] = conf.bpo2Time + result.timeThresholds[Bpo3] = conf.bpo3Time + result.timeThresholds[Bpo4] = conf.bpo4Time + result.timeThresholds[Bpo5] = conf.bpo5Time func populateFromForkTransitionTable*(conf: ChainConfig, t: ForkTransitionTable) = conf.homesteadBlock = t.blockNumberThresholds[HardFork.Homestead] @@ -282,6 +297,11 @@ func populateFromForkTransitionTable*(conf: ChainConfig, t: ForkTransitionTable) conf.cancunTime = t.timeThresholds[HardFork.Cancun] conf.pragueTime = t.timeThresholds[HardFork.Prague] conf.osakaTime = t.timeThresholds[HardFork.Osaka] + conf.bpo1Time = t.timeThresholds[HardFork.Bpo1] + conf.bpo2Time = t.timeThresholds[HardFork.Bpo2] + conf.bpo3Time = t.timeThresholds[HardFork.Bpo3] + conf.bpo4Time = t.timeThresholds[HardFork.Bpo4] + conf.bpo5Time = t.timeThresholds[HardFork.Bpo5] # ------------------------------------------------------------------------------ # Map HardFork to EVM Fork @@ -308,6 +328,11 @@ const FkCancun, # Cancun FkPrague, # Prague FkOsaka, # Osaka + FkBpo1, # Bpo1 + FkBpo2, # Bpo2 + FkBpo3, # Bpo3 + FkBpo4, # Bpo4 + FkBpo5, # Bpo5 ] # ------------------------------------------------------------------------------ diff --git a/execution_chain/core/eip7691.nim b/execution_chain/core/eip7691.nim index c7f2fa1242..ae0364e4b3 100644 --- a/execution_chain/core/eip7691.nim +++ b/execution_chain/core/eip7691.nim @@ -21,6 
+21,11 @@ const Cancun, Prague, Osaka, + Bpo1, + Bpo2, + Bpo3, + Bpo4, + Bpo5 ] func getMaxBlobsPerBlock*(com: CommonRef, fork: EVMFork): uint64 = diff --git a/execution_chain/core/executor/calculate_reward.nim b/execution_chain/core/executor/calculate_reward.nim index 762bbf0e07..64b24c98d1 100644 --- a/execution_chain/core/executor/calculate_reward.nim +++ b/execution_chain/core/executor/calculate_reward.nim @@ -47,6 +47,11 @@ const eth0, # Cancun eth0, # Prague eth0, # Osaka + eth0, # Bpo1 + eth0, # Bpo2 + eth0, # Bpo3 + eth0, # Bpo4 + eth0 # Bpo5 ] proc calculateReward*(vmState: BaseVMState; account: Address; diff --git a/execution_chain/evm/interpreter/gas_costs.nim b/execution_chain/evm/interpreter/gas_costs.nim index 16193ff3c7..684d3d5b0b 100644 --- a/execution_chain/evm/interpreter/gas_costs.nim +++ b/execution_chain/evm/interpreter/gas_costs.nim @@ -790,6 +790,11 @@ const FkCancun: ShanghaiGasFees, FkPrague: ShanghaiGasFees, FkOsaka: ShanghaiGasFees, + FkBpo1: ShanghaiGasFees, + FkBpo2: ShanghaiGasFees, + FkBpo3: ShanghaiGasFees, + FkBpo4: ShanghaiGasFees, + FkBpo5: ShanghaiGasFees ] gasCosts(FkFrontier, base, BaseGasCosts) diff --git a/tests/customgenesis/blobschedule_bpo.json b/tests/customgenesis/blobschedule_bpo.json new file mode 100644 index 0000000000..c7f3ee09d0 --- /dev/null +++ b/tests/customgenesis/blobschedule_bpo.json @@ -0,0 +1,70 @@ +{ + "config": { + "chainId": 7042643276, + "homesteadBlock": 0, + "eip150Block": 0, + "eip155Block": 0, + "eip158Block": 0, + "byzantiumBlock": 0, + "constantinopleBlock": 0, + "petersburgBlock": 0, + "istanbulBlock": 0, + "berlinBlock": 0, + "londonBlock": 0, + "mergeNetsplitBlock": 0, + "terminalTotalDifficulty": 0, + "terminalTotalDifficultyPassed": true, + "shanghaiTime": 0, + "cancunTime": 0, + "blobSchedule": { + "cancun": { + "target": 3, + "max": 6, + "baseFeeUpdateFraction": 3338477 + }, + "prague": { + "target": 6, + "max": 9, + "baseFeeUpdateFraction": 5007716 + }, + "osaka": { + "target": 6, + 
"max": 9, + "baseFeeUpdateFraction": 5007716 + }, + "bpo1": { + "target": 9, + "max": 12, + "baseFeeUpdateFraction": 5007716 + }, + "bpo2": { + "target": 12, + "max": 15, + "baseFeeUpdateFraction": 5007716 + }, + "bpo3": { + "target": 15, + "max": 18, + "baseFeeUpdateFraction": 5007716 + }, + "bpo4": { + "target": 6, + "max": 9, + "baseFeeUpdateFraction": 5007716 + }, + "bpo5": { + "target": 15, + "max": 20, + "baseFeeUpdateFraction": 5007716 + } + }, + "depositContractAddress": "0x00000000219ab540356cBB839Cbe05303d7705Fa", + "pragueTime": 0, + "osakaTime": 1748362704, + "bpo1Time": 1748461008, + "bpo2Time": 1748559312, + "bpo3Time": 1748657616, + "bpo4Time": 1748755920, + "bpo5Time": 1748872656 + } +} \ No newline at end of file diff --git a/tests/test_genesis.nim b/tests/test_genesis.nim index 86bfee1928..d024f9d406 100644 --- a/tests/test_genesis.nim +++ b/tests/test_genesis.nim @@ -186,6 +186,16 @@ proc customGenesisTest() = validateBlobSchedule(cg, Prague, 6, 9, 3338477) validateBlobSchedule(cg, Osaka, 6, 9, 3338477) + check loadNetworkParams("blobschedule_bpo.json".findFilePath, cg) + validateBlobSchedule(cg, Cancun, 3, 6, 3338477) + validateBlobSchedule(cg, Prague, 6, 9, 5007716) + validateBlobSchedule(cg, Osaka, 6, 9, 5007716) + validateBlobSchedule(cg, Bpo1, 9, 12, 5007716) + validateBlobSchedule(cg, Bpo2, 12, 15, 5007716) + validateBlobSchedule(cg, Bpo3, 15, 18, 5007716) + validateBlobSchedule(cg, Bpo4, 6, 9, 5007716) + validateBlobSchedule(cg, Bpo5, 15, 20, 5007716) + proc genesisMain() = genesisTest() customGenesisTest() diff --git a/tests/test_helpers.nim b/tests/test_helpers.nim index b881f80cd6..7aa352c77c 100644 --- a/tests/test_helpers.nim +++ b/tests/test_helpers.nim @@ -36,6 +36,11 @@ const "Cancun", # FkCancun "Prague", # FkPrague "Osaka", # FkOsaka + "Bpo1", # FkBpo1 + "Bpo2", # FkBpo2 + "Bpo3", # FkBpo3 + "Bpo4", # FkBpo4 + "Bpo5" # FkBpo5 ] nameToFork* = ForkToName.revTable From 1e0094f0ef828c279070cec1d010a8a5259b91d9 Mon Sep 17 00:00:00 
2001 From: Advaita Saha Date: Thu, 5 Jun 2025 06:36:24 +0530 Subject: [PATCH 070/138] applicable for bpos after osaka (#3366) --- execution_chain/evm/precompiles.nim | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/execution_chain/evm/precompiles.nim b/execution_chain/evm/precompiles.nim index faed87e03a..6c7f1df90d 100644 --- a/execution_chain/evm/precompiles.nim +++ b/execution_chain/evm/precompiles.nim @@ -271,7 +271,7 @@ func modExp(c: Computation, fork: EVMFork = FkByzantium): EvmResultVoid = expLen = expL.safeInt modLen = modL.safeInt - if fork == FkOsaka: + if fork >= FkOsaka: # EIP-7823 if baseLen > 1024 or expLen > 1024 or modLen > 1024: return err(prcErr(PrcInvalidParam)) From 0ef2e8c9869b9831a94e2765068cb893ee09828f Mon Sep 17 00:00:00 2001 From: andri lim Date: Tue, 10 Jun 2025 17:57:21 +0700 Subject: [PATCH 071/138] Transaction broadcast breakdown (#3372) Transaction broadcast implementation still need a lot of work to do. But looks like the blob transactions receiver now have a higher priority than the broadcast part. This PR helps by taking out safe changes with no algorithm nor logic changes from that bigger PR as a preparation for blob transactions receiver. 
--- execution_chain/networking/p2p.nim | 7 ++ execution_chain/networking/p2p_types.nim | 3 + execution_chain/networking/rlpx.nim | 23 +++++-- .../sync/wire_protocol/requester.nim | 54 ---------------- .../sync/wire_protocol/responder.nim | 2 + execution_chain/sync/wire_protocol/types.nim | 64 +++++++++++++++++-- 6 files changed, 87 insertions(+), 66 deletions(-) diff --git a/execution_chain/networking/p2p.nim b/execution_chain/networking/p2p.nim index 414a2108b2..4cc9b3ef90 100644 --- a/execution_chain/networking/p2p.nim +++ b/execution_chain/networking/p2p.nim @@ -214,6 +214,13 @@ proc randomPeerWith*(node: EthereumNode, Protocol: type): Peer = if candidates.len > 0: return candidates.rand() +iterator randomPeersWith*(node: EthereumNode, Protocol: type): Peer = + var peers = newSeqOfCap[Peer](node.peerPool.connectedNodes.len) + for peer in node.peers(Protocol): + peers.add(peer) + shuffle(peers) + for p in peers: yield p + proc getPeer*(node: EthereumNode, peerId: NodeId, Protocol: type): Opt[Peer] = for peer in node.peers(Protocol): if peer.remote.id == peerId: diff --git a/execution_chain/networking/p2p_types.nim b/execution_chain/networking/p2p_types.nim index 1d9e8dd237..2007cbf222 100644 --- a/execution_chain/networking/p2p_types.nim +++ b/execution_chain/networking/p2p_types.nim @@ -219,3 +219,6 @@ proc `$`*(v: Capability): string = v.name & "/" & $v.version proc toENode*(v: EthereumNode): ENode = ENode(pubkey: v.keys.pubkey, address: v.address) + +func id*(peer: Peer): NodeId = + peer.remote.id diff --git a/execution_chain/networking/rlpx.nim b/execution_chain/networking/rlpx.nim index b23ac52935..ff15833455 100644 --- a/execution_chain/networking/rlpx.nim +++ b/execution_chain/networking/rlpx.nim @@ -309,9 +309,18 @@ proc nextMsgResolver[MsgType]( msgData: Rlp, future: FutureBase ) {.gcsafe, raises: [RlpError].} = var reader = msgData - Future[MsgType](future).complete reader.readRecordType( - MsgType, MsgType.rlpFieldsCount > 1 - ) + when MsgType is ref: 
+ # TODO: rlp support ref types + type T = typeof(MsgType()[]) + var msg = MsgType() + msg[] = reader.readRecordType( + T, T.rlpFieldsCount > 1 + ) + Future[MsgType](future).complete msg + else: + Future[MsgType](future).complete reader.readRecordType( + MsgType, MsgType.rlpFieldsCount > 1 + ) proc failResolver[MsgType](reason: DisconnectionReason, future: FutureBase) = Future[MsgType](future).fail( @@ -1353,11 +1362,11 @@ proc rlpxAccept*( rlpx_accept_failure.inc(labelValues = ["timeout"]) return nil except PeerDisconnected as exc: - debug "Accped handshake disconnection", err = exc.msg, reason = exc.reason + debug "Accept handshake disconnection", err = exc.msg, reason = exc.reason rlpx_accept_failure.inc(labelValues = [$exc.reason]) return nil except EthP2PError as exc: - debug "Accped handshake error", err = exc.msg + debug "Accept handshake error", err = exc.msg rlpx_accept_failure.inc(labelValues = ["error"]) return nil @@ -1521,7 +1530,7 @@ template rlpxWithPacketHandler*(PROTO: distinct type; wrapRlpxWithPacketException(MSGTYPE, peer): var rlp = data - packet {.inject.}: MSGTYPE + packet {.inject.} = MSGTYPE() when numFields > 1: tryEnterList(rlp) @@ -1552,7 +1561,7 @@ template rlpxWithFutureHandler*(PROTO: distinct type; wrapRlpxWithPacketException(MSGTYPE, peer): var rlp = data - packet: MSGTYPE + packet = MSGTYPE() tryEnterList(rlp) let diff --git a/execution_chain/sync/wire_protocol/requester.nim b/execution_chain/sync/wire_protocol/requester.nim index 270e035757..64b8f00bdb 100644 --- a/execution_chain/sync/wire_protocol/requester.nim +++ b/execution_chain/sync/wire_protocol/requester.nim @@ -34,60 +34,6 @@ defineProtocol(PROTO = eth69, peerState = Eth69PeerState, networkState = EthWireRef) -type - Status68Packet* = object - version*: uint64 - networkId*: NetworkId - totalDifficulty*: DifficultyInt - bestHash*: Hash32 - genesisHash*: Hash32 - forkId*: ChainForkId - - # 
https://github.com/ethereum/devp2p/blob/b0c213de97978053a0f62c3ea4d23c0a3d8784bc/caps/eth.md#status-0x00 - Status69Packet* = object - version*: uint64 - networkId*: NetworkId - genesisHash*: Hash32 - forkId*: ChainForkId - earliest*: uint64 # earliest available full block - latest*: uint64 # latest available full block - latestHash*: Hash32 # hash of latest available full block - - BlockHeadersPacket* = object - headers*: seq[Header] - - BlockBodiesPacket* = object - bodies*: seq[BlockBody] - - PooledTransactionsPacket* = object - transactions*: seq[PooledTransaction] - - ReceiptsPacket* = object - receipts*: seq[seq[Receipt]] - - StoredReceiptsPacket* = object - receipts*: seq[seq[StoredReceipt]] - - NewBlockHashesPacket* = object - hashes*: seq[NewBlockHashesAnnounce] - - NewBlockPacket* = object - blk*: EthBlock - totalDifficulty*: DifficultyInt - - TransactionsPacket* = object - transactions*: seq[Transaction] - - NewPooledTransactionHashesPacket* = object - txTypes*: seq[byte] - txSizes*: seq[uint64] - txHashes*: seq[Hash32] - - BlockRangeUpdatePacket* = object - earliest*: uint64 - latest*: uint64 - latestHash*: Hash32 - const StatusMsg* = 0'u64 NewBlockHashesMsg* = 1'u64 diff --git a/execution_chain/sync/wire_protocol/responder.nim b/execution_chain/sync/wire_protocol/responder.nim index ad8ee14c95..fb5e4d8773 100644 --- a/execution_chain/sync/wire_protocol/responder.nim +++ b/execution_chain/sync/wire_protocol/responder.nim @@ -11,6 +11,8 @@ import stint, chronicles, stew/byteutils, + eth/common/transactions_rlp, + ./types, ./handler, ./requester, ./trace_config, diff --git a/execution_chain/sync/wire_protocol/types.nim b/execution_chain/sync/wire_protocol/types.nim index b179475eb0..ba958b4af5 100644 --- a/execution_chain/sync/wire_protocol/types.nim +++ b/execution_chain/sync/wire_protocol/types.nim @@ -12,12 +12,58 @@ import eth/common, - ../../core/[chain, tx_pool] + ../../core/[chain, tx_pool, pooled_txs], + 
../../networking/p2p_types type + Status68Packet* = object + version*: uint64 + networkId*: NetworkId + totalDifficulty*: DifficultyInt + bestHash*: Hash32 + genesisHash*: Hash32 + forkId*: ChainForkId + + # https://github.com/ethereum/devp2p/blob/b0c213de97978053a0f62c3ea4d23c0a3d8784bc/caps/eth.md#status-0x00 + Status69Packet* = object + version*: uint64 + networkId*: NetworkId + genesisHash*: Hash32 + forkId*: ChainForkId + earliest*: uint64 # earliest available full block + latest*: uint64 # latest available full block + latestHash*: Hash32 # hash of latest available full block + + BlockHeadersPacket* = object + headers*: seq[Header] + + BlockBodiesPacket* = object + bodies*: seq[BlockBody] + + PooledTransactionsPacket* = object + transactions*: seq[PooledTransaction] + + ReceiptsPacket* = object + receipts*: seq[seq[Receipt]] + + NewBlockHashesPacket* = object + hashes*: seq[NewBlockHashesAnnounce] + + NewBlockPacket* = object + blk*: EthBlock + totalDifficulty*: DifficultyInt + + TransactionsPacket* = ref object + transactions*: seq[Transaction] + + NewPooledTransactionHashesPacket* = ref object + txTypes*: seq[byte] + txSizes*: seq[uint64] + txHashes*: seq[Hash32] + NewBlockHashesAnnounce* = object hash*: Hash32 - number*: BlockNumber + number*: base.BlockNumber ChainForkId* = object forkHash*: array[4, byte] # The RLP encoding must be exactly 4 bytes. 
@@ -49,15 +95,23 @@ type maxResults*, skip*: uint reverse*: bool - BlockBodiesRequest* =object + BlockBodiesRequest* = object blockHashes*: seq[Hash32] - PooledTransactionsRequest* =object + PooledTransactionsRequest* = object txHashes*: seq[Hash32] - ReceiptsRequest* =object + ReceiptsRequest* = object blockHashes*: seq[Hash32] + StoredReceiptsPacket* = object + receipts*: seq[seq[StoredReceipt]] + + BlockRangeUpdatePacket* = object + earliest*: uint64 + latest*: uint64 + latestHash*: Hash32 + EthWireRef* = ref object of RootRef chain* : ForkedChainRef txPool*: TxPoolRef From ba3c725ceb2b0fef1b6cf582f160214b005940c4 Mon Sep 17 00:00:00 2001 From: Advaita Saha Date: Tue, 10 Jun 2025 15:02:09 +0200 Subject: [PATCH 072/138] EIP-7825 : Transaction Gas Limit Cap (#3358) * implement eip-7825 * shift to validateTxBasic --- execution_chain/constants.nim | 3 +++ execution_chain/core/tx_pool/tx_desc.nim | 4 ++-- execution_chain/core/validate.nim | 4 ++++ 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/execution_chain/constants.nim b/execution_chain/constants.nim index 7fb60cdd66..b31d64334b 100644 --- a/execution_chain/constants.nim +++ b/execution_chain/constants.nim @@ -45,6 +45,9 @@ const GAS_LIMIT_MAXIMUM* = int64.high.GasInt # Maximum the gas limit (2^63-1). 
DEFAULT_GAS_LIMIT* = 36_000_000 + # https://eips.ethereum.org/EIPS/eip-7825 + TX_GAS_LIMIT* = 30_000_000 + EMPTY_SHA3* = hash32"c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470" GAS_MOD_EXP_QUADRATIC_DENOMINATOR* = 20.u256 diff --git a/execution_chain/core/tx_pool/tx_desc.nim b/execution_chain/core/tx_pool/tx_desc.nim index 6d476e5771..b104790e60 100644 --- a/execution_chain/core/tx_pool/tx_desc.nim +++ b/execution_chain/core/tx_pool/tx_desc.nim @@ -176,10 +176,10 @@ proc getNonce(xp: TxPoolRef; account: Address): AccountNonce = proc classifyValid(xp: TxPoolRef; tx: Transaction, sender: Address): bool = - if tx.gasLimit > xp.gasLimit: + if tx.gasLimit > TX_GAS_LIMIT: debug "Invalid transaction: Gas limit too high", txGasLimit = tx.gasLimit, - gasLimit = xp.gasLimit + gasLimit = TX_GAS_LIMIT return false if tx.txType == TxEip4844: diff --git a/execution_chain/core/validate.nim b/execution_chain/core/validate.nim index c14fb63cc4..a30e8b7422 100644 --- a/execution_chain/core/validate.nim +++ b/execution_chain/core/validate.nim @@ -211,6 +211,10 @@ func validateTxBasic*( fork: EVMFork, validateFork: bool = true): Result[void, string] = + # https://eips.ethereum.org/EIPS/eip-7825 + if fork >= FkOsaka and tx.gasLimit > TX_GAS_LIMIT: + return err("tx.gasLimit " & $tx.gasLimit & " exceeds maximum " & $TX_GAS_LIMIT) + if validateFork: if tx.txType == TxEip2930 and fork < FkBerlin: return err("invalid tx: Eip2930 Tx type detected before Berlin") From bee4b8420ce0621506513397871d89a55e76b331 Mon Sep 17 00:00:00 2001 From: Advaita Saha Date: Tue, 10 Jun 2025 16:29:36 +0200 Subject: [PATCH 073/138] EIP-7918 : Blob base fee bounded by execution cost (#3356) * implement eip-7918 * fix: misreading * fix: spec mismatch * fix excessblobgas calculation * spec fix --- execution_chain/constants.nim | 1 + execution_chain/core/eip4844.nim | 29 +++++++++++++++++------------ 2 files changed, 18 insertions(+), 12 deletions(-) diff --git a/execution_chain/constants.nim 
b/execution_chain/constants.nim index b31d64334b..8854f5ead5 100644 --- a/execution_chain/constants.nim +++ b/execution_chain/constants.nim @@ -91,6 +91,7 @@ const MIN_BLOB_GASPRICE* = 1'u64 MAX_BLOB_GAS_PER_BLOCK* = 786432 MAX_BLOBS_PER_BLOCK* = int(MAX_BLOB_GAS_PER_BLOCK div GAS_PER_BLOB) + BLOB_BASE_COST* = (1 shl 14).uint64 # 2^14 # EIP-4788 addresses # BEACON_ROOTS_ADDRESS is the address where historical beacon roots are stored as per EIP-4788 diff --git a/execution_chain/core/eip4844.nim b/execution_chain/core/eip4844.nim index 77ebc91b5a..17b0fa5157 100644 --- a/execution_chain/core/eip4844.nim +++ b/execution_chain/core/eip4844.nim @@ -81,18 +81,6 @@ proc pointEvaluation*(input: openArray[byte]): Result[void, string] = ok() -# calcExcessBlobGas implements calc_excess_data_gas from EIP-4844 -proc calcExcessBlobGas*(com: CommonRef, parent: Header, fork: EVMFork): uint64 = - let - excessBlobGas = parent.excessBlobGas.get(0'u64) - blobGasUsed = parent.blobGasUsed.get(0'u64) - targetBlobGasPerBlock = com.getTargetBlobsPerBlock(fork) * GAS_PER_BLOB - - if excessBlobGas + blobGasUsed < targetBlobGasPerBlock: - 0'u64 - else: - excessBlobGas + blobGasUsed - targetBlobGasPerBlock - # fakeExponential approximates factor * e ** (num / denom) using a taylor expansion # as described in the EIP-4844 spec. 
func fakeExponential*(factor, numerator, denominator: UInt256): UInt256 = @@ -136,6 +124,23 @@ func blobGasUsed(txs: openArray[Transaction]): uint64 = for tx in txs: result += tx.getTotalBlobGas +# calcExcessBlobGas implements calc_excess_data_gas from EIP-4844 +proc calcExcessBlobGas*(com: CommonRef, parent: Header, fork: EVMFork): uint64 = + let + excessBlobGas = parent.excessBlobGas.get(0'u64) + blobGasUsed = parent.blobGasUsed.get(0'u64) + targetBlobsPerBlock = com.getTargetBlobsPerBlock(fork) + maxBlobsPerBlock = com.getMaxBlobsPerBlock(fork) + targetBlobGasPerBlock = targetBlobsPerBlock * GAS_PER_BLOB + if excessBlobGas + blobGasUsed < targetBlobGasPerBlock: + return 0'u64 + + # https://eips.ethereum.org/EIPS/eip-7918 + if fork >= FkOsaka and (BLOB_BASE_COST.u256 * parent.baseFeePerGas.get(0.u256)) > GAS_PER_BLOB.u256 * getBlobBaseFee(excessBlobGas, com, fork): + return excessBlobGas + blobGasUsed * (maxBlobsPerBlock - targetBlobsPerBlock) div maxBlobsPerBlock + + return excessBlobGas + blobGasUsed - targetBlobGasPerBlock + # https://eips.ethereum.org/EIPS/eip-4844 func validateEip4844Header*( com: CommonRef, header, parentHeader: Header, From 64d14b3c70e3ab27ee577d35125aeaeb7d84004d Mon Sep 17 00:00:00 2001 From: bhartnett <51288821+bhartnett@users.noreply.github.com> Date: Tue, 10 Jun 2025 23:28:35 +0800 Subject: [PATCH 074/138] Remove rocksdb from verified proxy builds (#3374) --- Makefile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index b78c1836c4..bb7d5efaef 100644 --- a/Makefile +++ b/Makefile @@ -339,17 +339,17 @@ portal_bridge: | build deps rocksdb # Nimbus Verified Proxy related targets # Builds the nimbus_verified_proxy -nimbus_verified_proxy: | build deps rocksdb +nimbus_verified_proxy: | build deps echo -e $(BUILD_MSG) "build/$@" && \ $(ENV_SCRIPT) nim nimbus_verified_proxy $(NIM_PARAMS) nimbus.nims # builds and runs the nimbus_verified_proxy test suite -nimbus-verified-proxy-test: | build deps 
rocksdb +nimbus-verified-proxy-test: | build deps $(ENV_SCRIPT) nim nimbus_verified_proxy_test $(NIM_PARAMS) nimbus.nims # Shared library for verified proxy -libverifproxy: | build deps rocksdb +libverifproxy: | build deps + echo -e $(BUILD_MSG) "build/$@" && \ $(ENV_SCRIPT) nim --version && \ $(ENV_SCRIPT) nim c --app:lib -d:"libp2p_pki_schemes=secp256k1" --noMain:on --threads:on --nimcache:nimcache/libverifproxy -o:$(VERIF_PROXY_OUT_PATH)/$@.$(SHAREDLIBEXT) $(NIM_PARAMS) nimbus_verified_proxy/libverifproxy/verifproxy.nim From 6ce4568e628ea33b9e3d55e035e5e68df7417a39 Mon Sep 17 00:00:00 2001 From: andri lim Date: Wed, 11 Jun 2025 13:31:58 +0700 Subject: [PATCH 075/138] Query and store blob transactions into txpool (#3375) * Query and store blob transactions into txpool Also a breakdown of transaction broadcast. But this time it's only handle incoming transactions, both regular and blob transactions. No rebroadcast whatsoever. It is a good change to let this incoming transactions handling become stable before we add the real broadcast things. 
* Additional validation: compare announced vs received stat --- execution_chain/core/tx_pool.nim | 5 +- execution_chain/core/tx_pool/tx_desc.nim | 5 +- execution_chain/nimbus_desc.nim | 4 + execution_chain/nimbus_execution_client.nim | 2 +- execution_chain/sync/wire_protocol.nim | 2 + .../sync/wire_protocol/broadcast.nim | 294 ++++++++++++++++++ .../sync/wire_protocol/handler.nim | 31 +- .../sync/wire_protocol/responder.nim | 17 +- execution_chain/sync/wire_protocol/setup.nim | 4 +- execution_chain/sync/wire_protocol/types.nim | 16 + .../nodocker/engine/engine_env.nim | 8 +- 11 files changed, 354 insertions(+), 34 deletions(-) create mode 100644 execution_chain/sync/wire_protocol/broadcast.nim diff --git a/execution_chain/core/tx_pool.nim b/execution_chain/core/tx_pool.nim index efba068c3f..ac2403cc41 100644 --- a/execution_chain/core/tx_pool.nim +++ b/execution_chain/core/tx_pool.nim @@ -63,8 +63,7 @@ export tx, # : Transaction pooledTx, # : PooledTransaction id, # : Hash32 - sender, # : Address - nextFork # : EVMFork + sender # : Address # ------------------------------------------------------------------------------ # TxPoolRef constructor @@ -95,6 +94,7 @@ export export addTx, getItem, + contains, removeTx, removeExpiredTxs, getBlobAndProofV1, @@ -102,6 +102,7 @@ export # addTx(xp: TxPoolRef, ptx: PooledTransaction): Result[void, TxError] # addTx(xp: TxPoolRef, tx: Transaction): Result[void, TxError] +# contains(xp: TxPoolRef, id: Hash32): bool # getItem(xp: TxPoolRef, id: Hash32): Result[TxItemRef, TxError] # removeTx(xp: TxPoolRef, id: Hash32) # removeExpiredTxs(xp: TxPoolRef, lifeTime: Duration) diff --git a/execution_chain/core/tx_pool/tx_desc.nim b/execution_chain/core/tx_pool/tx_desc.nim index b104790e60..cc841b2484 100644 --- a/execution_chain/core/tx_pool/tx_desc.nim +++ b/execution_chain/core/tx_pool/tx_desc.nim @@ -162,9 +162,6 @@ func baseFee(xp: TxPoolRef): GasInt = else: 0.GasInt -func gasLimit(xp: TxPoolRef): GasInt = - 
xp.vmState.blockCtx.gasLimit - func excessBlobGas(xp: TxPoolRef): GasInt = xp.vmState.blockCtx.excessBlobGas @@ -312,6 +309,8 @@ proc updateVmState*(xp: TxPoolRef) = # ------------------------------------------------------------------------------ # Public functions # ------------------------------------------------------------------------------ +proc contains*(xp: TxPoolRef, id: Hash32): bool = + xp.idTab.hasKey(id) proc getItem*(xp: TxPoolRef, id: Hash32): Result[TxItemRef, TxError] = let item = xp.idTab.getOrDefault(id) diff --git a/execution_chain/nimbus_desc.nim b/execution_chain/nimbus_desc.nim index 194d6f4eb2..478afd6654 100644 --- a/execution_chain/nimbus_desc.nim +++ b/execution_chain/nimbus_desc.nim @@ -18,6 +18,7 @@ import ./core/tx_pool, ./sync/peers, ./sync/beacon as beacon_sync, + ./sync/wire_protocol, ./beacon/beacon_engine, ./common, ./config @@ -52,6 +53,7 @@ type beaconSyncRef*: BeaconSyncRef beaconEngine*: BeaconEngineRef metricsServer*: MetricsHttpServerRef + wire*: EthWireRef {.push gcsafe, raises: [].} @@ -70,6 +72,8 @@ proc stop*(nimbus: NimbusNode, conf: NimbusConf) {.async, gcsafe.} = waitedFutures.add nimbus.beaconSyncRef.stop() if nimbus.metricsServer.isNil.not: waitedFutures.add nimbus.metricsServer.stop() + if nimbus.wire.isNil.not: + waitedFutures.add nimbus.wire.stop() waitedFutures.add nimbus.fc.stopProcessingQueue() diff --git a/execution_chain/nimbus_execution_client.nim b/execution_chain/nimbus_execution_client.nim index 07a7d608c1..ede5765957 100644 --- a/execution_chain/nimbus_execution_client.nim +++ b/execution_chain/nimbus_execution_client.nim @@ -114,7 +114,7 @@ proc setupP2P(nimbus: NimbusNode, conf: NimbusConf, rng = nimbus.ctx.rng) # Add protocol capabilities - nimbus.ethNode.addEthHandlerCapability(nimbus.txPool) + nimbus.wire = nimbus.ethNode.addEthHandlerCapability(nimbus.txPool) # Always initialise beacon syncer nimbus.beaconSyncRef = BeaconSyncRef.init( diff --git a/execution_chain/sync/wire_protocol.nim 
b/execution_chain/sync/wire_protocol.nim index 9a86ea5038..8d062cb280 100644 --- a/execution_chain/sync/wire_protocol.nim +++ b/execution_chain/sync/wire_protocol.nim @@ -10,11 +10,13 @@ import ./wire_protocol/requester, ./wire_protocol/responder, + ./wire_protocol/broadcast, ./wire_protocol/types, ./wire_protocol/setup export requester, responder, + broadcast, types, setup diff --git a/execution_chain/sync/wire_protocol/broadcast.nim b/execution_chain/sync/wire_protocol/broadcast.nim new file mode 100644 index 0000000000..ce7e0ab433 --- /dev/null +++ b/execution_chain/sync/wire_protocol/broadcast.nim @@ -0,0 +1,294 @@ +# nimbus-execution-client +# Copyright (c) 2025 Status Research & Development GmbH +# Licensed under either of +# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE)) +# * MIT license ([LICENSE-MIT](LICENSE-MIT)) +# at your option. +# This file may not be copied, modified, or distributed except according to +# those terms. + +import + std/[tables, sets, times, sequtils, random], + chronos, + chronos/ratelimit, + chronicles, + eth/common/hashes, + eth/common/times, + results, + ./types, + ./requester, + ../../networking/p2p, + ../../core/tx_pool, + ../../core/pooled_txs_rlp, + ../../core/eip4844, + ../../core/eip7594, + ../../core/chain/forked_chain + +logScope: + topics = "tx-broadcast" + +const + maxOperationQuota = 1000000 + fullReplenishTime = chronos.seconds(5) + POOLED_STORAGE_TIME_LIMIT = initDuration(minutes = 20) + cleanupTicker = chronos.minutes(5) + # https://github.com/ethereum/devp2p/blob/b0c213de97978053a0f62c3ea4d23c0a3d8784bc/caps/eth.md#blockrangeupdate-0x11 + blockRangeUpdateTicker = chronos.minutes(2) + SOFT_RESPONSE_LIMIT* = 2 * 1024 * 1024 + +template awaitQuota(bcParam: EthWireRef, costParam: float, protocolIdParam: string) = + let + wire = bcParam + cost = int(costParam) + protocolId = protocolIdParam + + try: + if not wire.quota.tryConsume(cost): + debug "Awaiting broadcast quota", cost = cost, 
protocolId = protocolId + await wire.quota.consume(cost) + except CancelledError as exc: + raise exc + except CatchableError as exc: + debug "Error while waiting broadcast quota", + cost = cost, protocolId = protocolId, msg = exc.msg + +template reqisterAction(wire: EthWireRef, actionDesc: string, body) = + block: + proc actionHandler(): Future[void] {.async: (raises: [CancelledError]).} = + debug "Invoking broadcast action", desc=actionDesc + body + + await wire.actionQueue.addLast(actionHandler) + +func allowedOpsPerSecondCost(n: int): float = + const replenishRate = (maxOperationQuota / fullReplenishTime.nanoseconds.float) + (replenishRate * 1000000000'f / n.float) + +const + txPoolProcessCost = allowedOpsPerSecondCost(1000) + hashLookupCost = allowedOpsPerSecondCost(2000) + blockRangeUpdateCost = allowedOpsPerSecondCost(20) + +iterator peers69OrLater(wire: EthWireRef, random: bool = false): Peer = + var peers = newSeqOfCap[Peer](wire.node.numPeers) + for peer in wire.node.peers: + if peer.isNil: + continue + if peer.supports(eth69): + peers.add peer + if random: + shuffle(peers) + for peer in peers: + if peer.connectionState != ConnectionState.Connected: + continue + yield peer + +proc syncerRunning*(wire: EthWireRef): bool = + # Disable transactions gossip and processing when + # the syncer is still busy + const + thresholdTime = 3 * 15 + + let + nowTime = EthTime.now() + headerTime = wire.chain.latestHeader.timestamp + + let running = (nowTime - headerTime) > thresholdTime + if running != not wire.gossipEnabled: + wire.gossipEnabled = not running + notice "Transaction broadcast state changed", enabled = wire.gossipEnabled + + running + +proc handleTransactionsBroadcast*(wire: EthWireRef, + packet: TransactionsPacket, + peer: Peer) {.async: (raises: [CancelledError]).} = + if wire.syncerRunning: + return + + if packet.transactions.len == 0: + return + + debug "received new transactions", + number = packet.transactions.len + + wire.reqisterAction("TxPool 
consume incoming transactions"): + for tx in packet.transactions: + if tx.txType == TxEip4844: + # Disallow blob transaction broadcast + debug "Protocol Breach: Peer broadcast blob transaction", + remote=peer.remote, clientId=peer.clientId + await peer.disconnect(BreachOfProtocol) + return + + wire.txPool.addTx(tx).isOkOr: + continue + + awaitQuota(wire, txPoolProcessCost, "adding into txpool") + +proc handleTxHashesBroadcast*(wire: EthWireRef, + packet: NewPooledTransactionHashesPacket, + peer: Peer) {.async: (raises: [CancelledError]).} = + if wire.syncerRunning: + return + + if packet.txHashes.len == 0: + return + + debug "received new pooled tx hashes", + hashes = packet.txHashes.len + + if packet.txHashes.len != packet.txSizes.len or + packet.txHashes.len != packet.txTypes.len: + debug "Protocol Breach: new pooled tx hashes invalid params", + hashes = packet.txHashes.len, + sizes = packet.txSizes.len, + types = packet.txTypes.len + await peer.disconnect(BreachOfProtocol) + return + + wire.reqisterAction("Handle broadcast transactions hashes"): + var + i = 0 + + while i < packet.txHashes.len: + var + msg: PooledTransactionsRequest + res: Opt[PooledTransactionsPacket] + sizes: seq[uint64] + types: seq[byte] + sumSize = 0'u64 + + while i < packet.txHashes.len: + let size = packet.txSizes[i] + if sumSize + size > SOFT_RESPONSE_LIMIT.uint64: + break + + let txHash = packet.txHashes[i] + if txHash notin wire.txPool: + msg.txHashes.add txHash + sumSize += size + sizes.add size + types.add packet.txTypes[i] + + awaitQuota(wire, hashLookupCost, "check transaction exists in pool") + inc i + + try: + res = await peer.getPooledTransactions(msg) + except EthP2PError as exc: + debug "request pooled transactions failed", + msg=exc.msg + + if res.isNone: + debug "request pooled transactions get nothing" + return + + let + ptx = res.get() + + for i, tx in ptx.transactions: + # If we receive any blob transactions missing sidecars, or with + # sidecars that don't correspond to 
the versioned hashes reported + # in the header, disconnect from the sending peer. + if tx.tx.txType.byte != types[i]: + debug "Protocol Breach: Received transaction with type differ from announced", + remote=peer.remote, clientId=peer.clientId + await peer.disconnect(BreachOfProtocol) + return + + let (size, hash) = getEncodedLengthAndHash(tx) + if size.uint64 != sizes[i]: + debug "Protocol Breach: Received transaction with size differ from announced", + remote=peer.remote, clientId=peer.clientId + await peer.disconnect(BreachOfProtocol) + return + + if hash != msg.txHashes[i]: + debug "Protocol Breach: Received transaction with hash differ from announced", + remote=peer.remote, clientId=peer.clientId + await peer.disconnect(BreachOfProtocol) + return + + if tx.tx.txType == TxEip4844: + if tx.blobsBundle.isNil: + debug "Protocol Breach: Received sidecar-less blob transaction", + remote=peer.remote, clientId=peer.clientId + await peer.disconnect(BreachOfProtocol) + return + + if tx.blobsBundle.wrapperVersion == WrapperVersionEIP4844: + validateBlobTransactionWrapper4844(tx).isOkOr: + debug "Protocol Breach: Sidecar validation error", msg=error, + remote=peer.remote, clientId=peer.clientId + await peer.disconnect(BreachOfProtocol) + return + + if tx.blobsBundle.wrapperVersion == WrapperVersionEIP7594: + validateBlobTransactionWrapper7594(tx).isOkOr: + debug "Protocol Breach: Sidecar validation error", msg=error, + remote=peer.remote, clientId=peer.clientId + await peer.disconnect(BreachOfProtocol) + return + + wire.txPool.addTx(tx).isOkOr: + continue + + awaitQuota(wire, txPoolProcessCost, "broadcast transactions hashes") + +proc tickerLoop*(wire: EthWireRef) {.async: (raises: [CancelledError]).} = + while true: + let + cleanup = sleepAsync(cleanupTicker) + update = sleepAsync(blockRangeUpdateTicker) + res = await one(cleanup, update) + + if res == cleanup: + wire.reqisterAction("Periodical cleanup"): + var expireds: seq[Hash32] + for key, seen in 
wire.seenTransactions: + if getTime() - seen.lastSeen > POOLED_STORAGE_TIME_LIMIT: + expireds.add key + awaitQuota(wire, hashLookupCost, "broadcast transactions hashes") + + for expire in expireds: + wire.seenTransactions.del(expire) + awaitQuota(wire, hashLookupCost, "broadcast transactions hashes") + + if res == update: + wire.reqisterAction("Periodical blockRangeUpdate"): + let + packet = BlockRangeUpdatePacket( + earliest: 0, + latest: wire.chain.latestNumber, + latestHash: wire.chain.latestHash, + ) + + for peer in wire.peers69OrLater: + try: + await peer.blockRangeUpdate(packet) + except EthP2PError as exc: + debug "broadcast block range update failed", + msg=exc.msg + awaitQuota(wire, blockRangeUpdateCost, "broadcast blockRangeUpdate") + +proc setupTokenBucket*(): TokenBucket = + TokenBucket.new(maxOperationQuota.int, fullReplenishTime) + +proc actionLoop*(wire: EthWireRef) {.async: (raises: [CancelledError]).} = + while true: + let action = await wire.actionQueue.popFirst() + await action() + +proc stop*(wire: EthWireRef) {.async: (raises: [CancelledError]).} = + var waitedFutures = @[ + wire.tickerHeartbeat.cancelAndWait(), + wire.actionHeartbeat.cancelAndWait(), + ] + + let + timeout = chronos.seconds(5) + completed = await withTimeout(allFutures(waitedFutures), timeout) + if not completed: + trace "Broadcast.stop(): timeout reached", timeout, + futureErrors = waitedFutures.filterIt(it.error != nil).mapIt(it.error.msg) diff --git a/execution_chain/sync/wire_protocol/handler.nim b/execution_chain/sync/wire_protocol/handler.nim index f266592dd2..dab3e5a810 100644 --- a/execution_chain/sync/wire_protocol/handler.nim +++ b/execution_chain/sync/wire_protocol/handler.nim @@ -15,6 +15,7 @@ import stew/endians2, ./types, ./requester, + ./broadcast, ../../core/[chain, tx_pool, pooled_txs_rlp], ../../networking/p2p @@ -27,18 +28,26 @@ const MAX_BODIES_SERVE = 256 # 
https://github.com/ethereum/devp2p/blob/master/caps/eth.md#getpooledtransactions-0x09 MAX_TXS_SERVE = 256 - SOFT_RESPONSE_LIMIT = 2 * 1024 * 1024 + MAX_ACTION_HANDLER = 128 # ------------------------------------------------------------------------------ # Public constructor/destructor # ------------------------------------------------------------------------------ proc new*(_: type EthWireRef, - txPool: TxPoolRef): EthWireRef = - EthWireRef( - chain: txPool.chain, - txPool: txPool + txPool: TxPoolRef, + node: EthereumNode): EthWireRef = + let wire = EthWireRef( + chain : txPool.chain, + txPool: txPool, + node : node, + quota : setupTokenBucket(), + actionQueue : newAsyncQueue[ActionHandler](maxsize = MAX_ACTION_HANDLER), ) + wire.tickerHeartbeat = tickerLoop(wire) + wire.actionHeartbeat = actionLoop(wire) + wire.gossipEnabled = not syncerRunning(wire) + wire # ------------------------------------------------------------------------------ # Public functions: eth wire protocol handlers @@ -215,18 +224,6 @@ proc getBlockHeaders*(ctx: EthWireRef, move(list) -proc handleAnnouncedTxs*(ctx: EthWireRef, - packet: TransactionsPacket) = - if packet.transactions.len == 0: - return - - debug "received new transactions", - number = packet.transactions.len - - for tx in packet.transactions: - ctx.txPool.addTx(tx).isOkOr: - continue - # ------------------------------------------------------------------------------ # End # ------------------------------------------------------------------------------ diff --git a/execution_chain/sync/wire_protocol/responder.nim b/execution_chain/sync/wire_protocol/responder.nim index fb5e4d8773..9172df6b75 100644 --- a/execution_chain/sync/wire_protocol/responder.nim +++ b/execution_chain/sync/wire_protocol/responder.nim @@ -15,6 +15,7 @@ import ./types, ./handler, ./requester, + ./broadcast, ./trace_config, ../../utils/utils, ../../common/logging, @@ -124,7 +125,7 @@ proc transactionsUserHandler[PROTO](peer: Peer; packet: 
TransactionsPacket) {. trace trEthRecvReceived & "Transactions (0x02)", peer, transactions = packet.transactions.len let ctx = peer.networkState(PROTO) - ctx.handleAnnouncedTxs(packet) + await ctx.handleTransactionsBroadcast(packet, peer) proc transactionsThunk[PROTO](peer: Peer; data: Rlp) {. async: (raises: [CancelledError, EthP2PError]).} = @@ -203,19 +204,23 @@ proc newBlockThunk[PROTO](peer: Peer; data: Rlp) {. await newBlockUserHandler(peer, packet) -proc newPooledTransactionHashesUserHandler(peer: Peer; packet: NewPooledTransactionHashesPacket) {. +proc newPooledTransactionHashesUserHandler(peer: Peer; + wire: EthWireRef; + packet: NewPooledTransactionHashesPacket) {. async: (raises: [CancelledError, EthP2PError]).} = when trEthTraceGossipOk: trace trEthRecvReceived & "NewPooledTransactionHashes (0x08)", peer, txTypes = packet.txTypes.toHex, txSizes = packet.txSizes.toStr, hashes = packet.txHashes.len + await wire.handleTxHashesBroadcast(packet, peer) proc newPooledTransactionHashesThunk[PROTO](peer: Peer; data: Rlp) {. async: (raises: [CancelledError, EthP2PError]).} = PROTO.rlpxWithPacketHandler(NewPooledTransactionHashesPacket, peer, data, [txTypes, txSizes, txHashes]): - await newPooledTransactionHashesUserHandler(peer, packet) + let wire = peer.networkState(PROTO) + await newPooledTransactionHashesUserHandler(peer, wire, packet) proc getPooledTransactionsUserHandler[PROTO](response: Responder; @@ -297,9 +302,9 @@ proc blockRangeUpdateUserHandler(peer: Peer; packet: BlockRangeUpdatePacket) {. peer.state(eth69).latest = packet.latest peer.state(eth69).latestHash = packet.latestHash -proc blockRangeUpdateThunk(peer: Peer; data: Rlp) {. +proc blockRangeUpdateThunk[PROTO](peer: Peer; data: Rlp) {. 
async: (raises: [CancelledError, EthP2PError]).} = - eth68.rlpxWithPacketHandler(BlockRangeUpdatePacket, + PROTO.rlpxWithPacketHandler(BlockRangeUpdatePacket, peer, data, [earliest, latest, latestHash]): await blockRangeUpdateUserHandler(peer, packet) @@ -444,7 +449,7 @@ proc eth69Registration() = status69Thunk, Status69Packet) registerCommonThunk(protocol, eth69) registerMsg(protocol, BlockRangeUpdateMsg, "blockRangeUpdate", - blockRangeUpdateThunk, BlockRangeUpdatePacket) + blockRangeUpdateThunk[eth69], BlockRangeUpdatePacket) registerProtocol(protocol) eth68Registration() diff --git a/execution_chain/sync/wire_protocol/setup.nim b/execution_chain/sync/wire_protocol/setup.nim index c5e3847e38..20d75d0288 100644 --- a/execution_chain/sync/wire_protocol/setup.nim +++ b/execution_chain/sync/wire_protocol/setup.nim @@ -22,9 +22,9 @@ import proc addEthHandlerCapability*( node: EthereumNode; txPool: TxPoolRef; - ) = + ): EthWireRef = ## Install wire prototcol handlers for each cap. - let wire = EthWireRef.new(txPool) + let wire = EthWireRef.new(txPool, node) node.addCapability(eth68, wire) node.addCapability(eth69, wire) diff --git a/execution_chain/sync/wire_protocol/types.nim b/execution_chain/sync/wire_protocol/types.nim index ba958b4af5..2bb38d92dd 100644 --- a/execution_chain/sync/wire_protocol/types.nim +++ b/execution_chain/sync/wire_protocol/types.nim @@ -11,7 +11,10 @@ {.push raises: [].} import + std/[sets, tables, times], eth/common, + chronos, + chronos/ratelimit, ../../core/[chain, tx_pool, pooled_txs], ../../networking/p2p_types @@ -112,6 +115,19 @@ type latest*: uint64 latestHash*: Hash32 + SeenObject* = ref object + lastSeen*: Time + peers*: HashSet[NodeId] + + ActionHandler* = proc(): Future[void] {.async: (raises: [CancelledError]).} + EthWireRef* = ref object of RootRef chain* : ForkedChainRef txPool*: TxPoolRef + node* : EthereumNode + quota* : TokenBucket + seenTransactions*: Table[Hash32, SeenObject] + tickerHeartbeat*: 
Future[void].Raising([CancelledError]) + actionHeartbeat*: Future[void].Raising([CancelledError]) + actionQueue*: AsyncQueue[ActionHandler] + gossipEnabled*: bool diff --git a/hive_integration/nodocker/engine/engine_env.nim b/hive_integration/nodocker/engine/engine_env.nim index 94919e256d..a55bc0bee8 100644 --- a/hive_integration/nodocker/engine/engine_env.nim +++ b/hive_integration/nodocker/engine/engine_env.nim @@ -47,6 +47,7 @@ type client : RpcHttpClient txPool : TxPoolRef chain : ForkedChainRef + wire : EthWireRef const baseFolder = "hive_integration/nodocker/engine" @@ -84,8 +85,7 @@ proc newEngineEnv*(conf: var NimbusConf, chainFile: string, enableAuth: bool): E com = makeCom(conf) chain = ForkedChainRef.init(com, enableQueue = true) txPool = TxPoolRef.new(chain) - - node.addEthHandlerCapability(txPool) + wire = node.addEthHandlerCapability(txPool) var key: JwtSharedKey key.fromHex(jwtSecret).isOkOr: @@ -126,13 +126,15 @@ proc newEngineEnv*(conf: var NimbusConf, chainFile: string, enableAuth: bool): E server : server, client : client, txPool : txPool, - chain : chain + chain : chain, + wire : wire, ) proc close*(env: EngineEnv) = waitFor env.node.closeWait() waitFor env.client.close() waitFor env.server.closeWait() + waitFor env.wire.stop() waitFor env.chain.stopProcessingQueue() proc setRealTTD*(env: EngineEnv) = From 18af83da7177d3e97d5a8719486ae1e19aae6814 Mon Sep 17 00:00:00 2001 From: bhartnett <51288821+bhartnett@users.noreply.github.com> Date: Wed, 11 Jun 2025 19:32:40 +0800 Subject: [PATCH 076/138] Portal client: Add accept metadata to neighborhood/random gossip and PutContent JSON-RPC endpoints (#3342) --- .../bridge/history/portal_history_bridge.nim | 5 +- portal/network/wire/portal_protocol.nim | 56 ++++++++++++++---- portal/rpc/rpc_portal_beacon_api.nim | 19 ++++-- portal/rpc/rpc_portal_history_api.nim | 15 ++++- portal/rpc/rpc_portal_state_api.nim | 15 ++++- portal/rpc/rpc_types.nim | 12 +++- .../test_portal_wire_protocol.nim | 59 
+++++++++++++++++-- 7 files changed, 152 insertions(+), 29 deletions(-) diff --git a/portal/bridge/history/portal_history_bridge.nim b/portal/bridge/history/portal_history_bridge.nim index 09f0d7b19c..156829f650 100644 --- a/portal/bridge/history/portal_history_bridge.nim +++ b/portal/bridge/history/portal_history_bridge.nim @@ -506,10 +506,11 @@ proc runHistory*(config: PortalBridgeConf) = contentValueHex = contentValue.toHex() try: - let peers = await bridge.portalClient.portal_historyPutContent( + let putContentResult = await bridge.portalClient.portal_historyPutContent( contentKeyHex, contentValueHex ) - debug "Content gossiped", peers, contentKey = contentKeyHex + debug "Content gossiped", + peers = putContentResult.peerCount, contentKey = contentKeyHex except CancelledError as e: trace "Cancelled gossipWorker" raise e diff --git a/portal/network/wire/portal_protocol.nim b/portal/network/wire/portal_protocol.nim index 249f87c15c..dc41a1b2e0 100644 --- a/portal/network/wire/portal_protocol.nim +++ b/portal/network/wire/portal_protocol.nim @@ -196,6 +196,15 @@ type of Database: contentKeys: ContentKeysList + OfferBatchMetadata* = object + successCount*: int + acceptedCount*: int + genericDeclineCount*: int + alreadyStoredCount*: int + notWithinRadiusCount*: int + rateLimitedCount*: int + transferInProgressCount*: int + PortalProtocol* = ref object of TalkProtocol protocolId*: PortalProtocolId routingTable*: RoutingTable @@ -1752,17 +1761,38 @@ proc queryRandom*( ## Perform a query for a random target, return all nodes discovered. 
p.query(NodeId.random(p.baseProtocol.rng[])) -proc offerBatchGetPeerCount*( +proc offerBatchGetMetadata*( p: PortalProtocol, offers: seq[OfferRequest] -): Future[int] {.async: (raises: [CancelledError]).} = +): Future[OfferBatchMetadata] {.async: (raises: [CancelledError]).} = let futs = await allFinished(offers.mapIt(p.offerRateLimited(it))) - var peerCount = 0 + var metadata: OfferBatchMetadata for f in futs: - if f.completed() and f.value().isOk(): - inc peerCount # only count successful offers + if not f.completed(): + continue + let acceptCodes = f.value().valueOr: + continue + + inc metadata.successCount + + for code in acceptCodes: + case code + of Accepted: + inc metadata.acceptedCount + of DeclinedGeneric: + inc metadata.genericDeclineCount + of DeclinedAlreadyStored: + inc metadata.alreadyStoredCount + of DeclinedNotWithinRadius: + inc metadata.notWithinRadiusCount + of DeclinedRateLimited: + inc metadata.rateLimitedCount + of DeclinedInboundTransferInProgress: + inc metadata.transferInProgressCount + else: + discard - peerCount + metadata proc neighborhoodGossip*( p: PortalProtocol, @@ -1770,7 +1800,7 @@ proc neighborhoodGossip*( contentKeys: ContentKeysList, content: seq[seq[byte]], enableNodeLookup = false, -): Future[int] {.async: (raises: [CancelledError]).} = +): Future[OfferBatchMetadata] {.async: (raises: [CancelledError]).} = ## Run neighborhood gossip for provided content. ## Returns the number of peers to which content was attempted to be gossiped. ## When enableNodeLookup is true then if the local routing table doesn't @@ -1780,7 +1810,7 @@ proc neighborhoodGossip*( ## of nodes in the network) to reduce the number of pings required to populate ## the cache over time as old content is removed when the cache is full. 
if content.len() == 0: - return 0 + return default(OfferBatchMetadata) var contentList = List[ContentKV, contentKeysLimit].init(@[]) for i, contentItem in content: @@ -1790,7 +1820,7 @@ proc neighborhoodGossip*( # Just taking the first content item as target id. # TODO: come up with something better? let contentId = p.toContentId(contentList[0].contentKey).valueOr: - return 0 + return default(OfferBatchMetadata) # For selecting the closest nodes to whom to gossip the content a mixed # approach is taken: @@ -1854,18 +1884,18 @@ proc neighborhoodGossip*( if offers.len() >= p.config.maxGossipNodes: break - await p.offerBatchGetPeerCount(offers) + await p.offerBatchGetMetadata(offers) proc randomGossip*( p: PortalProtocol, srcNodeId: Opt[NodeId], contentKeys: ContentKeysList, content: seq[seq[byte]], -): Future[int] {.async: (raises: [CancelledError]).} = +): Future[OfferBatchMetadata] {.async: (raises: [CancelledError]).} = ## Run random gossip for provided content. ## Returns the number of peers to which content was attempted to be gossiped. if content.len() == 0: - return 0 + return default(OfferBatchMetadata) var contentList = List[ContentKV, contentKeysLimit].init(@[]) for i, contentItem in content: @@ -1876,7 +1906,7 @@ proc randomGossip*( nodes = p.routingTable.randomNodes(p.config.maxGossipNodes) offers = nodes.mapIt(OfferRequest(dst: it, kind: Direct, contentList: contentList)) - await p.offerBatchGetPeerCount(offers) + await p.offerBatchGetMetadata(offers) proc storeContent*( p: PortalProtocol, diff --git a/portal/rpc/rpc_portal_beacon_api.nim b/portal/rpc/rpc_portal_beacon_api.nim index 0c184ac299..3ea43b0094 100644 --- a/portal/rpc/rpc_portal_beacon_api.nim +++ b/portal/rpc/rpc_portal_beacon_api.nim @@ -140,14 +140,25 @@ proc installPortalBeaconApiHandlers*(rpcServer: RpcServer, p: PortalProtocol) = # TODO: Do we need to convert the received offer to a value without proofs before storing? 
# TODO: validate and store content locally # storedLocally = p.storeContent(keyBytes, contentId, valueBytes) - peerCount = await p.neighborhoodGossip( + gossipMetadata = await p.neighborhoodGossip( Opt.none(NodeId), ContentKeysList(@[keyBytes]), @[offerValueBytes], enableNodeLookup = true, ) - PutContentResult(storedLocally: false, peerCount: peerCount) + PutContentResult( + storedLocally: false, + peerCount: gossipMetadata.successCount, + acceptMetadata: AcceptMetadata( + acceptedCount: gossipMetadata.acceptedCount, + genericDeclineCount: gossipMetadata.genericDeclineCount, + alreadyStoredCount: gossipMetadata.alreadyStoredCount, + notWithinRadiusCount: gossipMetadata.notWithinRadiusCount, + rateLimitedCount: gossipMetadata.rateLimitedCount, + transferInProgressCount: gossipMetadata.transferInProgressCount, + ), + ) rpcServer.rpc("portal_beaconRandomGossip") do( contentKey: string, contentValue: string @@ -156,8 +167,8 @@ proc installPortalBeaconApiHandlers*(rpcServer: RpcServer, p: PortalProtocol) = keyBytes = ContentKeyByteList.init(hexToSeqByte(contentKey)) offerValueBytes = hexToSeqByte(contentValue) - peerCount = await p.randomGossip( + gossipMetadata = await p.randomGossip( Opt.none(NodeId), ContentKeysList(@[keyBytes]), @[offerValueBytes] ) - peerCount + gossipMetadata.successCount diff --git a/portal/rpc/rpc_portal_history_api.nim b/portal/rpc/rpc_portal_history_api.nim index 0d1a302003..9d3678a1cc 100644 --- a/portal/rpc/rpc_portal_history_api.nim +++ b/portal/rpc/rpc_portal_history_api.nim @@ -157,11 +157,22 @@ proc installPortalHistoryApiHandlers*(rpcServer: RpcServer, p: PortalProtocol) = # validation should be done here anyhow. # As no validation is done here, the content is not stored locally. # TODO: Add default on validation by optional validation parameter. 
- peerCount = await p.neighborhoodGossip( + gossipMetadata = await p.neighborhoodGossip( Opt.none(NodeId), ContentKeysList(@[keyBytes]), @[offerValueBytes], enableNodeLookup = true, ) - PutContentResult(storedLocally: false, peerCount: peerCount) + PutContentResult( + storedLocally: false, + peerCount: gossipMetadata.successCount, + acceptMetadata: AcceptMetadata( + acceptedCount: gossipMetadata.acceptedCount, + genericDeclineCount: gossipMetadata.genericDeclineCount, + alreadyStoredCount: gossipMetadata.alreadyStoredCount, + notWithinRadiusCount: gossipMetadata.notWithinRadiusCount, + rateLimitedCount: gossipMetadata.rateLimitedCount, + transferInProgressCount: gossipMetadata.transferInProgressCount, + ), + ) diff --git a/portal/rpc/rpc_portal_state_api.nim b/portal/rpc/rpc_portal_state_api.nim index 2658a47c97..cf6879bdf2 100644 --- a/portal/rpc/rpc_portal_state_api.nim +++ b/portal/rpc/rpc_portal_state_api.nim @@ -182,11 +182,22 @@ proc installPortalStateApiHandlers*(rpcServer: RpcServer, p: PortalProtocol) = raise invalidValueErr() storedLocally = p.storeContent(keyBytes, contentId, valueBytes) - peerCount = await p.neighborhoodGossip( + gossipMetadata = await p.neighborhoodGossip( Opt.none(NodeId), ContentKeysList(@[keyBytes]), @[offerValueBytes], enableNodeLookup = true, ) - PutContentResult(storedLocally: storedLocally, peerCount: peerCount) + PutContentResult( + storedLocally: storedLocally, + peerCount: gossipMetadata.successCount, + acceptMetadata: AcceptMetadata( + acceptedCount: gossipMetadata.acceptedCount, + genericDeclineCount: gossipMetadata.genericDeclineCount, + alreadyStoredCount: gossipMetadata.alreadyStoredCount, + notWithinRadiusCount: gossipMetadata.notWithinRadiusCount, + rateLimitedCount: gossipMetadata.rateLimitedCount, + transferInProgressCount: gossipMetadata.transferInProgressCount, + ), + ) diff --git a/portal/rpc/rpc_types.nim b/portal/rpc/rpc_types.nim index 2b0c2b9383..f3f8dd0863 100644 --- a/portal/rpc/rpc_types.nim +++ 
b/portal/rpc/rpc_types.nim @@ -98,15 +98,25 @@ type ContentItem* = array[2, string] + AcceptMetadata* = object + acceptedCount*: int + genericDeclineCount*: int + alreadyStoredCount*: int + notWithinRadiusCount*: int + rateLimitedCount*: int + transferInProgressCount*: int + PutContentResult* = object - peerCount*: int storedLocally*: bool + peerCount*: int + acceptMetadata*: AcceptMetadata NodeInfo.useDefaultSerializationIn JrpcConv RoutingTableInfo.useDefaultSerializationIn JrpcConv PingResult.useDefaultSerializationIn JrpcConv (string, string).useDefaultSerializationIn JrpcConv ContentInfo.useDefaultSerializationIn JrpcConv +AcceptMetadata.useDefaultSerializationIn JrpcConv PutContentResult.useDefaultSerializationIn JrpcConv func getNodeInfo*(r: RoutingTable): NodeInfo = diff --git a/portal/tests/wire_protocol_tests/test_portal_wire_protocol.nim b/portal/tests/wire_protocol_tests/test_portal_wire_protocol.nim index 1393100f25..7333bc4504 100644 --- a/portal/tests/wire_protocol_tests/test_portal_wire_protocol.nim +++ b/portal/tests/wire_protocol_tests/test_portal_wire_protocol.nim @@ -282,9 +282,17 @@ procSuite "Portal Wire Protocol Tests": contentKeys = ContentKeysList(@[ContentKeyByteList(@[byte 0x01, 0x02, 0x03])]) content: seq[seq[byte]] = @[@[byte 0x04, 0x05, 0x06]] - let peerCount = - await proto1.neighborhoodGossip(Opt.none(NodeId), contentKeys, content) - check peerCount == 1 + block: + let gossipMetadata = + await proto1.neighborhoodGossip(Opt.none(NodeId), contentKeys, content) + check: + gossipMetadata.successCount == 1 + gossipMetadata.acceptedCount == 1 + gossipMetadata.genericDeclineCount == 0 + gossipMetadata.alreadyStoredCount == 0 + gossipMetadata.notWithinRadiusCount == 0 + gossipMetadata.rateLimitedCount == 0 + gossipMetadata.transferInProgressCount == 0 let (srcNodeId, keys, items) = await proto2.stream.contentQueue.popFirst() check: @@ -294,6 +302,22 @@ procSuite "Portal Wire Protocol Tests": keys == contentKeys items == content + # Store the 
content + proto2.storeContent(keys[0], keys[0].toContentId().get(), items[0]) + + # Gossip the same content a second time + block: + let gossipMetadata = + await proto1.neighborhoodGossip(Opt.none(NodeId), contentKeys, content) + check: + gossipMetadata.successCount == 1 + gossipMetadata.acceptedCount == 0 + gossipMetadata.genericDeclineCount == 0 + gossipMetadata.alreadyStoredCount == 1 + gossipMetadata.notWithinRadiusCount == 0 + gossipMetadata.rateLimitedCount == 0 + gossipMetadata.transferInProgressCount == 0 + await proto1.stopPortalProtocol() await proto2.stopPortalProtocol() @@ -308,8 +332,17 @@ procSuite "Portal Wire Protocol Tests": contentKeys = ContentKeysList(@[ContentKeyByteList(@[byte 0x01, 0x02, 0x03])]) content: seq[seq[byte]] = @[@[byte 0x04, 0x05, 0x06]] - let peerCount = await proto1.randomGossip(Opt.none(NodeId), contentKeys, content) - check peerCount == 1 + block: + let gossipMetadata = + await proto1.randomGossip(Opt.none(NodeId), contentKeys, content) + check: + gossipMetadata.successCount == 1 + gossipMetadata.acceptedCount == 1 + gossipMetadata.genericDeclineCount == 0 + gossipMetadata.alreadyStoredCount == 0 + gossipMetadata.notWithinRadiusCount == 0 + gossipMetadata.rateLimitedCount == 0 + gossipMetadata.transferInProgressCount == 0 let (srcNodeId, keys, items) = await proto2.stream.contentQueue.popFirst() check: @@ -319,6 +352,22 @@ procSuite "Portal Wire Protocol Tests": keys == contentKeys items == content + # Store the content + proto2.storeContent(keys[0], keys[0].toContentId().get(), items[0]) + + # Gossip the same content a second time + block: + let gossipMetadata = + await proto1.neighborhoodGossip(Opt.none(NodeId), contentKeys, content) + check: + gossipMetadata.successCount == 1 + gossipMetadata.acceptedCount == 0 + gossipMetadata.genericDeclineCount == 0 + gossipMetadata.alreadyStoredCount == 1 + gossipMetadata.notWithinRadiusCount == 0 + gossipMetadata.rateLimitedCount == 0 + gossipMetadata.transferInProgressCount == 0 + 
await proto1.stopPortalProtocol() await proto2.stopPortalProtocol() From 71153b95910d4eafa5b9afeb7d74cfddff02d3af Mon Sep 17 00:00:00 2001 From: andri lim Date: Wed, 11 Jun 2025 22:05:25 +0700 Subject: [PATCH 077/138] Remove unused gasEip7702CodeCheck (#3377) --- execution_chain/evm/interpreter/op_handlers/oph_helpers.nim | 6 ------ 1 file changed, 6 deletions(-) diff --git a/execution_chain/evm/interpreter/op_handlers/oph_helpers.nim b/execution_chain/evm/interpreter/op_handlers/oph_helpers.nim index 5c53519640..ae67faa8d8 100644 --- a/execution_chain/evm/interpreter/op_handlers/oph_helpers.nim +++ b/execution_chain/evm/interpreter/op_handlers/oph_helpers.nim @@ -60,12 +60,6 @@ proc delegateResolutionCost*(c: Computation, address: Address): GasInt = else: return WarmStorageReadCost -proc gasEip7702CodeCheck*(c: Computation; address: Address): GasInt = - let delegateTo = - parseDelegationAddress(c.vmState.readOnlyLedger.getCode(address)).valueOr: - return 0 - c.delegateResolutionCost(delegateTo) - # ------------------------------------------------------------------------------ # End # ------------------------------------------------------------------------------ From 23f1606bed28670e17f4de89074c8ba5c4f38eec Mon Sep 17 00:00:00 2001 From: kdeme <7857583+kdeme@users.noreply.github.com> Date: Thu, 12 Jun 2025 15:14:07 +0200 Subject: [PATCH 078/138] Bump nim-eth for uTP logging improvements (#3385) --- vendor/nim-eth | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vendor/nim-eth b/vendor/nim-eth index 8f43bd6b5d..fb3fde7832 160000 --- a/vendor/nim-eth +++ b/vendor/nim-eth @@ -1 +1 @@ -Subproject commit 8f43bd6b5d8e4cbee940a9e630080e7801c3835c +Subproject commit fb3fde7832f12dea1673da0072a87fac0be4685a From 8572751619dd53d1146598d55b67ae153b0772f7 Mon Sep 17 00:00:00 2001 From: kdeme <7857583+kdeme@users.noreply.github.com> Date: Thu, 12 Jun 2025 15:27:05 +0200 Subject: [PATCH 079/138] Bump portal-mainnet for removal of unresponsive bootstrap nodes 
(#3386) --- vendor/portal-mainnet | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vendor/portal-mainnet b/vendor/portal-mainnet index 35e7fabcbe..b6a5ef323e 160000 --- a/vendor/portal-mainnet +++ b/vendor/portal-mainnet @@ -1 +1 @@ -Subproject commit 35e7fabcbe2dcf392fbe8184f4187d356b6ab6e9 +Subproject commit b6a5ef323ebc7a471af2fb590e685867acb886e8 From ee9139bfb66bbb53b1669826c319b5e0bc0b0241 Mon Sep 17 00:00:00 2001 From: bhartnett <51288821+bhartnett@users.noreply.github.com> Date: Fri, 13 Jun 2025 10:38:38 +0800 Subject: [PATCH 080/138] Async EVM: Return ok when error message exists so that call output can be returned (#3391) --- portal/evm/async_evm.nim | 5 +---- portal/rpc/rpc_eth_api.nim | 8 ++++++++ 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/portal/evm/async_evm.nim b/portal/evm/async_evm.nim index 9922431da1..25d26b09fc 100644 --- a/portal/evm/async_evm.nim +++ b/portal/evm/async_evm.nim @@ -311,10 +311,7 @@ proc call*( callResult = ?(await evm.callFetchingState(vmState, header, tx, optimisticStateFetch)) - if callResult.error.len() > 0: - err("EVM execution failed: " & callResult.error) - else: - ok(callResult) + ok(callResult) proc createAccessList*( evm: AsyncEvm, header: Header, tx: TransactionArgs, optimisticStateFetch = true diff --git a/portal/rpc/rpc_eth_api.nim b/portal/rpc/rpc_eth_api.nim index 71a71a1311..61992cb972 100644 --- a/portal/rpc/rpc_eth_api.nim +++ b/portal/rpc/rpc_eth_api.nim @@ -9,6 +9,7 @@ import std/sequtils, + stew/byteutils, json_rpc/rpcserver, chronicles, web3/[eth_api_types, conversions], @@ -463,6 +464,13 @@ proc installEthApiHandlers*( let callResult = (await evm.call(header, tx, optimisticStateFetch)).valueOr: raise newException(ValueError, error) + if callResult.error.len() > 0: + raise (ref ApplicationError)( + code: 3, + msg: callResult.error, + data: Opt.some(JsonString("\"" & callResult.output.to0xHex() & "\"")), + ) + return callResult.output rpcServer.rpc("eth_createAccessList") 
do( From 4b6fa9497c9d83064ddab8d62022f6c1864ce8ef Mon Sep 17 00:00:00 2001 From: kdeme <7857583+kdeme@users.noreply.github.com> Date: Fri, 13 Jun 2025 13:13:26 +0200 Subject: [PATCH 081/138] Improve utp testing readme (#3398) + fix deprecated docker warning --- portal/tools/utp_testing/README.md | 50 +++++++++++-------- .../utp_testing/docker/docker-compose.yml | 10 +++- portal/tools/utp_testing/utp_test.nim | 4 +- 3 files changed, 40 insertions(+), 24 deletions(-) diff --git a/portal/tools/utp_testing/README.md b/portal/tools/utp_testing/README.md index 142e796af6..0133b23879 100644 --- a/portal/tools/utp_testing/README.md +++ b/portal/tools/utp_testing/README.md @@ -1,19 +1,16 @@ # uTP testing infrastructure -Testing infrastructure which enables to test uTP implementation over different +Testing infrastructure which allows to test the uTP implementation over different network conditions on a local machine. -Uses following tools developed to test the QUIC protocol: +Uses following external tool developed to test the QUIC protocol: -[quic-interop-runner](https://github.com/marten-seemann/quic-interop-runner) - -[quic-network-simulator](https://github.com/marten-seemann/quic-network-simulator) +- [quic-network-simulator](https://github.com/marten-seemann/quic-network-simulator) ## Prerequisities - Machine with Docker installed - -- nimbus-eth1 set-up to run `make utp_test` +- nimbus-eth1 repository set-up to run `make utp_test` ## How it works @@ -24,34 +21,45 @@ Test setup uses Docker Compose to start 3 Docker containers: The networking is set up in such way that network traffic is routed from client to server and server to client through the simulator. -The simulator decides what to do with packets passing through based on the selected scneario. +The simulator decides what to do with packets passing through based on the selected scenarios. 
Explanation from [quic-network-simulator](https://github.com/marten-seemann/quic-network-simulator): -``` -The framework uses two networks on the host machine: `leftnet` (IPv4 +> The framework uses two networks on the host machine: `leftnet` (IPv4 193.167.0.0/24, IPv6 fd00:cafe:cafe:0::/64) and `rightnet` (IPv4 193.167.100.0/24, IPv6 fd00:cafe:cafe:100::/64). `leftnet` is connected to the client docker image, and `rightnet` is connected to the server. The ns-3 simulation sits in the middle and forwards packets between `leftnet` and `rightnet` -``` -## Practicalities +Both the uTP client and uTP server have a JSON-RPC API exposed. The `utp_test` application will use these interfaces to initiate transfers between client and server and to verify their outcome. -For now the process is semi-manual (TODO automate this as much as possible) +## How to run To run integration testing scenarios with different network conditions: +Build the client/server Docker image: + +```sh +cd nimbus-eth1/ +docker build -t test-utp --build-arg BRANCH_NAME={branch-name} portal/tools/utp_testing/docker +``` + +Next run the Docker Compose with selected scenario details: + +```sh +SCENARIO="scenario_details" docker-compose -f portal/tools/utp_testing/docker/docker-compose.yml up +``` + +For example, for the `drop-rate` scenario with specified delay, bandwith, and different drop rate values: + +```sh +SCENARIO="drop-rate --delay=15ms --bandwidth=10Mbps --queue=25 --rate_to_client=10 --rate_to_server=10" docker-compose -f portal/tools/utp_testing/docker/docker-compose.yml up +``` + +Then build & run the application that will execute the tests: ``` -1. cd nimbus-eth1/ -2. docker build -t test-utp --build-arg BRANCH_NAME={branch-name} portal/tools/utp_testing/docker -3. 
SCENARIO="scenario_details" docker-compose -f portal/tools/utp_testing/docker/docker-compose.yml up - -For example: -SCENARIO="drop-rate --delay=15ms --bandwidth=10Mbps --queue=25 --rate_to_client=0 --rate_to_server=0" docker-compose -f portal/tools/utp_testing/docker/docker-compose.yml up -would start `drop-rate` scenario with specified delay, bandwith, and different drop rates -4. make utp-test +make utp-test ``` All scenarios are specified in: [scenarios](https://github.com/marten-seemann/quic-network-simulator/tree/master/sim/scenarios) diff --git a/portal/tools/utp_testing/docker/docker-compose.yml b/portal/tools/utp_testing/docker/docker-compose.yml index 50d1330111..f81588e4da 100644 --- a/portal/tools/utp_testing/docker/docker-compose.yml +++ b/portal/tools/utp_testing/docker/docker-compose.yml @@ -1,4 +1,12 @@ -version: "3.5" +# Nimbus +# Copyright (c) 2022-2025 Status Research & Development GmbH +# Licensed under either of +# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or +# http://www.apache.org/licenses/LICENSE-2.0) +# * MIT license ([LICENSE-MIT](LICENSE-MIT) or +# http://opensource.org/licenses/MIT) +# at your option. This file may not be copied, modified, or distributed except +# according to those terms. # TODO Add possibility to configure ports by env variables services: diff --git a/portal/tools/utp_testing/utp_test.nim b/portal/tools/utp_testing/utp_test.nim index 4e6d3a6344..8b89290b4f 100644 --- a/portal/tools/utp_testing/utp_test.nim +++ b/portal/tools/utp_testing/utp_test.nim @@ -21,8 +21,8 @@ proc generateBytesHex(rng: var HmacDrbgContext, length: int): string = # Before running this test suite, there need to be two instances of the # utp_test_app running under the tested ports: 9042, 9041. 
# Those could be launched locally by running either -# ./utp_test_app --udp-listen-address=127.0.0.1 --rpc-listen-address=0.0.0.0 --udp-port=9041 --rpc-port=9041 -# ./utp_test_app --udp-listen-address=127.0.0.1 --rpc-listen-address=0.0.0.0 --udp-port=9042 --rpc-port=9042 +# ./build/utp_test_app --udp-listen-address=127.0.0.1 --rpc-listen-address=0.0.0.0 --udp-port=9041 --rpc-port=9041 +# ./build/utp_test_app --udp-listen-address=127.0.0.1 --rpc-listen-address=0.0.0.0 --udp-port=9042 --rpc-port=9042 # or # running from docker dir: # 1. docker build -t test-utp --no-cache --build-arg BRANCH_NAME=branch-name . From 3a44d0f3074f248edb3be53a31af1eb22e7e1969 Mon Sep 17 00:00:00 2001 From: bhartnett <51288821+bhartnett@users.noreply.github.com> Date: Fri, 13 Jun 2025 19:37:06 +0800 Subject: [PATCH 082/138] Nimbus EVM: Add revert reason to error string when EVM execution reverts (#3392) * Add revert reason to error string when EVM execution reverts. * Add tests. --- execution_chain/evm/evm_errors.nim | 50 ++++++++++++++++++- .../interpreter/op_handlers/oph_sysops.nim | 17 +++++-- tests/all_tests.nim | 1 + tests/test_evm_errors.nim | 26 ++++++++++ 4 files changed, 90 insertions(+), 4 deletions(-) create mode 100644 tests/test_evm_errors.nim diff --git a/execution_chain/evm/evm_errors.nim b/execution_chain/evm/evm_errors.nim index 72050fd165..17cbfcb062 100644 --- a/execution_chain/evm/evm_errors.nim +++ b/execution_chain/evm/evm_errors.nim @@ -11,7 +11,12 @@ {.push raises: [].} import - results + std/tables, + results, + stint, + stew/[assign2, byteutils], + eth/common/hashes, + web3/encoding export results @@ -70,3 +75,46 @@ template evmErr*(errCode): auto = EvmErrorObj( code: EvmErrorCode.errCode, ) + + +# revertSelector is a special function selector for revert reason unpacking +const revertSelector = keccak256(toBytes("Error(string)")).data[0..3] + +# panicSelector is a special function selector for panic reason unpacking +const panicSelector = 
keccak256(toBytes("Panic(uint256)")).data[0..3] + +# panicReasons map is for readable panic codes +# see this linkage for the details +# https://docs.soliditylang.org/en/v0.8.21/control-structures.html#panic-via-assert-and-error-via-require +# the reason string list is copied from Geth +# https://github.com/ethers-io/ethers.js/blob/fa3a883ff7c88611ce766f58bdd4b8ac90814470/src.ts/abi/interface.ts#L207-L218 +const panicReasons = { + 0x00: "generic panic", + 0x01: "assert(false)", + 0x11: "arithmetic underflow or overflow", + 0x12: "division or modulo by zero", + 0x21: "enum overflow", + 0x22: "invalid encoded storage byte array accessed", + 0x31: "out-of-bounds array access; popping on an empty array", + 0x32: "out-of-bounds access of an array or bytesN", + 0x41: "out of memory", + 0x51: "uninitialized function", +}.toTable + +# UnpackRevert resolves the abi-encoded revert reason. According to the solidity +# spec https://solidity.readthedocs.io/en/latest/control-structures.html#revert, +# the provided revert reason is abi-encoded as if it were a call to function +# `Error(string)` or `Panic(uint256)`. +proc unpackRevertReason*(data: openArray[byte], reason: var string) = + if data.len() < 4: + reason = "" + return + + let selector = data[0..3] + + if selector == revertSelector: + discard decode(data.toOpenArray(4, data.len() - 1), 0, 0, reason) + elif selector == panicSelector: + var reasonCode: UInt256 + discard decode(data.toOpenArray(4, data.len() - 1), 0, 0, reasonCode) + assign(reason, panicReasons.getOrDefault(reasonCode.truncate(int))) diff --git a/execution_chain/evm/interpreter/op_handlers/oph_sysops.nim b/execution_chain/evm/interpreter/op_handlers/oph_sysops.nim index f3d433366e..89cf368fed 100644 --- a/execution_chain/evm/interpreter/op_handlers/oph_sysops.nim +++ b/execution_chain/evm/interpreter/op_handlers/oph_sysops.nim @@ -59,13 +59,24 @@ proc revertOp(cpt: VmCpt): EvmResultVoid = cpt.stack.lsShrink(2) ? 
cpt.opcodeGasCost(Revert, - cpt.gasCosts[Revert].m_handler(cpt.memory.len, pos, len), - reason = "REVERT") + cpt.gasCosts[Revert].m_handler(cpt.memory.len, pos, len), + reason = "REVERT") cpt.memory.extend(pos, len) assign(cpt.output, cpt.memory.read(pos, len)) + + var revertReason: string + unpackRevertReason(cpt.output, revertReason) + + let revertMsg = + if revertReason.len() > 0: + "execution reverted: " & revertReason + else: + "execution reverted" + # setError(msg, false) will signal cheap revert - cpt.setError(StatusCode.Revert, "REVERT opcode executed", false) + cpt.setError(StatusCode.Revert, revertMsg, false) + ok() func invalidOp(cpt: VmCpt): EvmResultVoid = diff --git a/tests/all_tests.nim b/tests/all_tests.nim index fa754a6de4..059fd7a1db 100644 --- a/tests/all_tests.nim +++ b/tests/all_tests.nim @@ -15,6 +15,7 @@ import test_difficulty, test_engine_api, test_evm_support, + test_evm_errors, test_filters, test_forked_chain, test_forkid, diff --git a/tests/test_evm_errors.nim b/tests/test_evm_errors.nim new file mode 100644 index 0000000000..26eaa892c1 --- /dev/null +++ b/tests/test_evm_errors.nim @@ -0,0 +1,26 @@ +# Nimbus +# Copyright (c) 2025 Status Research & Development GmbH +# Licensed under either of +# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0) +# * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) +# at your option. This file may not be copied, modified, or distributed except according to those terms. 
+ +import + stew/byteutils, + unittest2, + ../execution_chain/evm/evm_errors + +suite "EVM errors tests": + test "unpack revert reason data": + let data = "0x08c379a000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000018556e69737761705632526f757465723a20455850495245440000000000000000" + + var revertReason: string + unpackRevertReason(data.hexToSeqByte(), revertReason) + check revertReason == "UniswapV2Router: EXPIRED" + + test "unpack panic reason data": + let data = "0x4e487b710000000000000000000000000000000000000000000000000000000000000032" + + var revertReason: string + unpackRevertReason(data.hexToSeqByte(), revertReason) + check revertReason == "out-of-bounds access of an array or bytesN" From 6b21848dd308809a598ecb6fbbdd5f8beaec7c08 Mon Sep 17 00:00:00 2001 From: andri lim Date: Fri, 13 Jun 2025 18:37:19 +0700 Subject: [PATCH 083/138] Add missing notification if something happened to era block loader (#3393) --- execution_chain/db/era1_db/db_desc.nim | 2 +- execution_chain/evm/interpreter/op_handlers/oph_helpers.nim | 1 - execution_chain/nimbus_import.nim | 2 ++ 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/execution_chain/db/era1_db/db_desc.nim b/execution_chain/db/era1_db/db_desc.nim index 7734f85dce..f210aeab4e 100644 --- a/execution_chain/db/era1_db/db_desc.nim +++ b/execution_chain/db/era1_db/db_desc.nim @@ -50,7 +50,7 @@ proc getEra1File*(db: Era1DbRef, era: Era1): Result[Era1File, string] = # TODO: The open call does not do full verification. It is assumed here that # trusted files are used. We might want to add a full validation option. 
let f = Era1File.open(path).valueOr: - return err(error) + return err(path & ": " & error) if db.files.len > 16: # TODO LRU close(db.files[0]) diff --git a/execution_chain/evm/interpreter/op_handlers/oph_helpers.nim b/execution_chain/evm/interpreter/op_handlers/oph_helpers.nim index ae67faa8d8..55473a5109 100644 --- a/execution_chain/evm/interpreter/op_handlers/oph_helpers.nim +++ b/execution_chain/evm/interpreter/op_handlers/oph_helpers.nim @@ -15,7 +15,6 @@ {.push raises: [].} import - ../../../core/eip7702, ../../evm_errors, ../../types, ../gas_costs, diff --git a/execution_chain/nimbus_import.nim b/execution_chain/nimbus_import.nim index 8ab72ac028..42d869495e 100644 --- a/execution_chain/nimbus_import.nim +++ b/execution_chain/nimbus_import.nim @@ -294,6 +294,8 @@ proc importBlocks*(conf: NimbusConf, com: CommonRef) = proc loadEraBlock(blockNumber: uint64): bool = db.getEthBlock(blockNumber, blk).isOkOr: + chronicles.error "Error when loading era block", + blockNumber, msg=error return false true From 06917e7ed47a868a198aaacbc08542a1a465238f Mon Sep 17 00:00:00 2001 From: tersec Date: Fri, 13 Jun 2025 18:28:25 +0200 Subject: [PATCH 084/138] increase default gas limit to 45M (#3402) --- execution_chain/constants.nim | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/execution_chain/constants.nim b/execution_chain/constants.nim index 8854f5ead5..a6d0d3762b 100644 --- a/execution_chain/constants.nim +++ b/execution_chain/constants.nim @@ -43,7 +43,7 @@ const GAS_LIMIT_MINIMUM* = 5000 GAS_LIMIT_MAXIMUM* = int64.high.GasInt # Maximum the gas limit (2^63-1). 
- DEFAULT_GAS_LIMIT* = 36_000_000 + DEFAULT_GAS_LIMIT* = 45_000_000 # https://eips.ethereum.org/EIPS/eip-7825 TX_GAS_LIMIT* = 30_000_000 From 3c572c49b867e66ebe5bb57992191b0c6ea43631 Mon Sep 17 00:00:00 2001 From: andri lim Date: Mon, 16 Jun 2025 15:32:12 +0700 Subject: [PATCH 085/138] Fix missing return value in addEthHandlerCapability (#3405) --- execution_chain/sync/wire_protocol/setup.nim | 1 + 1 file changed, 1 insertion(+) diff --git a/execution_chain/sync/wire_protocol/setup.nim b/execution_chain/sync/wire_protocol/setup.nim index 20d75d0288..abc9585d3d 100644 --- a/execution_chain/sync/wire_protocol/setup.nim +++ b/execution_chain/sync/wire_protocol/setup.nim @@ -27,6 +27,7 @@ proc addEthHandlerCapability*( let wire = EthWireRef.new(txPool, node) node.addCapability(eth68, wire) node.addCapability(eth69, wire) + wire # ------------------------------------------------------------------------------ # End From 90892ddeab70b68d50eaef20bd33f0f91b46ac8a Mon Sep 17 00:00:00 2001 From: andri lim Date: Mon, 16 Jun 2025 22:53:30 +0700 Subject: [PATCH 086/138] Rlpx handshake using the latest version of a protocol capabilities (#3406) * Rlpx handshake using the latest version of a protocol capabilities * Copy comments from the spec --- execution_chain/networking/rlpx.nim | 23 +++++++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/execution_chain/networking/rlpx.nim b/execution_chain/networking/rlpx.nim index ff15833455..dc24252ae6 100644 --- a/execution_chain/networking/rlpx.nim +++ b/execution_chain/networking/rlpx.nim @@ -998,10 +998,29 @@ proc removePeer(network: EthereumNode, peer: Peer) = if observer.protocols.len == 0 or peer.supports(observer.protocols): observer.onPeerDisconnected(peer) +proc selectCapsByLatestVersion(peer: Peer): seq[ProtocolInfo] = + # Avoid using multiple capability handshake when connecting to a peer. + # Use only the latest capability version. e.g. choose eth/69 over eth/68. 
+ # But other capabilities with different name is okay. e.g. snap/1 + + # From the spec: + # https://github.com/ethereum/devp2p/blob/bc76b9809a30e6dc5c8dcda996273f0f9bcf7108/rlpx.md#message-id-based-multiplexing + # "...If multiple versions are shared of the same (equal name) capability, the numerically highest wins, others are ignored." + var map: Table[string, ProtocolInfo] + for proto in peer.dispatcher.activeProtocols: + map.withValue(proto.capability.name, val) do: + if proto.capability.version > val.capability.version: + val[] = proto + do: + map[proto.capability.name] = proto + + for proto in map.values: + result.add proto + proc callDisconnectHandlers( peer: Peer, reason: DisconnectionReason ): Future[void] {.async: (raises: []).} = - let futures = peer.dispatcher.activeProtocols + let futures = peer.selectCapsByLatestVersion() .filterIt(it.onPeerDisconnected != nil) .mapIt(it.onPeerDisconnected(peer, reason)) @@ -1102,7 +1121,7 @@ proc postHelloSteps( # the network and to yield on their `nextMsg` waits. 
# - let handshakes = peer.dispatcher.activeProtocols + let handshakes = peer.selectCapsByLatestVersion() .filterIt(it.onPeerConnected != nil) .mapIt(it.onPeerConnected(peer)) From 8a5825f82de1e4929ae7e829f2784ef35ff9d06c Mon Sep 17 00:00:00 2001 From: bhartnett <51288821+bhartnett@users.noreply.github.com> Date: Tue, 17 Jun 2025 15:20:17 +0800 Subject: [PATCH 087/138] Bump nim-stint to 4992f27833104091fba75486d75f890312dff3d7 (#3413) --- vendor/nim-stint | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vendor/nim-stint b/vendor/nim-stint index c5a4355407..4992f27833 160000 --- a/vendor/nim-stint +++ b/vendor/nim-stint @@ -1 +1 @@ -Subproject commit c5a4355407551804bb4e15aa6cf8dbd881b944a3 +Subproject commit 4992f27833104091fba75486d75f890312dff3d7 From 1e6d8024fb7796b348685199a47d449332bf7c3d Mon Sep 17 00:00:00 2001 From: bhartnett <51288821+bhartnett@users.noreply.github.com> Date: Tue, 17 Jun 2025 15:20:33 +0800 Subject: [PATCH 088/138] Bump nimcrypto to 19c41d6be4c00b4a2c8000583bd30cf8ceb5f4b1 (v0.6.3) (#3412) --- vendor/nimcrypto | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vendor/nimcrypto b/vendor/nimcrypto index dc07e3058c..19c41d6be4 160000 --- a/vendor/nimcrypto +++ b/vendor/nimcrypto @@ -1 +1 @@ -Subproject commit dc07e3058c6904eef965394493b6ea99aa2adefc +Subproject commit 19c41d6be4c00b4a2c8000583bd30cf8ceb5f4b1 From 1cacba3d4b0dde25a1134046f90d071d5bbaafac Mon Sep 17 00:00:00 2001 From: bhartnett <51288821+bhartnett@users.noreply.github.com> Date: Tue, 17 Jun 2025 15:20:47 +0800 Subject: [PATCH 089/138] Bump nim-bearssl to 11e798b62b8e6beabe958e048e9e24c7e0f9ee63 (#3411) --- vendor/nim-bearssl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vendor/nim-bearssl b/vendor/nim-bearssl index 667b40440a..11e798b62b 160000 --- a/vendor/nim-bearssl +++ b/vendor/nim-bearssl @@ -1 +1 @@ -Subproject commit 667b40440a53a58e9f922e29e20818720c62d9ac +Subproject commit 11e798b62b8e6beabe958e048e9e24c7e0f9ee63 
From e6caa75a75dbb41e4ccfdd536044f808bb06fe80 Mon Sep 17 00:00:00 2001 From: bhartnett <51288821+bhartnett@users.noreply.github.com> Date: Tue, 17 Jun 2025 15:21:16 +0800 Subject: [PATCH 090/138] Bump nim-secp256k1 to latest. (#3409) --- vendor/nim-secp256k1 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vendor/nim-secp256k1 b/vendor/nim-secp256k1 index 641902d492..f808ed5e7a 160000 --- a/vendor/nim-secp256k1 +++ b/vendor/nim-secp256k1 @@ -1 +1 @@ -Subproject commit 641902d492aff3910b0240217fa0b28b5471baad +Subproject commit f808ed5e7a7bfc42204ec7830f14b7a42b63c284 From 34cf2ecc98e9682b2932226e6ee002e98ffda98a Mon Sep 17 00:00:00 2001 From: bhartnett <51288821+bhartnett@users.noreply.github.com> Date: Tue, 17 Jun 2025 15:21:31 +0800 Subject: [PATCH 091/138] Bump nim-blscurve to latest. (#3408) --- vendor/nim-blscurve | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vendor/nim-blscurve b/vendor/nim-blscurve index 924bc5a186..bcfb3e77a2 160000 --- a/vendor/nim-blscurve +++ b/vendor/nim-blscurve @@ -1 +1 @@ -Subproject commit 924bc5a1861583b0032cfa6bc9665cc7642d7bd6 +Subproject commit bcfb3e77a2c5e1a02611ee4d03f3a655fe902eb1 From 3988a114541762515e4fbb26e9f60499840e6a0a Mon Sep 17 00:00:00 2001 From: bhartnett <51288821+bhartnett@users.noreply.github.com> Date: Tue, 17 Jun 2025 15:21:43 +0800 Subject: [PATCH 092/138] Bump nim-kzg4844 to latest. 
(#3410) --- vendor/nim-kzg4844 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vendor/nim-kzg4844 b/vendor/nim-kzg4844 index 33d2e9a5d4..644a19b987 160000 --- a/vendor/nim-kzg4844 +++ b/vendor/nim-kzg4844 @@ -1 +1 @@ -Subproject commit 33d2e9a5d4df1bbf4aca7cf00457bf15b1d657a6 +Subproject commit 644a19b9874516446411aa31a7747b81d21431e5 From e49395e2f7b4c9ec595c342121459df10eae4a84 Mon Sep 17 00:00:00 2001 From: andri lim Date: Tue, 17 Jun 2025 14:22:41 +0700 Subject: [PATCH 093/138] Fix wire protocol ticker loop and txhash broadcast handling (#3407) The timer of cleanup and block range update are not recreated properly. Causing one of the longer timer never triggered. Also improves tx hash receiver robustness against malicious peer. --- .../sync/wire_protocol/broadcast.nim | 70 +++++++++++-------- execution_chain/sync/wire_protocol/types.nim | 2 + 2 files changed, 44 insertions(+), 28 deletions(-) diff --git a/execution_chain/sync/wire_protocol/broadcast.nim b/execution_chain/sync/wire_protocol/broadcast.nim index ce7e0ab433..31410ad7c2 100644 --- a/execution_chain/sync/wire_protocol/broadcast.nim +++ b/execution_chain/sync/wire_protocol/broadcast.nim @@ -84,7 +84,7 @@ iterator peers69OrLater(wire: EthWireRef, random: bool = false): Peer = yield peer proc syncerRunning*(wire: EthWireRef): bool = - # Disable transactions gossip and processing when + # Disable transactions gossip and processing when # the syncer is still busy const thresholdTime = 3 * 15 @@ -94,10 +94,10 @@ proc syncerRunning*(wire: EthWireRef): bool = headerTime = wire.chain.latestHeader.timestamp let running = (nowTime - headerTime) > thresholdTime - if running != not wire.gossipEnabled: + if running != not wire.gossipEnabled: wire.gossipEnabled = not running notice "Transaction broadcast state changed", enabled = wire.gossipEnabled - + running proc handleTransactionsBroadcast*(wire: EthWireRef, @@ -148,18 +148,25 @@ proc handleTxHashesBroadcast*(wire: EthWireRef, return 
wire.reqisterAction("Handle broadcast transactions hashes"): + type + SizeType = object + size: uint64 + txType: byte + + let + numTx = packet.txHashes.len + var i = 0 + map: Table[Hash32, SizeType] - while i < packet.txHashes.len: + while i < numTx: var msg: PooledTransactionsRequest res: Opt[PooledTransactionsPacket] - sizes: seq[uint64] - types: seq[byte] sumSize = 0'u64 - while i < packet.txHashes.len: + while i < numTx: let size = packet.txSizes[i] if sumSize + size > SOFT_RESPONSE_LIMIT.uint64: break @@ -168,8 +175,10 @@ proc handleTxHashesBroadcast*(wire: EthWireRef, if txHash notin wire.txPool: msg.txHashes.add txHash sumSize += size - sizes.add size - types.add packet.txTypes[i] + map[txHash] = SizeType( + size: size, + txType: packet.txTypes[i], + ) awaitQuota(wire, hashLookupCost, "check transaction exists in pool") inc i @@ -187,24 +196,24 @@ proc handleTxHashesBroadcast*(wire: EthWireRef, let ptx = res.get() - for i, tx in ptx.transactions: + for tx in ptx.transactions: # If we receive any blob transactions missing sidecars, or with # sidecars that don't correspond to the versioned hashes reported # in the header, disconnect from the sending peer. 
- if tx.tx.txType.byte != types[i]: - debug "Protocol Breach: Received transaction with type differ from announced", - remote=peer.remote, clientId=peer.clientId - await peer.disconnect(BreachOfProtocol) - return - let (size, hash) = getEncodedLengthAndHash(tx) - if size.uint64 != sizes[i]: - debug "Protocol Breach: Received transaction with size differ from announced", + map.withValue(hash, val) do: + if tx.tx.txType.byte != val.txType: + debug "Protocol Breach: Received transaction with type differ from announced", remote=peer.remote, clientId=peer.clientId - await peer.disconnect(BreachOfProtocol) - return + await peer.disconnect(BreachOfProtocol) + return - if hash != msg.txHashes[i]: + if size.uint64 != val.size: + debug "Protocol Breach: Received transaction with size differ from announced", + remote=peer.remote, clientId=peer.clientId + await peer.disconnect(BreachOfProtocol) + return + do: debug "Protocol Breach: Received transaction with hash differ from announced", remote=peer.remote, clientId=peer.clientId await peer.disconnect(BreachOfProtocol) @@ -219,14 +228,14 @@ proc handleTxHashesBroadcast*(wire: EthWireRef, if tx.blobsBundle.wrapperVersion == WrapperVersionEIP4844: validateBlobTransactionWrapper4844(tx).isOkOr: - debug "Protocol Breach: Sidecar validation error", msg=error, + debug "Protocol Breach: EIP-4844 sidecar validation error", msg=error, remote=peer.remote, clientId=peer.clientId await peer.disconnect(BreachOfProtocol) return if tx.blobsBundle.wrapperVersion == WrapperVersionEIP7594: validateBlobTransactionWrapper7594(tx).isOkOr: - debug "Protocol Breach: Sidecar validation error", msg=error, + debug "Protocol Breach: EIP-7594 sidecar validation error", msg=error, remote=peer.remote, clientId=peer.clientId await peer.disconnect(BreachOfProtocol) return @@ -238,12 +247,17 @@ proc handleTxHashesBroadcast*(wire: EthWireRef, proc tickerLoop*(wire: EthWireRef) {.async: (raises: [CancelledError]).} = while true: + # Create or replenish timer + 
if wire.cleanupTimer.isNil or wire.cleanupTimer.finished: + wire.cleanupTimer = sleepAsync(cleanupTicker) + + if wire.brUpdateTimer.isNil or wire.brUpdateTimer.finished: + wire.brUpdateTimer = sleepAsync(blockRangeUpdateTicker) + let - cleanup = sleepAsync(cleanupTicker) - update = sleepAsync(blockRangeUpdateTicker) - res = await one(cleanup, update) + res = await one(wire.cleanupTimer, wire.brUpdateTimer) - if res == cleanup: + if res == wire.cleanupTimer: wire.reqisterAction("Periodical cleanup"): var expireds: seq[Hash32] for key, seen in wire.seenTransactions: @@ -255,7 +269,7 @@ proc tickerLoop*(wire: EthWireRef) {.async: (raises: [CancelledError]).} = wire.seenTransactions.del(expire) awaitQuota(wire, hashLookupCost, "broadcast transactions hashes") - if res == update: + if res == wire.brUpdateTimer: wire.reqisterAction("Periodical blockRangeUpdate"): let packet = BlockRangeUpdatePacket( diff --git a/execution_chain/sync/wire_protocol/types.nim b/execution_chain/sync/wire_protocol/types.nim index 2bb38d92dd..c561195418 100644 --- a/execution_chain/sync/wire_protocol/types.nim +++ b/execution_chain/sync/wire_protocol/types.nim @@ -131,3 +131,5 @@ type actionHeartbeat*: Future[void].Raising([CancelledError]) actionQueue*: AsyncQueue[ActionHandler] gossipEnabled*: bool + cleanupTimer*: Future[void].Raising([CancelledError]) + brUpdateTimer*: Future[void].Raising([CancelledError]) From 4c3e7295445b6923e0429d62677f0d4a04485372 Mon Sep 17 00:00:00 2001 From: bhartnett <51288821+bhartnett@users.noreply.github.com> Date: Wed, 18 Jun 2025 09:10:56 +0800 Subject: [PATCH 094/138] Update nim-rocksdb to v10.2.1.0 (#3370) --- vendor/nim-rocksdb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vendor/nim-rocksdb b/vendor/nim-rocksdb index 0e0c624bef..c8cc5506c8 160000 --- a/vendor/nim-rocksdb +++ b/vendor/nim-rocksdb @@ -1 +1 @@ -Subproject commit 0e0c624befaf00b6a1b43935eb9a80006618196f +Subproject commit c8cc5506c8dd28e78591701ace81dc113bf9ceb7 From 
54943cbc82ed455adc7253c98be16f354c11f10a Mon Sep 17 00:00:00 2001 From: kdeme <7857583+kdeme@users.noreply.github.com> Date: Thu, 19 Jun 2025 16:10:32 +0200 Subject: [PATCH 095/138] Portal: default disable the Portal state network (#3387) --- portal/client/nimbus_portal_client_conf.nim | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/portal/client/nimbus_portal_client_conf.nim b/portal/client/nimbus_portal_client_conf.nim index b9afa1dffc..ffe7e58c66 100644 --- a/portal/client/nimbus_portal_client_conf.nim +++ b/portal/client/nimbus_portal_client_conf.nim @@ -106,8 +106,7 @@ type portalSubnetworks* {. desc: "Select which networks (Portal sub-protocols) to enable", - defaultValue: - {PortalSubnetwork.history, PortalSubnetwork.state, PortalSubnetwork.beacon}, + defaultValue: {PortalSubnetwork.history, PortalSubnetwork.beacon}, name: "portal-subnetworks" .}: set[PortalSubnetwork] From 3a009158d3e381562cd4ff855033533dbeadd404 Mon Sep 17 00:00:00 2001 From: Jacek Sieka Date: Fri, 20 Jun 2025 09:59:51 +0200 Subject: [PATCH 096/138] defer gc during block processing (#3384) During block processing, we allocate lots and lots of small objects (VertexRef and friends) which causes overhead as the GC is run without much benefit - this can be up to 15% of CPU time during block import when running with a large vertex cache. --- .../core/executor/process_block.nim | 18 ++++++++++++------ execution_chain/utils/utils.nim | 15 +++++++++++++++ 2 files changed, 27 insertions(+), 6 deletions(-) diff --git a/execution_chain/core/executor/process_block.nim b/execution_chain/core/executor/process_block.nim index ce1b0262db..fe4f4cc99f 100644 --- a/execution_chain/core/executor/process_block.nim +++ b/execution_chain/core/executor/process_block.nim @@ -280,15 +280,21 @@ proc processBlock*( taskpool: Taskpool = nil, ): Result[void, string] = ## Generalised function to processes `blk` for any network. 
- ?vmState.procBlkPreamble(blk, skipValidation, skipReceipts, skipUncles, taskpool) - # EIP-3675: no reward for miner in POA/POS - if not vmState.com.proofOfStake(blk.header, vmState.ledger.txFrame): - vmState.calculateReward(blk.header, blk.uncles) + # Processing a block involves making lots and lots of small memory allocations + # meaning that GC overhead can make up for 15% of processing time in extreme + # cases - since each block is bounded in the amount of memory needed, we can + # run collection once per block instead. + deferGc: + ?vmState.procBlkPreamble(blk, skipValidation, skipReceipts, skipUncles, taskpool) - ?vmState.procBlkEpilogue(blk, skipValidation, skipReceipts, skipStateRootCheck) + # EIP-3675: no reward for miner in POA/POS + if not vmState.com.proofOfStake(blk.header, vmState.ledger.txFrame): + vmState.calculateReward(blk.header, blk.uncles) - ok() + ?vmState.procBlkEpilogue(blk, skipValidation, skipReceipts, skipStateRootCheck) + + ok() # ------------------------------------------------------------------------------ # End diff --git a/execution_chain/utils/utils.nim b/execution_chain/utils/utils.nim index 50fc3f3350..3d818e9c2c 100644 --- a/execution_chain/utils/utils.nim +++ b/execution_chain/utils/utils.nim @@ -173,3 +173,18 @@ func weiAmount*(w: Withdrawal): UInt256 = func isGenesis*(header: Header): bool = header.number == 0'u64 and header.parentHash == GENESIS_PARENT_HASH + +template deferGc*(body: untyped): untyped = + when declared(GC_disable): + GC_disable() + + when declared(GC_enable): + defer: + GC_enable() + # Perform a small allocation which indirectly runs the garbage collector - + # unlike GC_fullCollect, this will use the usual nim heuristic for running + # the cycle colllector (which would be extremely expensive to run on each + # collection) + discard newSeq[int](1) + + body From 341384792794fb8542054540405d58db2d2384a6 Mon Sep 17 00:00:00 2001 From: Jacek Sieka Date: Fri, 20 Jun 2025 11:23:24 +0200 Subject: [PATCH 097/138] 
faststreams: bump (#3418) * faststreams: bump * faststreams: bugfixes across the board, slightly less memory usage due to buffer alignment fix * stew: cleanups * also testutils --- scripts/make_states.sh | 6 ++++++ vendor/nim-faststreams | 2 +- vendor/nim-stew | 2 +- vendor/nim-testutils | 2 +- 4 files changed, 9 insertions(+), 3 deletions(-) diff --git a/scripts/make_states.sh b/scripts/make_states.sh index 1c1f9bb288..4a72ea9bf1 100755 --- a/scripts/make_states.sh +++ b/scripts/make_states.sh @@ -29,6 +29,7 @@ mkdir -p "$DATA_DIR" while true; do + SECONDS=0 "$SCRIPT_DIR/../build/nimbus_execution_client" import \ --data-dir:"${DATA_DIR}" \ --era1-dir:"${ERA_DIR}" \ @@ -37,4 +38,9 @@ do --max-blocks:${MAX_BLOCKS:-1000000} "$@" cp -ar "${DATA_DIR}" "${DATA_DIR}-$(printf "%04d" $counter)" counter=$((counter+1)) + + if (( $SECONDS < 10 )); then + echo "Done" + exit 0 + fi done diff --git a/vendor/nim-faststreams b/vendor/nim-faststreams index 081ddc2cb0..308b9803ec 160000 --- a/vendor/nim-faststreams +++ b/vendor/nim-faststreams @@ -1 +1 @@ -Subproject commit 081ddc2cb0c6b6d066120bec26de3ab9db23404e +Subproject commit 308b9803ec9f38e1f2447d59557ef160185664a5 diff --git a/vendor/nim-stew b/vendor/nim-stew index 687d1b4ab1..e574001496 160000 --- a/vendor/nim-stew +++ b/vendor/nim-stew @@ -1 +1 @@ -Subproject commit 687d1b4ab1a91e6cc9c92e4fd4d98bec7874c259 +Subproject commit e5740014961438610d336cd81706582dbf2c96f0 diff --git a/vendor/nim-testutils b/vendor/nim-testutils index c36724c469..382d740ac9 160000 --- a/vendor/nim-testutils +++ b/vendor/nim-testutils @@ -1 +1 @@ -Subproject commit c36724c469b657435f40fb7d365ad14640341606 +Subproject commit 382d740ac9fc0d162a120bb07b1cb8afd17a8c01 From 60e98921b376fa92e5174919dda1adf1542462ed Mon Sep 17 00:00:00 2001 From: Jordan Hrycaj Date: Sun, 22 Jun 2025 14:19:28 +0000 Subject: [PATCH 098/138] Beacon sync maint update (#3421) * Code cosmetics, docu/comment and logging updates, etc. 
details + renamed `*_staged/*_fetch.nim`-> `*_staged/staged_fetch.nim` + extracted helper functions into `*_staged/staged_helpers.nim` + remove some debugging statements from `update.nim` * Rename `headers_staged.nim` -> `headers.nim` and clean up sub-modules why Simplifies source file layout details Rename `headers_staged/staged_*.nim` -> `headers/headers_*.nim` * Rename `blocks_staged.nim` -> `blocks.nim` and clean up sub-modules why Simplifies source file layout details Rename `blocks_staged/staged_*.nim` -> `blocks/blocks_*.nim` --- execution_chain/sync/beacon/worker.nim | 40 ++++------ .../worker/{blocks_staged.nim => blocks.nim} | 46 ++++++----- .../blocks_blocks.nim} | 41 +++------- .../blocks_fetch.nim} | 38 +++------ .../beacon/worker/blocks/blocks_helpers.nim | 50 ++++++++++++ .../beacon/worker/blocks/blocks_import.nim | 49 ++++++++++++ .../blocks_queue.nim} | 0 .../worker/{ => blocks}/blocks_unproc.nim | 2 +- .../{headers_staged.nim => headers.nim} | 18 +++-- .../headers_fetch.nim | 52 ++++-------- .../headers_headers.nim} | 36 ++------- .../beacon/worker/headers/headers_helpers.nim | 64 +++++++++++++++ .../headers_queue.nim} | 0 .../worker/{ => headers}/headers_unproc.nim | 2 +- .../sync/beacon/worker/helpers.nim | 5 +- .../sync/beacon/worker/start_stop.nim | 10 +-- execution_chain/sync/beacon/worker/update.nim | 80 +++++++------------ .../sync/beacon/worker/update/metrics.nim | 4 +- .../sync/beacon/worker/update/ticker.nim | 4 +- execution_chain/sync/beacon/worker_desc.nim | 28 ++++--- execution_chain/sync/sync_desc.nim | 20 ++--- execution_chain/sync/sync_sched.nim | 7 +- 22 files changed, 328 insertions(+), 268 deletions(-) rename execution_chain/sync/beacon/worker/{blocks_staged.nim => blocks.nim} (92%) rename execution_chain/sync/beacon/worker/{blocks_staged/staged_blocks.nim => blocks/blocks_blocks.nim} (87%) rename execution_chain/sync/beacon/worker/{blocks_staged/bodies_fetch.nim => blocks/blocks_fetch.nim} (72%) create mode 100644 
execution_chain/sync/beacon/worker/blocks/blocks_helpers.nim create mode 100644 execution_chain/sync/beacon/worker/blocks/blocks_import.nim rename execution_chain/sync/beacon/worker/{blocks_staged/staged_queue.nim => blocks/blocks_queue.nim} (100%) rename execution_chain/sync/beacon/worker/{ => blocks}/blocks_unproc.nim (99%) rename execution_chain/sync/beacon/worker/{headers_staged.nim => headers.nim} (95%) rename execution_chain/sync/beacon/worker/{headers_staged => headers}/headers_fetch.nim (73%) rename execution_chain/sync/beacon/worker/{headers_staged/staged_headers.nim => headers/headers_headers.nim} (82%) create mode 100644 execution_chain/sync/beacon/worker/headers/headers_helpers.nim rename execution_chain/sync/beacon/worker/{headers_staged/staged_queue.nim => headers/headers_queue.nim} (100%) rename execution_chain/sync/beacon/worker/{ => headers}/headers_unproc.nim (99%) diff --git a/execution_chain/sync/beacon/worker.nim b/execution_chain/sync/beacon/worker.nim index 1967a5412a..b90de754f1 100644 --- a/execution_chain/sync/beacon/worker.nim +++ b/execution_chain/sync/beacon/worker.nim @@ -17,7 +17,7 @@ import pkg/stew/[interval_set, sorted_set], ../../common, ./worker/update/[metrics, ticker], - ./worker/[blocks_staged, headers_staged, start_stop, update], + ./worker/[blocks, headers, start_stop, update], ./worker_desc # ------------------------------------------------------------------------------ @@ -29,8 +29,8 @@ proc napUnlessSomethingToCollect( ): Future[bool] {.async: (raises: []).} = ## When idle, save cpu cycles waiting for something to do. if buddy.ctx.hibernate or # not activated yet? 
- not (buddy.headersStagedCollectOk() or # something on TODO list - buddy.blocksStagedCollectOk()): + not (buddy.headersCollectOk() or # something on TODO list + buddy.blocksCollectOk()): try: await sleepAsync workerIdleWaitInterval except CancelledError: @@ -75,8 +75,7 @@ proc start*(buddy: BeaconBuddyRef; info: static[string]): bool = proc stop*(buddy: BeaconBuddyRef; info: static[string]) = ## Clean up this peer if not buddy.ctx.hibernate: debug info & ": release peer", peer=buddy.peer, - nSyncPeers=(buddy.ctx.pool.nBuddies-1), syncState=($buddy.syncState), - nLaps=buddy.only.nMultiLoop, lastIdleGap=buddy.only.multiRunIdle.toStr + nSyncPeers=(buddy.ctx.pool.nBuddies-1), syncState=($buddy.syncState) buddy.stopBuddy() # -------------------- @@ -107,6 +106,7 @@ proc runTicker*(ctx: BeaconCtxRef; info: static[string]) = ctx.updateMetrics() ctx.updateTicker() + proc runDaemon*( ctx: BeaconCtxRef; info: static[string]; @@ -124,10 +124,10 @@ proc runDaemon*( return # Execute staged block records. - if ctx.blocksStagedProcessOk(): + if ctx.blocksUnstageOk(): # Import bodies from the `staged` queue. - discard await ctx.blocksStagedProcess info + discard await ctx.blocksUnstage info if not ctx.daemon or # Implied by external sync shutdown? ctx.poolMode: # Oops, re-org needed? @@ -170,48 +170,40 @@ proc runPeer*( ## This peer worker method is repeatedly invoked (exactly one per peer) while ## the `buddy.ctrl.poolMode` flag is set `false`. ## - if 0 < buddy.only.nMultiLoop: # statistics/debugging - buddy.only.multiRunIdle = Moment.now() - buddy.only.stoppedMultiRun - buddy.only.nMultiLoop.inc # statistics/debugging - if not await buddy.napUnlessSomethingToCollect(): # Download and process headers and blocks - while buddy.headersStagedCollectOk(): + while buddy.headersCollectOk(): # Collect headers and either stash them on the header chain cache # directly, or stage on the header queue to get them serialised and # stashed, later. 
- await buddy.headersStagedCollect info + await buddy.headersCollect info # Store serialised headers from the `staged` queue onto the header # chain cache. - if not buddy.headersStagedProcess info: + if not buddy.headersUnstage info: # Need to proceed with another peer (e.g. gap between queue and # header chain cache.) break + # End `while()` + # Fetch bodies and combine them with headers to blocks to be staged. These # staged blocks are then excuted by the daemon process (no `peer` needed.) - while buddy.blocksStagedCollectOk(): + while buddy.blocksCollectOk(): # Collect bodies and either import them via `FC` module, or stage on # the blocks queue to get them serialised and imported, later. - await buddy.blocksStagedCollect info + await buddy.blocksCollect info # Import bodies from the `staged` queue. - if not await buddy.blocksStagedProcess info: + if not await buddy.blocksUnstage info: # Need to proceed with another peer (e.g. gap between top imported # block and blocks queue.) break - # Note that it is important **not** to leave this function to be - # re-invoked by the scheduler unless necessary. While the time gap - # until restarting is typically a few millisecs, there are always - # outliers which well exceed several seconds. This seems to let - # remote peers run into timeouts so they eventually get lost early. 
- - buddy.only.stoppedMultiRun = Moment.now() # statistics/debugging + # End `while()` # ------------------------------------------------------------------------------ # End diff --git a/execution_chain/sync/beacon/worker/blocks_staged.nim b/execution_chain/sync/beacon/worker/blocks.nim similarity index 92% rename from execution_chain/sync/beacon/worker/blocks_staged.nim rename to execution_chain/sync/beacon/worker/blocks.nim index 7a7e5e5d9b..345bfe27c3 100644 --- a/execution_chain/sync/beacon/worker/blocks_staged.nim +++ b/execution_chain/sync/beacon/worker/blocks.nim @@ -16,8 +16,10 @@ import pkg/stew/[interval_set, sorted_set], ../../../networking/p2p, ../worker_desc, - ./blocks_staged/[bodies_fetch, staged_blocks], - ./blocks_unproc + ./blocks/[blocks_blocks, blocks_helpers, blocks_queue, blocks_unproc] + +export + blocks_queue, blocks_unproc # ------------------------------------------------------------------------------ # Private function(s) @@ -25,7 +27,7 @@ import proc blocksStagedProcessImpl( ctx: BeaconCtxRef; - maybePeer: Opt[Peer]; + maybePeer: Opt[BeaconBuddyRef]; info: static[string]; ): Future[bool] {.async: (raises: []).} = @@ -52,7 +54,7 @@ proc blocksStagedProcessImpl( # round: no unprocessed block number range precedes the least staged block. 
let minNum = qItem.data.blocks[0].header.number if ctx.subState.top + 1 < minNum: - trace info & ": block queue not ready yet", peer=($maybePeer), + trace info & ": block queue not ready yet", peer=maybePeer.toStr, topImported=ctx.subState.top.bnStr, qItem=qItem.data.blocks.bnStr, nStagedQ=ctx.blk.staged.len, nSyncPeers=ctx.pool.nBuddies switchPeer = true # there is a gap -- come back later @@ -79,7 +81,7 @@ proc blocksStagedProcessImpl( nStagedQ=ctx.blk.staged.len, nSyncPeers=ctx.pool.nBuddies, switchPeer elif 0 < ctx.blk.staged.len and not switchPeer: - trace info & ": no blocks unqueued", peer=($maybePeer), + trace info & ": no blocks unqueued", peer=maybePeer.toStr, topImported=ctx.subState.top.bnStr, nStagedQ=ctx.blk.staged.len, nSyncPeers=ctx.pool.nBuddies @@ -89,26 +91,18 @@ proc blocksStagedProcessImpl( # Public functions # ------------------------------------------------------------------------------ -func blocksStagedCollectOk*(buddy: BeaconBuddyRef): bool = +func blocksCollectOk*(buddy: BeaconBuddyRef): bool = ## Check whether body records can be fetched and imported or stored ## on the `staged` queue. 
## if buddy.ctrl.running: let ctx = buddy.ctx if 0 < ctx.blocksUnprocAvail() and - not ctx.blocksModeStopped(): + not ctx.blkSessionStopped(): return true false -proc blocksStagedProcessOk*(ctx: BeaconCtxRef): bool = - ## Check whether import processing is possible - ## - not ctx.poolMode and - 0 < ctx.blk.staged.len - -# -------------- - -proc blocksStagedCollect*( +proc blocksCollect*( buddy: BeaconBuddyRef; info: static[string]; ) {.async: (raises: []).} = @@ -168,7 +162,7 @@ proc blocksStagedCollect*( ctx.pool.seenData = true # blocks data exist # Import blocks (no staging) - await ctx.blocksImport(Opt.some(peer), blocks, buddy.peerID, info) + await ctx.blocksImport(Opt.some(buddy), blocks, buddy.peerID, info) # Import may be incomplete, so a partial roll back may be needed let lastBn = blocks[^1].header.number @@ -225,24 +219,32 @@ proc blocksStagedCollect*( info "Queued/staged or imported blocks", topImported=ctx.subState.top.bnStr, - unprocBottom=(if ctx.blocksModeStopped(): "n/a" + unprocBottom=(if ctx.blkSessionStopped(): "n/a" else: ctx.blocksUnprocAvailBottom.bnStr), nQueued, nImported, nStagedQ=ctx.blk.staged.len, nSyncPeers=ctx.pool.nBuddies +# -------------- -template blocksStagedProcess*( +proc blocksUnstageOk*(ctx: BeaconCtxRef): bool = + ## Check whether import processing is possible + ## + not ctx.poolMode and + 0 < ctx.blk.staged.len + +template blocksUnstage*( ctx: BeaconCtxRef; info: static[string]; ): auto = - ctx.blocksStagedProcessImpl(Opt.none(Peer), info) + ctx.blocksStagedProcessImpl(Opt.none(BeaconBuddyRef), info) -template blocksStagedProcess*( +template blocksUnstage*( buddy: BeaconBuddyRef; info: static[string]; ): auto = - buddy.ctx.blocksStagedProcessImpl(Opt.some(buddy.peer), info) + buddy.ctx.blocksStagedProcessImpl(Opt.some(buddy), info) +# -------------- proc blocksStagedReorg*(ctx: BeaconCtxRef; info: static[string]) = ## Some pool mode intervention. 
diff --git a/execution_chain/sync/beacon/worker/blocks_staged/staged_blocks.nim b/execution_chain/sync/beacon/worker/blocks/blocks_blocks.nim similarity index 87% rename from execution_chain/sync/beacon/worker/blocks_staged/staged_blocks.nim rename to execution_chain/sync/beacon/worker/blocks/blocks_blocks.nim index 99c6aab1af..7229fa8edc 100644 --- a/execution_chain/sync/beacon/worker/blocks_staged/staged_blocks.nim +++ b/execution_chain/sync/beacon/worker/blocks/blocks_blocks.nim @@ -17,8 +17,8 @@ import ../../../../networking/p2p, ../../../wire_protocol/types, ../../worker_desc, - ../[blocks_unproc, update], - ./bodies_fetch + ../update, + ./[blocks_fetch, blocks_helpers, blocks_import, blocks_unproc] # ------------------------------------------------------------------------------ # Private helpers @@ -72,7 +72,7 @@ proc blocksFetchCheckImpl( request.blockHashes[^1] = blocks[^1].header.computeBlockHash # Fetch bodies - let bodies = (await buddy.bodiesFetch(request, info)).valueOr: + let bodies = (await buddy.fetchBodies request).valueOr: return Opt.none(seq[EthBlock]) if buddy.ctrl.stopped: return Opt.none(seq[EthBlock]) @@ -93,8 +93,8 @@ proc blocksFetchCheckImpl( break checkTxLenOk # Oops, cut off the rest blocks.setLen(n) # curb off junk - buddy.fetchRegisterError() - trace info & ": cut off junk blocks", peer, iv, n, + buddy.bdyFetchRegisterError() + trace info & ": Cut off junk blocks", peer, iv, n, nTxs=bodies[n].transactions.len, nBodies, bdyErrors=buddy.bdyErrors break loop @@ -121,13 +121,6 @@ proc blocksFetchCheckImpl( # Public functions # ------------------------------------------------------------------------------ -func blocksModeStopped*(ctx: BeaconCtxRef): bool = - ## Helper, checks whether there is a general stop conditions based on - ## state settings (not on sync peer ctrl as `buddy.ctrl.running`.) 
- ctx.poolMode or - ctx.pool.lastState != blocks - - proc blocksFetch*( buddy: BeaconBuddyRef; num: uint; @@ -153,7 +146,7 @@ proc blocksFetch*( # Job might have been cancelled or completed while downloading blocks. # If so, no more bookkeeping of blocks must take place. The *books* # might have been reset and prepared for the next stage. - if ctx.blocksModeStopped(): + if ctx.blkSessionStopped(): return Opt.none(seq[EthBlock]) # stop, exit this function # Commit blocks received @@ -167,7 +160,7 @@ proc blocksFetch*( proc blocksImport*( ctx: BeaconCtxRef; - maybePeer: Opt[Peer]; + maybePeer: Opt[BeaconBuddyRef]; blocks: seq[EthBlock]; peerID: Hash; info: static[string]; @@ -179,7 +172,7 @@ proc blocksImport*( let iv = BnRange.new(blocks[0].header.number, blocks[^1].header.number) doAssert iv.len == blocks.len.uint64 - trace info & ": Start importing blocks", peer=($maybePeer), iv, + trace info & ": Start importing blocks", peer=maybePeer.toStr, iv, nBlocks=iv.len, base=ctx.chain.baseNumber.bnStr, head=ctx.chain.latestNumber.bnStr @@ -188,16 +181,8 @@ proc blocksImport*( for n in 0 ..< blocks.len: let nBn = blocks[n].header.number - if nBn <= ctx.chain.baseNumber: - trace info & ": ignoring block less eq. base", n, iv, nBlocks=iv.len, - nthBn=nBn.bnStr, nthHash=ctx.getNthHash(blocks, n).short, - B=ctx.chain.baseNumber.bnStr, L=ctx.chain.latestNumber.bnStr - - ctx.subState.top = nBn # well, not really imported - continue - - try: - (await ctx.chain.queueImportBlock blocks[n]).isOkOr: + (await ctx.importBlock(maybePeer, blocks[n], peerID)).isOkOr: + if not error.cancelled: isError = true # Mark peer that produced that unusable headers list as a zombie @@ -231,11 +216,9 @@ proc blocksImport*( head=ctx.chain.latestNumber.bnStr, blkFailCount=ctx.subState.procFailCount, `error`=error - break loop # stop - # isOk => next instruction - except CancelledError: - break loop # shutdown? 
+ break loop + # isOk => next instruction ctx.subState.top = nBn # Block imported OK # Allow pseudo/async thread switch. diff --git a/execution_chain/sync/beacon/worker/blocks_staged/bodies_fetch.nim b/execution_chain/sync/beacon/worker/blocks/blocks_fetch.nim similarity index 72% rename from execution_chain/sync/beacon/worker/blocks_staged/bodies_fetch.nim rename to execution_chain/sync/beacon/worker/blocks/blocks_fetch.nim index 0ab6cc568a..9601559e16 100644 --- a/execution_chain/sync/beacon/worker/blocks_staged/bodies_fetch.nim +++ b/execution_chain/sync/beacon/worker/blocks/blocks_fetch.nim @@ -15,31 +15,16 @@ import pkg/eth/common, pkg/stew/interval_set, ../../../wire_protocol, - ../../worker_desc + ../../worker_desc, + ./blocks_helpers # ------------------------------------------------------------------------------ # Public functions # ------------------------------------------------------------------------------ -func bdyErrors*(buddy: BeaconBuddyRef): string = - $buddy.only.nRespErrors.blk & "/" & $buddy.nBlkProcErrors() - -proc fetchRegisterError*(buddy: BeaconBuddyRef, slowPeer = false) = - buddy.only.nRespErrors.blk.inc - if nFetchBodiesErrThreshold < buddy.only.nRespErrors.blk: - if buddy.ctx.pool.nBuddies == 1 and slowPeer: - # Remember that the current peer is the last one and is lablelled slow. - # It would have been zombified if it were not the last one. This can be - # used in functions -- depending on context -- that will trigger if the - # if the pool of available sync peers becomes empty. - buddy.ctx.pool.lastSlowPeer = Opt.some(buddy.peerID) - else: - buddy.ctrl.zombie = true # abandon slow peer unless last one - -proc bodiesFetch*( +proc fetchBodies*( buddy: BeaconBuddyRef; request: BlockBodiesRequest; - info: static[string]; ): Future[Result[seq[BlockBody],void]] {.async: (raises: []).} = ## Fetch bodies from the network. 
@@ -48,7 +33,8 @@ proc bodiesFetch*( start = Moment.now() nReq = request.blockHashes.len - trace trEthSendSendingGetBlockBodies, peer, nReq, bdyErrors=buddy.bdyErrors + trace trEthSendSendingGetBlockBodies, + peer, nReq, bdyErrors=buddy.bdyErrors var resp: Opt[BlockBodiesPacket] try: @@ -56,12 +42,14 @@ proc bodiesFetch*( except PeerDisconnected as e: buddy.only.nRespErrors.blk.inc buddy.ctrl.zombie = true - `info` info & " error", peer, nReq, elapsed=(Moment.now() - start).toStr, + info trEthRecvReceivedBlockBodies & " error", peer, nReq, + elapsed=(Moment.now() - start).toStr, error=($e.name), msg=e.msg, bdyErrors=buddy.bdyErrors return err() except CatchableError as e: - buddy.fetchRegisterError() - `info` info & " error", peer, nReq, elapsed=(Moment.now() - start).toStr, + buddy.bdyFetchRegisterError() + info trEthRecvReceivedBlockBodies & " error", peer, nReq, + elapsed=(Moment.now() - start).toStr, error=($e.name), msg=e.msg, bdyErrors=buddy.bdyErrors return err() @@ -76,7 +64,7 @@ proc bodiesFetch*( # Evaluate result if resp.isNone or buddy.ctrl.stopped: - buddy.fetchRegisterError() + buddy.bdyFetchRegisterError() trace trEthRecvReceivedBlockBodies, peer, nReq, nResp=0, elapsed=elapsed.toStr, syncState=($buddy.syncState), bdyErrors=buddy.bdyErrors @@ -84,7 +72,7 @@ proc bodiesFetch*( let b: seq[BlockBody] = resp.get.bodies if b.len == 0 or nReq < b.len: - buddy.fetchRegisterError() + buddy.bdyFetchRegisterError() trace trEthRecvReceivedBlockBodies, peer, nReq, nResp=b.len, elapsed=elapsed.toStr, syncState=($buddy.syncState), nRespErrors=buddy.only.nRespErrors.blk @@ -94,7 +82,7 @@ proc bodiesFetch*( # mimimum share of the number of requested headers expected, typically 10%. 
if fetchBodiesErrTimeout < elapsed or b.len.uint64 * 100 < nReq.uint64 * fetchBodiesMinResponsePC: - buddy.fetchRegisterError(slowPeer=true) + buddy.bdyFetchRegisterError(slowPeer=true) else: buddy.only.nRespErrors.blk = 0 # reset error count buddy.ctx.pool.lastSlowPeer = Opt.none(Hash) # not last one or not error diff --git a/execution_chain/sync/beacon/worker/blocks/blocks_helpers.nim b/execution_chain/sync/beacon/worker/blocks/blocks_helpers.nim new file mode 100644 index 0000000000..dc35f94cc3 --- /dev/null +++ b/execution_chain/sync/beacon/worker/blocks/blocks_helpers.nim @@ -0,0 +1,50 @@ +# Nimbus +# Copyright (c) 2023-2025 Status Research & Development GmbH +# Licensed and distributed under either of +# * MIT license (license terms in the root directory or at +# https://opensource.org/licenses/MIT). +# * Apache v2 license (license terms in the root directory or at +# https://www.apache.org/licenses/LICENSE-2.0). +# at your option. This file may not be copied, modified, or distributed +# except according to those terms. + +{.push raises:[].} + +import + ../../worker_desc + +# ------------------------------------------------------------------------------ +# Public functions +# ------------------------------------------------------------------------------ + +func toStr*(w: Opt[BeaconBuddyRef]): string = + if w.isSome: $w.value.peer else: "n/a" + +# ------------- + +func bdyErrors*(buddy: BeaconBuddyRef): string = + $buddy.only.nRespErrors.blk & "/" & $buddy.nBlkProcErrors() + +proc bdyFetchRegisterError*(buddy: BeaconBuddyRef, slowPeer = false) = + buddy.only.nRespErrors.blk.inc + if nFetchBodiesErrThreshold < buddy.only.nRespErrors.blk: + if buddy.ctx.pool.nBuddies == 1 and slowPeer: + # Remember that the current peer is the last one and is lablelled slow. + # It would have been zombified if it were not the last one. This can be + # used in functions -- depending on context -- that will trigger if the + # if the pool of available sync peers becomes empty. 
+ buddy.ctx.pool.lastSlowPeer = Opt.some(buddy.peerID) + else: + buddy.ctrl.zombie = true # abandon slow peer unless last one + +# ------------- + +func blkSessionStopped*(ctx: BeaconCtxRef): bool = + ## Helper, checks whether there is a general stop conditions based on + ## state settings (not on sync peer ctrl as `buddy.ctrl.running`.) + ctx.poolMode or + ctx.pool.lastState != SyncState.blocks + +# ------------------------------------------------------------------------------ +# End +# ------------------------------------------------------------------------------ diff --git a/execution_chain/sync/beacon/worker/blocks/blocks_import.nim b/execution_chain/sync/beacon/worker/blocks/blocks_import.nim new file mode 100644 index 0000000000..4224546bbf --- /dev/null +++ b/execution_chain/sync/beacon/worker/blocks/blocks_import.nim @@ -0,0 +1,49 @@ +# Nimbus +# Copyright (c) 2023-2025 Status Research & Development GmbH +# Licensed and distributed under either of +# * MIT license (license terms in the root directory or at +# https://opensource.org/licenses/MIT). +# * Apache v2 license (license terms in the root directory or at +# https://www.apache.org/licenses/LICENSE-2.0). +# at your option. This file may not be copied, modified, or distributed +# except according to those terms. + +{.push raises:[].} + +import + pkg/[chronicles, chronos, results], + pkg/eth/common, + ../../../wire_protocol, + ../../worker_desc, + ./blocks_helpers + +# ------------------------------------------------------------------------------ +# Public function +# ------------------------------------------------------------------------------ + +proc importBlock*( + ctx: BeaconCtxRef; + maybePeer: Opt[BeaconBuddyRef]; + blk: EthBlock; + effPeerID: Hash; + ): Future[Result[void,ImportBlockError]] + {.async: (raises: []).} = + ## Wrapper around blocks importer + + if blk.header.number <= ctx.chain.baseNumber: + trace "Ignoring block less eq. 
base", peer=maybePeer.toStr, blk=blk.bnStr, + B=ctx.chain.baseNumber.bnStr, L=ctx.chain.latestNumber.bnStr + return ok() + + try: + (await ctx.chain.queueImportBlock blk).isOkOr: + return err((error,false)) + except CancelledError as e: + return err(($e.name & ": " & e.msg, true)) + + return ok() + +# ------------------------------------------------------------------------------ +# End +# ------------------------------------------------------------------------------ + diff --git a/execution_chain/sync/beacon/worker/blocks_staged/staged_queue.nim b/execution_chain/sync/beacon/worker/blocks/blocks_queue.nim similarity index 100% rename from execution_chain/sync/beacon/worker/blocks_staged/staged_queue.nim rename to execution_chain/sync/beacon/worker/blocks/blocks_queue.nim diff --git a/execution_chain/sync/beacon/worker/blocks_unproc.nim b/execution_chain/sync/beacon/worker/blocks/blocks_unproc.nim similarity index 99% rename from execution_chain/sync/beacon/worker/blocks_unproc.nim rename to execution_chain/sync/beacon/worker/blocks/blocks_unproc.nim index 200e370bf1..288ffb25f7 100644 --- a/execution_chain/sync/beacon/worker/blocks_unproc.nim +++ b/execution_chain/sync/beacon/worker/blocks/blocks_unproc.nim @@ -14,7 +14,7 @@ import pkg/eth/common, pkg/results, pkg/stew/interval_set, - ../worker_desc + ../../worker_desc # ------------------------------------------------------------------------------ # Public functions diff --git a/execution_chain/sync/beacon/worker/headers_staged.nim b/execution_chain/sync/beacon/worker/headers.nim similarity index 95% rename from execution_chain/sync/beacon/worker/headers_staged.nim rename to execution_chain/sync/beacon/worker/headers.nim index 8a70a36730..8c42ea9aa1 100644 --- a/execution_chain/sync/beacon/worker/headers_staged.nim +++ b/execution_chain/sync/beacon/worker/headers.nim @@ -15,23 +15,25 @@ import pkg/eth/common, pkg/stew/[interval_set, sorted_set], ../worker_desc, - ./headers_staged/[headers_fetch, staged_headers], - 
./headers_unproc + ./headers/[headers_headers, headers_helpers, headers_queue, headers_unproc] + +export + headers_queue, headers_unproc # ------------------------------------------------------------------------------ # Public functions # ------------------------------------------------------------------------------ -func headersStagedCollectOk*(buddy: BeaconBuddyRef): bool = +func headersCollectOk*(buddy: BeaconBuddyRef): bool = ## Helper for `worker.nim`, etc. if buddy.ctrl.running: let ctx = buddy.ctx if 0 < ctx.headersUnprocAvail() and - not ctx.headersModeStopped(): + not ctx.hdrSessionStopped(): return true false -proc headersStagedCollect*( +proc headersCollect*( buddy: BeaconBuddyRef; info: static[string]; ) {.async: (raises: []).} = @@ -160,13 +162,14 @@ proc headersStagedCollect*( return info "Queued/staged or DB/stored headers", - unprocTop=(if ctx.headersModeStopped(): "n/a" + unprocTop=(if ctx.hdrSessionStopped(): "n/a" else: ctx.headersUnprocAvailTop.bnStr), nQueued, nStored, nStagedQ=ctx.hdr.staged.len, nSyncPeers=ctx.pool.nBuddies +# -------------- -proc headersStagedProcess*(buddy: BeaconBuddyRef; info: static[string]): bool = +proc headersUnstage*(buddy: BeaconBuddyRef; info: static[string]): bool = ## Store headers from the `staged` queue onto the header chain cache. ## ## The function returns `false` if the caller should make sure to allow @@ -227,6 +230,7 @@ proc headersStagedProcess*(buddy: BeaconBuddyRef; info: static[string]): bool = not switchPeer +# -------------- proc headersStagedReorg*(ctx: BeaconCtxRef; info: static[string]) = ## Some pool mode intervention. 
diff --git a/execution_chain/sync/beacon/worker/headers_staged/headers_fetch.nim b/execution_chain/sync/beacon/worker/headers/headers_fetch.nim similarity index 73% rename from execution_chain/sync/beacon/worker/headers_staged/headers_fetch.nim rename to execution_chain/sync/beacon/worker/headers/headers_fetch.nim index c2891f5e57..774ebd24f0 100644 --- a/execution_chain/sync/beacon/worker/headers_staged/headers_fetch.nim +++ b/execution_chain/sync/beacon/worker/headers/headers_fetch.nim @@ -15,43 +15,21 @@ import pkg/eth/common, pkg/stew/interval_set, ../../../wire_protocol, - ../../worker_desc - -# ------------------------------------------------------------------------------ -# Private functions -# ------------------------------------------------------------------------------ - -proc registerError(buddy: BeaconBuddyRef, slowPeer = false) = - buddy.only.nRespErrors.hdr.inc - if nFetchHeadersErrThreshold < buddy.only.nRespErrors.hdr: - if buddy.ctx.pool.nBuddies == 1 and slowPeer: - # Remember that the current peer is the last one and is lablelled slow. - # It would have been zombified if it were not the last one. This can be - # used in functions -- depending on context -- that will trigger if the - # if the pool of available sync peers becomes empty. 
- buddy.ctx.pool.lastSlowPeer = Opt.some(buddy.peerID) - else: - buddy.ctrl.zombie = true # abandon slow peer unless last one - -# ------------------------------------------------------------------------------ -# Public debugging & logging helpers -# ------------------------------------------------------------------------------ - -func hdrErrors*(buddy: BeaconBuddyRef): string = - $buddy.only.nRespErrors.hdr & "/" & $buddy.nHdrProcErrors() + ../../worker_desc, + ./headers_helpers # ------------------------------------------------------------------------------ # Public functions # ------------------------------------------------------------------------------ -proc headersFetchReversed*( +proc fetchHeadersReversed*( buddy: BeaconBuddyRef; ivReq: BnRange; topHash: Hash32; - info: static[string]; ): Future[Result[seq[Header],void]] {.async: (raises: []).} = - ## Get a list of headers in reverse order. + ## From the ethXX argument peer implied by `buddy` fetch a list of headers + ## in reversed order. 
let peer = buddy.peer req = block: @@ -88,14 +66,16 @@ proc headersFetchReversed*( except PeerDisconnected as e: buddy.only.nRespErrors.hdr.inc buddy.ctrl.zombie = true - `info` info & " error", peer, ivReq, nReq=req.maxResults, - hash=topHash.toStr, elapsed=(Moment.now() - start).toStr, + info trEthRecvReceivedBlockHeaders & ": error", peer, ivReq, + nReq=req.maxResults, hash=topHash.toStr, + elapsed=(Moment.now() - start).toStr, error=($e.name), msg=e.msg, hdrErrors=buddy.hdrErrors return err() except CatchableError as e: - buddy.registerError() - `info` info & " error", peer, ivReq, nReq=req.maxResults, - hash=topHash.toStr, elapsed=(Moment.now() - start).toStr, + buddy.hdrFetchRegisterError() + info trEthRecvReceivedBlockHeaders & ": error", peer, ivReq, + nReq=req.maxResults, hash=topHash.toStr, + elapsed=(Moment.now() - start).toStr, error=($e.name), msg=e.msg, hdrErrors=buddy.hdrErrors return err() @@ -110,7 +90,7 @@ proc headersFetchReversed*( # Evaluate result if resp.isNone or buddy.ctrl.stopped: - buddy.registerError() + buddy.hdrFetchRegisterError() trace trEthRecvReceivedBlockHeaders, peer, nReq=req.maxResults, hash=topHash.toStr, nResp=0, elapsed=elapsed.toStr, syncState=($buddy.syncState), hdrErrors=buddy.hdrErrors @@ -118,7 +98,7 @@ proc headersFetchReversed*( let h: seq[Header] = resp.get.headers if h.len == 0 or ivReq.len < h.len.uint64: - buddy.registerError() + buddy.hdrFetchRegisterError() trace trEthRecvReceivedBlockHeaders, peer, nReq=req.maxResults, hash=topHash.toStr, nResp=h.len, elapsed=elapsed.toStr, syncState=($buddy.syncState), hdrErrors=buddy.hdrErrors @@ -126,7 +106,7 @@ proc headersFetchReversed*( # Verify that first block number matches if h[^1].number != ivReq.minPt: - buddy.registerError() + buddy.hdrFetchRegisterError() trace trEthRecvReceivedBlockHeaders, peer, nReq=req.maxResults, hash=topHash.toStr, ivReqMinPt=ivReq.minPt.bnStr, ivRespMinPt=h[^1].bnStr, nResp=h.len, elapsed=elapsed.toStr, @@ -137,7 +117,7 @@ proc 
headersFetchReversed*( # mimimum share of the number of requested headers expected, typically 10%. if fetchHeadersErrTimeout < elapsed or h.len.uint64 * 100 < req.maxResults * fetchHeadersMinResponsePC: - buddy.registerError(slowPeer=true) + buddy.hdrFetchRegisterError(slowPeer=true) else: buddy.only.nRespErrors.hdr = 0 # reset error count buddy.ctx.pool.lastSlowPeer = Opt.none(Hash) # not last one or not error diff --git a/execution_chain/sync/beacon/worker/headers_staged/staged_headers.nim b/execution_chain/sync/beacon/worker/headers/headers_headers.nim similarity index 82% rename from execution_chain/sync/beacon/worker/headers_staged/staged_headers.nim rename to execution_chain/sync/beacon/worker/headers/headers_headers.nim index 26adb065fb..1acdb8b710 100644 --- a/execution_chain/sync/beacon/worker/headers_staged/staged_headers.nim +++ b/execution_chain/sync/beacon/worker/headers/headers_headers.nim @@ -14,38 +14,14 @@ import pkg/[chronicles, chronos], pkg/eth/common, pkg/stew/interval_set, + ../../../../networking/p2p, ../../worker_desc, - ../headers_unproc, - ./headers_fetch + ./[headers_fetch, headers_helpers, headers_unproc] # ------------------------------------------------------------------------------ # Public helper functions # ------------------------------------------------------------------------------ -proc headersUpdateBuddyErrorState*(buddy: BeaconBuddyRef) = - ## Helper/wrapper - if ((0 < buddy.only.nRespErrors.hdr or - 0 < buddy.nHdrProcErrors()) and buddy.ctrl.stopped) or - nFetchHeadersErrThreshold < buddy.only.nRespErrors.hdr or - nProcHeadersErrThreshold < buddy.nHdrProcErrors(): - - # Make sure that this peer does not immediately reconnect - buddy.ctrl.zombie = true - -proc headersUpdateBuddyProcError*(buddy: BeaconBuddyRef) = - buddy.incHdrProcErrors() - buddy.headersUpdateBuddyErrorState() - -# ----------------- - -func headersModeStopped*(ctx: BeaconCtxRef): bool = - ## Helper, checks whether there is a general stop conditions based on - 
## state settings (not on sync peer ctrl as `buddy.ctrl.running`.) - ctx.poolMode or - ctx.pool.lastState != headers or - ctx.hdrCache.state != collecting - - proc headersFetch*( buddy: BeaconBuddyRef; parent: Hash32; @@ -72,12 +48,12 @@ proc headersFetch*( return Opt.none(seq[Header]) # stop, exit function # Fetch headers for this range of block numbers - rc = await buddy.headersFetchReversed(iv, parent, info) + rc = await buddy.fetchHeadersReversed(iv, parent) # Job might have been cancelled or completed while downloading headers. # If so, no more bookkeeping of headers must take place. The *books* # might have been reset and prepared for the next stage. - if ctx.headersModeStopped(): + if ctx.hdrSessionStopped(): return Opt.none(seq[Header]) # stop, exit function if rc.isErr: @@ -89,9 +65,9 @@ proc headersFetch*( nHeaders = rc.value.len.uint64 ivBottom = iv.maxPt - nHeaders + 1 if rc.value[0].number != iv.maxPt or rc.value[^1].number != ivBottom: - buddy.headersUpdateBuddyProcError() + buddy.hdrProcRegisterError() ctx.headersUnprocCommit(iv, iv) # clean up, revert `iv` - debug info & ": garbled header list", peer, iv, headers=rc.value.bnStr, + debug info & ": Garbled header list", peer, iv, headers=rc.value.bnStr, expected=(ivBottom,iv.maxPt).bnStr, syncState=($buddy.syncState), hdrErrors=buddy.hdrErrors return Opt.none(seq[Header]) # stop, exit function diff --git a/execution_chain/sync/beacon/worker/headers/headers_helpers.nim b/execution_chain/sync/beacon/worker/headers/headers_helpers.nim new file mode 100644 index 0000000000..0ff1ffeb6f --- /dev/null +++ b/execution_chain/sync/beacon/worker/headers/headers_helpers.nim @@ -0,0 +1,64 @@ +# Nimbus +# Copyright (c) 2023-2025 Status Research & Development GmbH +# Licensed and distributed under either of +# * MIT license (license terms in the root directory or at +# https://opensource.org/licenses/MIT). 
+# * Apache v2 license (license terms in the root directory or at +# https://www.apache.org/licenses/LICENSE-2.0). +# at your option. This file may not be copied, modified, or distributed +# except according to those terms. + +{.push raises:[].} + +import + ../../worker_desc + +# ------------------------------------------------------------------------------ +# Private helpers +# ------------------------------------------------------------------------------ + +proc updateErrorState(buddy: BeaconBuddyRef) = + ## Helper/wrapper + if ((0 < buddy.only.nRespErrors.hdr or + 0 < buddy.nHdrProcErrors()) and buddy.ctrl.stopped) or + nFetchHeadersErrThreshold < buddy.only.nRespErrors.hdr or + nProcHeadersErrThreshold < buddy.nHdrProcErrors(): + + # Make sure that this peer does not immediately reconnect + buddy.ctrl.zombie = true + +# ------------------------------------------------------------------------------ +# Public functions +# ------------------------------------------------------------------------------ + +func hdrErrors*(buddy: BeaconBuddyRef): string = + $buddy.only.nRespErrors.hdr & "/" & $buddy.nHdrProcErrors() + +proc hdrFetchRegisterError*(buddy: BeaconBuddyRef, slowPeer = false) = + buddy.only.nRespErrors.hdr.inc + if nFetchHeadersErrThreshold < buddy.only.nRespErrors.hdr: + if buddy.ctx.pool.nBuddies == 1 and slowPeer: + # Remember that the current peer is the last one and is lablelled slow. + # It would have been zombified if it were not the last one. This can be + # used in functions -- depending on context -- that will trigger if the + # if the pool of available sync peers becomes empty. 
+ buddy.ctx.pool.lastSlowPeer = Opt.some(buddy.peerID) + else: + buddy.ctrl.zombie = true # abandon slow peer unless last one + +proc hdrProcRegisterError*(buddy: BeaconBuddyRef) = + buddy.incHdrProcErrors() + buddy.updateErrorState() + +# ----------------- + +func hdrSessionStopped*(ctx: BeaconCtxRef): bool = + ## Helper, checks whether there is a general stop conditions based on + ## state settings (not on sync peer ctrl as `buddy.ctrl.running`.) + ctx.poolMode or + ctx.pool.lastState != SyncState.headers or + ctx.hdrCache.state != collecting + +# ------------------------------------------------------------------------------ +# End +# ------------------------------------------------------------------------------ diff --git a/execution_chain/sync/beacon/worker/headers_staged/staged_queue.nim b/execution_chain/sync/beacon/worker/headers/headers_queue.nim similarity index 100% rename from execution_chain/sync/beacon/worker/headers_staged/staged_queue.nim rename to execution_chain/sync/beacon/worker/headers/headers_queue.nim diff --git a/execution_chain/sync/beacon/worker/headers_unproc.nim b/execution_chain/sync/beacon/worker/headers/headers_unproc.nim similarity index 99% rename from execution_chain/sync/beacon/worker/headers_unproc.nim rename to execution_chain/sync/beacon/worker/headers/headers_unproc.nim index 9f958f85df..23f286ce72 100644 --- a/execution_chain/sync/beacon/worker/headers_unproc.nim +++ b/execution_chain/sync/beacon/worker/headers/headers_unproc.nim @@ -14,7 +14,7 @@ import pkg/eth/common, pkg/results, pkg/stew/interval_set, - ../worker_desc + ../../worker_desc # ------------------------------------------------------------------------------ # Public functions diff --git a/execution_chain/sync/beacon/worker/helpers.nim b/execution_chain/sync/beacon/worker/helpers.nim index 82df9be7a4..471ac7da40 100644 --- a/execution_chain/sync/beacon/worker/helpers.nim +++ b/execution_chain/sync/beacon/worker/helpers.nim @@ -21,7 +21,7 @@ import ../worker_const 
export - prettify, short + prettify, short, `$` func bnStr*(w: BlockNumber): string = "#" & $w @@ -63,9 +63,6 @@ func toStr*(h: Hash32): string = func `$`*(w: Interval[BlockNumber,uint64]): string = w.bnStr -func `$`*(w: Opt[Peer]): string = - if w.isSome: $w.value else: "n/a" - func `$`*(w: (SyncState,HeaderChainMode,bool)): string = $w[0] & "." & $w[1] & (if w[2]: ":" & "poolMode" else: "") diff --git a/execution_chain/sync/beacon/worker/start_stop.nim b/execution_chain/sync/beacon/worker/start_stop.nim index 37a5925f9e..97e2c4a969 100644 --- a/execution_chain/sync/beacon/worker/start_stop.nim +++ b/execution_chain/sync/beacon/worker/start_stop.nim @@ -15,9 +15,7 @@ import ../../../networking/p2p, ../../wire_protocol, ../worker_desc, - ./blocks_staged/staged_queue, - ./headers_staged/staged_queue, - ./[blocks_unproc, headers_unproc, update] + ./[blocks, headers, update] type SyncStateData = tuple @@ -29,10 +27,10 @@ type proc querySyncProgress(ctx: BeaconCtxRef): SyncStateData = ## Syncer status query function (for call back closure) - if blocks <= ctx.pool.lastState: + if SyncState.blocks <= ctx.pool.lastState: return (ctx.hdrCache.antecedent.number, ctx.subState.top, ctx.subState.head) - if headers <= ctx.pool.lastState: + if SyncState.headers <= ctx.pool.lastState: let b = ctx.chain.baseNumber return (b, b, ctx.subState.head) @@ -60,7 +58,7 @@ proc setupServices*(ctx: BeaconCtxRef; info: static[string]) = # Set up the notifier informing when a new syncer session has started. ctx.hdrCache.start proc() = # Activates the syncer. Work will be picked up by peers when available. - ctx.updateFromHibernateSetTarget info + ctx.updateActivateSyncer() # Manual first run? 
if 0 < ctx.pool.clReq.consHead.number: diff --git a/execution_chain/sync/beacon/worker/update.nim b/execution_chain/sync/beacon/worker/update.nim index d21db5c0af..4d093ece90 100644 --- a/execution_chain/sync/beacon/worker/update.nim +++ b/execution_chain/sync/beacon/worker/update.nim @@ -15,25 +15,17 @@ import pkg/[chronicles, chronos], pkg/eth/common, ../worker_desc, - ./blocks_staged/staged_queue, - ./headers_staged/staged_queue, - ./[blocks_unproc, headers_unproc] + ./blocks/blocks_unproc, + ./headers # ------------------------------------------------------------------------------ # Private functions, state handler helpers # ------------------------------------------------------------------------------ -proc startHibernating(ctx: BeaconCtxRef; info: static[string]) = - ## Clean up sync scrum target buckets and await a new request from `CL`. +proc updateSuspendSyncer(ctx: BeaconCtxRef) = + ## Clean up sync target buckets, stop syncer activity, and and get ready + ## for awaiting a new request from the `CL`. ## - doAssert ctx.blocksUnprocIsEmpty() - doAssert ctx.blocksStagedQueueIsEmpty() - doAssert ctx.headersUnprocIsEmpty() - doAssert ctx.headersStagedQueueIsEmpty() - doAssert ctx.subState.top == 0 - doAssert ctx.subState.head == 0 - doAssert not ctx.subState.cancelRequest - ctx.hdrCache.clear() ctx.pool.clReq.reset @@ -45,10 +37,9 @@ proc startHibernating(ctx: BeaconCtxRef; info: static[string]) = info "Suspending syncer", base=ctx.chain.baseNumber.bnStr, head=ctx.chain.latestNumber.bnStr, nSyncPeers=ctx.pool.nBuddies - proc commitCollectHeaders(ctx: BeaconCtxRef; info: static[string]): bool = ## Link header chain into `FC` module. Gets ready for block import. 
- + ## # This function does the job linking into `FC` module proper ctx.hdrCache.commit().isOkOr: trace info & ": cannot finalise header chain", @@ -59,15 +50,9 @@ proc commitCollectHeaders(ctx: BeaconCtxRef; info: static[string]): bool = true - proc setupProcessingBlocks(ctx: BeaconCtxRef; info: static[string]) = ## Prepare for blocks processing - doAssert ctx.blocksUnprocIsEmpty() - doAssert ctx.blocksStagedQueueIsEmpty() - doAssert ctx.subState.top == 0 - doAssert ctx.subState.head == 0 - doAssert not ctx.subState.cancelRequest - + ## # Reset for useles block download detection (to avoid deadlock) ctx.pool.failedPeers.clear() ctx.pool.seenData = false @@ -163,7 +148,7 @@ func blocksFinishNext(ctx: BeaconCtxRef; info: static[string]): SyncState = proc updateSyncState*(ctx: BeaconCtxRef; info: static[string]) = ## Update internal state when needed - # + ## # State machine # :: # idle <---------------+---+---+---. @@ -228,13 +213,31 @@ proc updateSyncState*(ctx: BeaconCtxRef; info: static[string]) = # Final sync scrum layout reached or inconsistent/impossible state if newState == idle: - ctx.startHibernating info + ctx.updateSuspendSyncer() -proc updateFromHibernateSetTarget*( +proc updateAsyncTasks*( ctx: BeaconCtxRef; - info: static[string]; - ) = + ): Future[Opt[void]] {.async: (raises: []).} = + ## Allow task switch by issuing a short sleep request. The `due` argument + ## allows to maintain a minimum time gap when invoking this function. + ## + let start = Moment.now() + if ctx.pool.nextAsyncNanoSleep < start: + + try: await sleepAsync asyncThreadSwitchTimeSlot + except CancelledError: discard + + if ctx.daemon: + ctx.pool.nextAsyncNanoSleep = Moment.now() + asyncThreadSwitchGap + return ok() + # Shutdown? 
+ return err() + + return ok() + + +proc updateActivateSyncer*(ctx: BeaconCtxRef) = ## If in hibernate mode, accept a cache session and activate syncer ## if ctx.hibernate: @@ -258,30 +261,9 @@ proc updateFromHibernateSetTarget*( # Failed somewhere on the way ctx.hdrCache.clear() - debug info & ": activation rejected", base=ctx.chain.baseNumber.bnStr, + debug "Syncer activation rejected", base=ctx.chain.baseNumber.bnStr, head=ctx.chain.latestNumber.bnStr, state=ctx.hdrCache.state - -proc updateAsyncTasks*( - ctx: BeaconCtxRef; - ): Future[Opt[void]] {.async: (raises: []).} = - ## Allow task switch by issuing a short sleep request. The `due` argument - ## allows to maintain a minimum time gap when invoking this function. - ## - let start = Moment.now() - if ctx.pool.nextAsyncNanoSleep < start: - - try: await sleepAsync asyncThreadSwitchTimeSlot - except CancelledError: discard - - if ctx.daemon: - ctx.pool.nextAsyncNanoSleep = Moment.now() + asyncThreadSwitchGap - return ok() - # Shutdown? 
- return err() - - return ok() - # ------------------------------------------------------------------------------ # End # ------------------------------------------------------------------------------ diff --git a/execution_chain/sync/beacon/worker/update/metrics.nim b/execution_chain/sync/beacon/worker/update/metrics.nim index 7362482bf7..472d3f817d 100644 --- a/execution_chain/sync/beacon/worker/update/metrics.nim +++ b/execution_chain/sync/beacon/worker/update/metrics.nim @@ -14,9 +14,7 @@ import pkg/[chronos, metrics], ../../../../networking/p2p, ../../worker_desc, - ../blocks_staged/staged_queue, - ../headers_staged/staged_queue, - ".."/[blocks_unproc, headers_unproc] + ".."/[blocks, headers] declareGauge nec_base, "" & "Max block number of imported finalised blocks" diff --git a/execution_chain/sync/beacon/worker/update/ticker.nim b/execution_chain/sync/beacon/worker/update/ticker.nim index 06183b1396..4fcc8c1fe1 100644 --- a/execution_chain/sync/beacon/worker/update/ticker.nim +++ b/execution_chain/sync/beacon/worker/update/ticker.nim @@ -19,9 +19,7 @@ when enableTicker: import std/strutils, pkg/[stint, stew/interval_set], - ../headers_staged/staged_queue, - ../blocks_staged/staged_queue, - ../[blocks_unproc, headers_unproc] + ".."/[blocks, headers] logScope: topics = "beacon ticker" diff --git a/execution_chain/sync/beacon/worker_desc.nim b/execution_chain/sync/beacon/worker_desc.nim index 95eda61e4f..83b25421e3 100644 --- a/execution_chain/sync/beacon/worker_desc.nim +++ b/execution_chain/sync/beacon/worker_desc.nim @@ -23,6 +23,18 @@ export helpers, sync_desc, worker_const, chain type + BeaconBuddyRef* = BuddyRef[BeaconCtxData,BeaconBuddyData] + ## Extended worker peer descriptor + + BeaconCtxRef* = CtxRef[BeaconCtxData] + ## Extended global descriptor + + # ------------------- + + ImportBlockError* = tuple + info: string + cancelled: bool + BnRangeSet* = IntervalSetRef[BlockNumber,uint64] ## Disjunct sets of block number intervals @@ -95,15 +107,9 @@ 
type ## Local descriptor data extension nRespErrors*: BuddyError ## Number of errors/slow responses in a row - # Debugging and logging. - nMultiLoop*: int ## Number of runs - stoppedMultiRun*: chronos.Moment ## Time when run-multi stopped - multiRunIdle*: chronos.Duration ## Idle time between runs - BeaconCtxData* = object ## Globally shared data extension nBuddies*: int ## Number of active workers - clReq*: SyncClMesg ## Manual first target set up lastState*: SyncState ## Last known layout state hdrSync*: HeaderFetchSync ## Syncing by linked header chains blkSync*: BlocksFetchSync ## For importing/executing blocks @@ -118,18 +124,14 @@ type nProcError*: Table[Hash,BuddyError] ## Per peer processing error lastSlowPeer*: Opt[Hash] ## Register slow peer when the last one failedPeers*: HashSet[Hash] ## Detect dead end sync by collecting peers - seenData*: bool ## Set `true` is data were fetched, already + seenData*: bool ## Set `true` if data were fetched, already # Debugging stuff + clReq*: SyncClMesg ## Manual first target set up + when enableTicker: ticker*: RootRef ## Logger ticker - BeaconBuddyRef* = BuddyRef[BeaconCtxData,BeaconBuddyData] - ## Extended worker peer descriptor - - BeaconCtxRef* = CtxRef[BeaconCtxData] - ## Extended global descriptor - # ------------------------------------------------------------------------------ # Public helpers # ------------------------------------------------------------------------------ diff --git a/execution_chain/sync/sync_desc.nim b/execution_chain/sync/sync_desc.nim index 233c22eb25..ffeb14ae13 100644 --- a/execution_chain/sync/sync_desc.nim +++ b/execution_chain/sync/sync_desc.nim @@ -26,7 +26,7 @@ type ZombieStop ## Abandon/ignore (wait for pushed out of LRU table) ZombieRun ## Extra zombie state to potentially recover from - BuddyCtrlRef* = ref object + BuddyCtrl* = object ## Control and state settings runState: BuddyRunState ## Access with getters @@ -35,7 +35,7 @@ type ctx*: CtxRef[S] ## Shared data descriptor back 
reference peer*: Peer ## Reference to eth `p2p` protocol entry peerID*: Hash ## Hash of peer node - ctrl*: BuddyCtrlRef ## Control and state settings + ctrl*: BuddyCtrl ## Control and state settings only*: W ## Worker peer specific data CtxRef*[S] = ref object @@ -57,19 +57,19 @@ proc `$`*[S,W](worker: BuddyRef[S,W]): string = # Public getters, `BuddyRunState` execution control functions # ------------------------------------------------------------------------------ -proc state*(ctrl: BuddyCtrlRef): BuddyRunState = +proc state*(ctrl: BuddyCtrl): BuddyRunState = ## Getter (logging only, details of `BuddyCtrl` are private) ctrl.runState -proc running*(ctrl: BuddyCtrlRef): bool = +proc running*(ctrl: BuddyCtrl): bool = ## Getter, if `true` if `ctrl.state()` is `Running` ctrl.runState == Running -proc stopped*(ctrl: BuddyCtrlRef): bool = +proc stopped*(ctrl: BuddyCtrl): bool = ## Getter, if `true`, if `ctrl.state()` is not `Running` ctrl.runState != Running -proc zombie*(ctrl: BuddyCtrlRef): bool = +proc zombie*(ctrl: BuddyCtrl): bool = ## Getter, `true` if `ctrl.state()` is `Zombie` (i.e. not `running()` and ## not `stopped()`) ctrl.runState in {ZombieStop, ZombieRun} @@ -78,7 +78,7 @@ proc zombie*(ctrl: BuddyCtrlRef): bool = # Public setters, `BuddyRunState` execution control functions # ------------------------------------------------------------------------------ -proc `zombie=`*(ctrl: BuddyCtrlRef; value: bool) = +proc `zombie=`*(ctrl: var BuddyCtrl; value: bool) = ## Setter if value: case ctrl.runState: @@ -97,7 +97,7 @@ proc `zombie=`*(ctrl: BuddyCtrlRef; value: bool) = else: discard -proc `stopped=`*(ctrl: BuddyCtrlRef; value: bool) = +proc `stopped=`*(ctrl: var BuddyCtrl; value: bool) = ## Setter if value: case ctrl.runState: @@ -112,8 +112,8 @@ proc `stopped=`*(ctrl: BuddyCtrlRef; value: bool) = else: discard -proc `forceRun=`*(ctrl: BuddyCtrlRef; value: bool) = - ## Setter, gets out of `Zombie` jail/locked state with `true argument. 
+proc `forceRun=`*(ctrl: var BuddyCtrl; value: bool) = + ## Setter, gets out of `Zombie` jail/locked state with `true` argument. if value: ctrl.runState = Running else: diff --git a/execution_chain/sync/sync_sched.nim b/execution_chain/sync/sync_sched.nim index b6f3bc074c..f6e9e5c7cd 100644 --- a/execution_chain/sync/sync_sched.nim +++ b/execution_chain/sync/sync_sched.nim @@ -85,11 +85,9 @@ import std/hashes, - chronos, + pkg/[chronos, stew/keyed_queue], ../networking/[p2p, peer_pool], - stew/keyed_queue, - ./sync_desc, - ./wire_protocol + ./[sync_desc, wire_protocol] type ActiveBuddies[S,W] = ##\ @@ -408,7 +406,6 @@ proc onPeerConnected[S,W](dsc: RunnerSyncRef[S,W]; peer: Peer) = dsc: dsc, worker: BuddyRef[S,W]( ctx: dsc.ctx, - ctrl: BuddyCtrlRef(), peer: peer, peerID: peer.key.hash)) if not buddy.worker.runStart(): From a7d5ff3eac5b84e5ad061b469e1f13a784043b47 Mon Sep 17 00:00:00 2001 From: andri lim Date: Mon, 23 Jun 2025 04:13:08 +0700 Subject: [PATCH 099/138] Fix revert reason encoding (#3420) --- execution_chain/evm/evm_errors.nim | 41 ++++++++++++++----- .../interpreter/op_handlers/oph_sysops.nim | 9 ++-- tests/test_evm_errors.nim | 18 ++++++-- 3 files changed, 49 insertions(+), 19 deletions(-) diff --git a/execution_chain/evm/evm_errors.nim b/execution_chain/evm/evm_errors.nim index 17cbfcb062..9de954088b 100644 --- a/execution_chain/evm/evm_errors.nim +++ b/execution_chain/evm/evm_errors.nim @@ -14,9 +14,8 @@ import std/tables, results, stint, - stew/[assign2, byteutils], - eth/common/hashes, - web3/encoding + stew/byteutils, + eth/common/hashes export results @@ -101,20 +100,42 @@ const panicReasons = { 0x51: "uninitialized function", }.toTable +func decodeU256(input: openArray[byte]): Opt[UInt256] = + if input.len < 32: + return Opt.none(UInt256) + Opt.some(UInt256.fromBytesBE(input.toOpenArray(0, 31))) + +func decodeString(input: openArray[byte]): Opt[string] = + let + offset256 = ?decodeU256(input) + offset = offset256.truncate(int) + + if offset >= 
input.len: + return Opt.none(string) + + let + len256 = ?decodeU256(input.toOpenArray(offset, input.len-1)) + len = len256.truncate(int) + dataOffset = offset + 32 + + if dataOffset + len >= input.len: + return Opt.none(string) + + ok(string.fromBytes(input.toOpenArray(dataOffset, dataOffset + len - 1))) + # UnpackRevert resolves the abi-encoded revert reason. According to the solidity # spec https://solidity.readthedocs.io/en/latest/control-structures.html#revert, # the provided revert reason is abi-encoded as if it were a call to function # `Error(string)` or `Panic(uint256)`. -proc unpackRevertReason*(data: openArray[byte], reason: var string) = +proc unpackRevertReason*(data: openArray[byte]): Opt[string] = if data.len() < 4: - reason = "" - return + return Opt.none(string) let selector = data[0..3] if selector == revertSelector: - discard decode(data.toOpenArray(4, data.len() - 1), 0, 0, reason) + return decodeString(data.toOpenArray(4, data.len() - 1)) elif selector == panicSelector: - var reasonCode: UInt256 - discard decode(data.toOpenArray(4, data.len() - 1), 0, 0, reasonCode) - assign(reason, panicReasons.getOrDefault(reasonCode.truncate(int))) + let reasonCode = decodeU256(data.toOpenArray(4, data.len() - 1)).valueOr: + return Opt.none(string) + return ok(panicReasons.getOrDefault(reasonCode.truncate(int))) diff --git a/execution_chain/evm/interpreter/op_handlers/oph_sysops.nim b/execution_chain/evm/interpreter/op_handlers/oph_sysops.nim index 89cf368fed..9fbc9d1184 100644 --- a/execution_chain/evm/interpreter/op_handlers/oph_sysops.nim +++ b/execution_chain/evm/interpreter/op_handlers/oph_sysops.nim @@ -64,13 +64,12 @@ proc revertOp(cpt: VmCpt): EvmResultVoid = cpt.memory.extend(pos, len) assign(cpt.output, cpt.memory.read(pos, len)) - - var revertReason: string - unpackRevertReason(cpt.output, revertReason) + + let revertReason = unpackRevertReason(cpt.output) let revertMsg = - if revertReason.len() > 0: - "execution reverted: " & revertReason + if 
revertReason.isSome: + "execution reverted: " & revertReason.value else: "execution reverted" diff --git a/tests/test_evm_errors.nim b/tests/test_evm_errors.nim index 26eaa892c1..2bb5712c3b 100644 --- a/tests/test_evm_errors.nim +++ b/tests/test_evm_errors.nim @@ -14,13 +14,23 @@ suite "EVM errors tests": test "unpack revert reason data": let data = "0x08c379a000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000018556e69737761705632526f757465723a20455850495245440000000000000000" - var revertReason: string - unpackRevertReason(data.hexToSeqByte(), revertReason) + let revertReason = unpackRevertReason(data.hexToSeqByte()).expect("something") check revertReason == "UniswapV2Router: EXPIRED" + test "unpack revert reason data missing bytes": + let data = "0x08c379a0000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000018556e69737761705632526f757465723a20455850495245440000000000000000" + + let revertReason = unpackRevertReason(data.hexToSeqByte()) + check revertReason == Opt.none(string) + test "unpack panic reason data": let data = "0x4e487b710000000000000000000000000000000000000000000000000000000000000032" - var revertReason: string - unpackRevertReason(data.hexToSeqByte(), revertReason) + var revertReason = unpackRevertReason(data.hexToSeqByte()).expect("something") check revertReason == "out-of-bounds access of an array or bytesN" + + test "unpack panic reason data missing byte": + let data = "0x4e487b7100000000000000000000000000000000000000000000000000000000000032" + + var revertReason = unpackRevertReason(data.hexToSeqByte()) + check revertReason == Opt.none(string) From 19e4e063ce95c55d1d187612263d5da4d84751d8 Mon Sep 17 00:00:00 2001 From: Advaita Saha Date: Mon, 23 Jun 2025 12:35:06 +0530 Subject: [PATCH 100/138] add eip-7883 changes (#3399) * add eip-7883 changes * Update execution_chain/evm/precompiles.nim --- 
execution_chain/evm/precompiles.nim | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/execution_chain/evm/precompiles.nim b/execution_chain/evm/precompiles.nim index 6c7f1df90d..a0f3065482 100644 --- a/execution_chain/evm/precompiles.nim +++ b/execution_chain/evm/precompiles.nim @@ -205,11 +205,11 @@ func modExpFee(c: Computation, result = result * result func mulComplexityEIP7883(maxLen: UInt256): UInt256 = - # gas = ceil(x div 8) ^ 2 - result = maxLen + 7 - result = result div 8 - result = result * result + result = 16.u256 if maxLen > 32.u256: + result = maxLen + 7 + result = result div 8 + result = result * result result = result * 2 let adjExpLen = block: From f8a60443700a0b8aaa93925558dfabcc8d116d6b Mon Sep 17 00:00:00 2001 From: Advaita Saha Date: Mon, 23 Jun 2025 17:11:20 +0530 Subject: [PATCH 101/138] secp256r1 precompile (#3382) * secp256r1 impl * remove unwanted stuff * fix double hashing * remove commented code * add complete test suite * improve performance * fix gas calculation * fix unwanted ECC Points * few more fixes * fix weird EIP, no error but consume gas * cleanup * some more unwanted code cleanup --- PrecompileTests.md | 5 +- execution_chain/evm/interpreter/gas_costs.nim | 1 + execution_chain/evm/precompiles.nim | 171 +- execution_chain/evm/secp256r1verify.nim | 44 + .../fixtures/PrecompileTests/P256Verify.json | 5473 +++++++++++++++++ tests/test_precompiles.nim | 5 +- 6 files changed, 5653 insertions(+), 46 deletions(-) create mode 100644 execution_chain/evm/secp256r1verify.nim create mode 100644 tests/fixtures/PrecompileTests/P256Verify.json diff --git a/PrecompileTests.md b/PrecompileTests.md index 1c27644cb8..00a8f0048b 100644 --- a/PrecompileTests.md +++ b/PrecompileTests.md @@ -2,6 +2,7 @@ PrecompileTests === ## PrecompileTests ```diff ++ P256Verify.json OK + blake2F.json OK + blsG1Add.json OK + blsG1MultiExp.json OK @@ -24,7 +25,7 @@ PrecompileTests + ripemd160.json OK + sha256.json OK ``` -OK: 21/21 Fail: 0/21 
Skip: 0/21 +OK: 22/22 Fail: 0/22 Skip: 0/22 ## eest ```diff + add_G1_bls.json OK @@ -49,4 +50,4 @@ OK: 21/21 Fail: 0/21 Skip: 0/21 OK: 18/18 Fail: 0/18 Skip: 0/18 ---TOTAL--- -OK: 39/39 Fail: 0/39 Skip: 0/39 +OK: 40/40 Fail: 0/40 Skip: 0/40 diff --git a/execution_chain/evm/interpreter/gas_costs.nim b/execution_chain/evm/interpreter/gas_costs.nim index 684d3d5b0b..c640216a3b 100644 --- a/execution_chain/evm/interpreter/gas_costs.nim +++ b/execution_chain/evm/interpreter/gas_costs.nim @@ -858,3 +858,4 @@ const Bls12381PairingPerPairGas* = GasInt 32600 Bls12381MapG1Gas* = GasInt 5500 Bls12381MapG2Gas* = GasInt 23800 + GasP256VerifyGas* = GasInt 3450 diff --git a/execution_chain/evm/precompiles.nim b/execution_chain/evm/precompiles.nim index a0f3065482..66d9d7fdac 100644 --- a/execution_chain/evm/precompiles.nim +++ b/execution_chain/evm/precompiles.nim @@ -17,56 +17,111 @@ import chronicles, nimcrypto/[ripemd, sha2, utils], bncurve/[fields, groups], - stew/assign2, + stew/[assign2, endians2, byteutils], + libp2p/crypto/ecnist, ../common/evmforks, ../core/eip4844, ./modexp, ./evm_errors, ./computation, + ./secp256r1verify, eth/common/[base, addresses] type - PrecompileAddresses* = enum + Precompiles* = enum # Frontier to Spurious Dragron - paEcRecover = 0x01 - paSha256 = 0x02 - paRipeMd160 = 0x03 - paIdentity = 0x04 + paEcRecover + paSha256 + paRipeMd160 + paIdentity # Byzantium and Constantinople - paModExp = 0x05 - paEcAdd = 0x06 - paEcMul = 0x07 - paPairing = 0x08 + paModExp + paEcAdd + paEcMul + paPairing # Istanbul - paBlake2bf = 0x09 + paBlake2bf # Cancun - paPointEvaluation = 0x0a + paPointEvaluation # Prague (EIP-2537) - paBlsG1Add = 0x0b - paBlsG1MultiExp = 0x0c - paBlsG2Add = 0x0d - paBlsG2MultiExp = 0x0e - paBlsPairing = 0x0f - paBlsMapG1 = 0x10 - paBlsMapG2 = 0x11 + paBlsG1Add + paBlsG1MultiExp + paBlsG2Add + paBlsG2MultiExp + paBlsPairing + paBlsMapG1 + paBlsMapG2 + # Osaka + paP256Verify SigRes = object msgHash: array[32, byte] sig: Signature +const + # 
Frontier to Spurious Dragron + paEcRecoverAddress = address"0x0000000000000000000000000000000000000001" + paSha256Address = address"0x0000000000000000000000000000000000000002" + paRipeMd160Address = address"0x0000000000000000000000000000000000000003" + paIdentityAddress = address"0x0000000000000000000000000000000000000004" + + # Byzantium and Constantinople + paModExpAddress = address"0x0000000000000000000000000000000000000005" + paEcAddAddress = address"0x0000000000000000000000000000000000000006" + paEcMulAddress = address"0x0000000000000000000000000000000000000007" + paPairingAddress = address"0x0000000000000000000000000000000000000008" + + # Istanbul + paBlake2bfAddress = address"0x0000000000000000000000000000000000000009" + + # Cancun + paPointEvaluationAddress = address"0x000000000000000000000000000000000000000a" + + # Prague (EIP-2537) + paBlsG1AddAddress = address"0x000000000000000000000000000000000000000b" + paBlsG1MultiExpAddress = address"0x000000000000000000000000000000000000000c" + paBlsG2AddAddress = address"0x000000000000000000000000000000000000000d" + paBlsG2MultiExpAddress = address"0x000000000000000000000000000000000000000e" + paBlsPairingAddress = address"0x000000000000000000000000000000000000000f" + paBlsMapG1Address = address"0x0000000000000000000000000000000000000010" + paBlsMapG2Address = address"0x0000000000000000000000000000000000000011" + + # Osaka + paP256VerifyAddress = address"0x0000000000000000000000000000000000000100" + + precompileAddrs*: array[Precompiles, Address] = [ + paEcRecoverAddress, # paEcRecover + paSha256Address, # paSha256 + paRipeMd160Address, # paRipeMd160 + paIdentityAddress, # paIdentity + paModExpAddress, # paModExp + paEcAddAddress, # paEcAdd + paEcMulAddress, # paEcMul + paPairingAddress, # paPairing + paBlake2bfAddress, # paBlake2bf + paPointEvaluationAddress, # paPointEvaluation + paBlsG1AddAddress, # paBlsG1Add + paBlsG1MultiExpAddress, # paBlsG1MultiExp + paBlsG2AddAddress, # paBlsG2Add + paBlsG2MultiExpAddress, 
# paBlsG2MultiExp + paBlsPairingAddress, # paBlsPairing + paBlsMapG1Address, # paBlsMapG1 + paBlsMapG2Address, # paBlsMapG2 + paP256VerifyAddress # paP256Verify + ] + + # ------------------------------------------------------------------------------ # Private functions # ------------------------------------------------------------------------------ -func getMaxPrecompileAddr(fork: EVMFork): PrecompileAddresses = +func getMaxPrecompile(fork: EVMFork): Precompiles = if fork < FkByzantium: paIdentity elif fork < FkIstanbul: paPairing elif fork < FkCancun: paBlake2bf elif fork < FkPrague: paPointEvaluation - else: PrecompileAddresses.high - -func validPrecompileAddr(addrByte, maxPrecompileAddr: byte): bool = - (addrByte in PrecompileAddresses.low.byte .. maxPrecompileAddr) + elif fork < FkOsaka: paBlsMapG2 + else: Precompiles.high func getSignature(c: Computation): EvmResult[SigRes] = # input is Hash, V, R, S @@ -694,36 +749,67 @@ proc pointEvaluation(c: Computation): EvmResultVoid = c.output = @PointEvaluationResult ok() +proc p256verify(c: Computation): EvmResultVoid = + + template failed() = + c.output.setLen(0) + return ok() + + ? 
c.gasMeter.consumeGas(GasP256VerifyGas, reason="P256VERIFY Precompile") + + if c.msg.data.len != 160: + failed() + + var inputPubKey: array[65, byte] + + # Validations + if isInfinityByte(c.msg.data.toOpenArray(96, 159)): + failed() + + # Check scalar and field bounds (r, s ∈ (0, n), qx, qy ∈ [0, p)) + var sig: EcSignature + if not sig.initRaw(c.msg.data.toOpenArray(32, 95)): + failed() + + var pubkey: EcPublicKey + inputPubKey[0] = 4.byte + assign(inputPubKey.toOpenArray(1, 64), c.msg.data.toOpenArray(96, 159)) + + if not pubkey.initRaw(inputPubKey): + failed() + + let isValid = sig.verifyRaw(c.msg.data.toOpenArray(0, 31), pubkey) + + if isValid: + c.output.setLen(32) + c.output[^1] = 1.byte # return 0x...01 + else: + c.output.setLen(0) + + ok() + # ------------------------------------------------------------------------------ # Public functions # ------------------------------------------------------------------------------ iterator activePrecompiles*(fork: EVMFork): Address = - var res: Address - let maxPrecompileAddr = getMaxPrecompileAddr(fork) - for c in PrecompileAddresses.low..maxPrecompileAddr: - if validPrecompileAddr(c.byte, maxPrecompileAddr.byte): - res.data[^1] = c.byte - yield res + let maxPrecompile = getMaxPrecompile(fork) + for c in Precompiles.low..maxPrecompile: + yield precompileAddrs[c] func activePrecompilesList*(fork: EVMFork): seq[Address] = for address in activePrecompiles(fork): result.add address -proc getPrecompile*(fork: EVMFork, b: byte): Opt[PrecompileAddresses] = - let maxPrecompileAddr = getMaxPrecompileAddr(fork) - if validPrecompileAddr(b, maxPrecompileAddr.byte): - Opt.some(PrecompileAddresses(b)) - else: - Opt.none(PrecompileAddresses) +proc getPrecompile*(fork: EVMFork, codeAddress: Address): Opt[Precompiles] = + let maxPrecompile = getMaxPrecompile(fork) + for c in Precompiles.low..maxPrecompile: + if precompileAddrs[c] == codeAddress: + return Opt.some(c) -proc getPrecompile*(fork: EVMFork, codeAddress: Address): 
Opt[PrecompileAddresses] = - for i in 0..18: - if codeAddress.data[i] != 0: - return Opt.none(PrecompileAddresses) - getPrecompile(fork, codeAddress.data[19]) + Opt.none(Precompiles) -proc execPrecompile*(c: Computation, precompile: PrecompileAddresses) = +proc execPrecompile*(c: Computation, precompile: Precompiles) = let fork = c.fork let res = case precompile of paEcRecover: ecRecover(c) @@ -743,6 +829,7 @@ proc execPrecompile*(c: Computation, precompile: PrecompileAddresses) = of paBlsPairing: blsPairing(c) of paBlsMapG1: blsMapG1(c) of paBlsMapG2: blsMapG2(c) + of paP256Verify: p256verify(c) if res.isErr: if res.error.code == EvmErrorCode.OutOfGas: diff --git a/execution_chain/evm/secp256r1verify.nim b/execution_chain/evm/secp256r1verify.nim new file mode 100644 index 0000000000..c1f7568327 --- /dev/null +++ b/execution_chain/evm/secp256r1verify.nim @@ -0,0 +1,44 @@ +# Nimbus +# Copyright (c) 2018-2025 Status Research & Development GmbH +# Licensed under either of +# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or +# http://www.apache.org/licenses/LICENSE-2.0) +# * MIT license ([LICENSE-MIT](LICENSE-MIT) or +# http://opensource.org/licenses/MIT) +# at your option. This file may not be copied, modified, or distributed except +# according to those terms. + +import + libp2p/crypto/ecnist, + bearssl/[ec, hash] + +proc isInfinityByte*(data: openArray[byte]): bool = + ## Check if all values in ``data`` are zero. + for b in data: + if b != 0: + return false + return true + +proc verifyRaw*[T: byte | char]( + sig: EcSignature, message: openArray[T], pubkey: ecnist.EcPublicKey +): bool {.inline.} = + ## Verify ECDSA signature ``sig`` using public key ``pubkey`` and data + ## ``message``. + ## + ## Return ``true`` if message verification succeeded, ``false`` if + ## verification failed. 
+ doAssert((not isNil(sig)) and (not isNil(pubkey))) + var hc: HashCompatContext + var hash: array[32, byte] + var impl = ecGetDefault() + if pubkey.key.curve in EcSupportedCurvesCint: + let res = ecdsaI31VrfyRaw( + impl, + addr message[0], + uint(len(message)), + unsafeAddr pubkey.key, + addr sig.buffer[0], + uint(len(sig.buffer)), + ) + # Clear context with initial value + result = (res == 1) \ No newline at end of file diff --git a/tests/fixtures/PrecompileTests/P256Verify.json b/tests/fixtures/PrecompileTests/P256Verify.json new file mode 100644 index 0000000000..18a2be5e4b --- /dev/null +++ b/tests/fixtures/PrecompileTests/P256Verify.json @@ -0,0 +1,5473 @@ +{ + "func": "p256verify", + "fork": "osaka", + "data": [ + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050232ba3a8be6b94d5ec80a6d9d1190a436effe50d85a1eee859b8cc6af9bd5c2e184cd60b855d442f5b3c7b11eb6c4e0ae7525fe710fab9aa7c77a67f79e6fadd762927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #1: signature malleability", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023d45c5740946b2a147f59262ee6f5bc90bd01ed280528b62b3aed5fc93f06f739b329f479a2bbd0a5c384ee1493b1f5186a87139cac5df4087c134b49156847db2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #3: Modified r or s, e.g. 
by adding or subtracting the order of the group", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023d45c5741946b2a137f59262ee6f5bc91001af27a5e1117a64733950642a3d1e8b329f479a2bbd0a5c384ee1493b1f5186a87139cac5df4087c134b49156847db2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #5: Modified r or s, e.g. by adding or subtracting the order of the group", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050232ba3a8be6b94d5ec80a6d9d1190a436effe50d85a1eee859b8cc6af9bd5c2e184cd60b865d442f5a3c7b11eb6c4e0ae79578ec6353a20bf783ecb4b6ea97b8252927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #8: Modified r or s, e.g. 
by adding or subtracting the order of the group", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #9: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000012927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #10: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050230000000000000000000000000000000000000000000000000000000000000000ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325512927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #11: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050230000000000000000000000000000000000000000000000000000000000000000ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325502927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #12: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050230000000000000000000000000000000000000000000000000000000000000000ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325522927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #13: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050230000000000000000000000000000000000000000000000000000000000000000ffffffff00000001000000000000000000000000ffffffffffffffffffffffff2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #14: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050230000000000000000000000000000000000000000000000000000000000000000ffffffff000000010000000000000000000000010000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": 
"wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #15: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #16: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000012927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #17: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050230000000000000000000000000000000000000000000000000000000000000001ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325512927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #18: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050230000000000000000000000000000000000000000000000000000000000000001ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325502927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #19: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050230000000000000000000000000000000000000000000000000000000000000001ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325522927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #20: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050230000000000000000000000000000000000000000000000000000000000000001ffffffff00000001000000000000000000000000ffffffffffffffffffffffff2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #21: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050230000000000000000000000000000000000000000000000000000000000000001ffffffff000000010000000000000000000000010000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": 
"wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #22: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc63255100000000000000000000000000000000000000000000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #23: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc63255100000000000000000000000000000000000000000000000000000000000000012927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #24: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325512927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #25: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325502927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #26: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325522927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #27: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551ffffffff00000001000000000000000000000000ffffffffffffffffffffffff2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #28: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551ffffffff000000010000000000000000000000010000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": 
"wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #29: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc63255000000000000000000000000000000000000000000000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #30: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc63255000000000000000000000000000000000000000000000000000000000000000012927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #31: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632550ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325512927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #32: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632550ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325502927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #33: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632550ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325522927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #34: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632550ffffffff00000001000000000000000000000000ffffffffffffffffffffffff2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #35: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632550ffffffff000000010000000000000000000000010000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": 
"wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #36: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc63255200000000000000000000000000000000000000000000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #37: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc63255200000000000000000000000000000000000000000000000000000000000000012927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #38: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632552ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325512927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #39: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632552ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325502927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #40: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632552ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325522927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #41: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632552ffffffff00000001000000000000000000000000ffffffffffffffffffffffff2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #42: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632552ffffffff000000010000000000000000000000010000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": 
"wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #43: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000000ffffffffffffffffffffffff00000000000000000000000000000000000000000000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #44: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000000ffffffffffffffffffffffff00000000000000000000000000000000000000000000000000000000000000012927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #45: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000000ffffffffffffffffffffffffffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325512927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #46: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000000ffffffffffffffffffffffffffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325502927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #47: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000000ffffffffffffffffffffffffffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325522927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #48: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000000ffffffffffffffffffffffffffffffff00000001000000000000000000000000ffffffffffffffffffffffff2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #49: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000000ffffffffffffffffffffffffffffffff000000010000000000000000000000010000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": 
"wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #50: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff0000000100000000000000000000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #51: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff0000000100000000000000000000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000012927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #52: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000001000000000000000000000000ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325512927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #53: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000001000000000000000000000000ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325502927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #54: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000001000000000000000000000000ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325522927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #55: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000001000000000000000000000000ffffffff00000001000000000000000000000000ffffffffffffffffffffffff2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #56: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000001000000000000000000000000ffffffff000000010000000000000000000000010000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": 
"wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #57: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "70239dd877f7c944c422f44dea4ed1a52f2627416faf2f072fa50c772ed6f80764a1aab5000d0e804f3e2fc02bdee9be8ff312334e2ba16d11547c97711c898e6af015971cc30be6d1a206d4e013e0997772a2f91d73286ffd683b9bb2cf4f1b2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #58: Edge case for Shamir multiplication", + "NoBenchmark": false + }, + { + "Input": "00000000690ed426ccf17803ebe2bd0884bcd58a1bb5e7477ead3645f356e7a916aea964a2f6506d6f78c81c91fc7e8bded7d397738448de1e19a0ec580bf266252cd762130c6667cfe8b7bc47d27d78391e8e80c578d1cd38c3ff033be928e92927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #59: special case hash", + "NoBenchmark": false + }, + { + "Input": "7300000000213f2a525c6035725235c2f696ad3ebb5ee47f140697ad25770d919cc98be2347d469bf476dfc26b9b733df2d26d6ef524af917c665baccb23c882093496459effe2d8d70727b82462f61d0ec1b7847929d10ea631dacb16b56c322927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #60: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"ddf2000000005e0be0635b245f0b97978afd25daadeb3edb4a0161c27fe0604573b3c90ecd390028058164524dde892703dce3dea0d53fa8093999f07ab8aa432f67b0b8e20636695bb7d8bf0a651c802ed25a395387b5f4188c0c4075c886342927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #61: special case hash", + "NoBenchmark": false + }, + { + "Input": "67ab1900000000784769c4ecb9e164d6642b8499588b89855be1ec355d0841a0bfab3098252847b328fadf2f89b95c851a7f0eb390763378f37e90119d5ba3ddbdd64e234e832b1067c2d058ccb44d978195ccebb65c2aaf1e2da9b8b4987e3b2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #62: special case hash", + "NoBenchmark": false + }, + { + "Input": "a2bf09460000000076d7dbeffe125eaf02095dff252ee905e296b6350fc311cf204a9784074b246d8bf8bf04a4ceb1c1f1c9aaab168b1596d17093c5cd21d2cd51cce41670636783dc06a759c8847868a406c2506fe17975582fe648d1d88b522927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #63: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"3554e827c700000000e1e75e624a06b3a0a353171160858129e15c544e4f0e65ed66dc34f551ac82f63d4aa4f81fe2cb0031a91d1314f835027bca0f1ceeaa0399ca123aa09b13cd194a422e18d5fda167623c3f6e5d4d6abb8953d67c0c48c72927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #64: special case hash", + "NoBenchmark": false + }, + { + "Input": "9b6cd3b812610000000026941a0f0bb53255ea4c9fd0cb3426e3a54b9fc6965c060b700bef665c68899d44f2356a578d126b062023ccc3c056bf0f60a237012b8d186c027832965f4fcc78a3366ca95dedbb410cbef3f26d6be5d581c11d36102927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #65: special case hash", + "NoBenchmark": false + }, + { + "Input": "883ae39f50bf0100000000e7561c26fc82a52baa51c71ca877162f93c4ae01869f6adfe8d5eb5b2c24d7aa7934b6cf29c93ea76cd313c9132bb0c8e38c96831db26a9c9e40e55ee0890c944cf271756c906a33e66b5bd15e051593883b5e99022927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #66: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"a1ce5d6e5ecaf28b0000000000fa7cd010540f420fb4ff7401fe9fce011d0ba6a1af03ca91677b673ad2f33615e56174a1abf6da168cebfa8868f4ba273f16b720aa73ffe48afa6435cd258b173d0c2377d69022e7d098d75caf24c8c5e06b1c2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #67: special case hash", + "NoBenchmark": false + }, + { + "Input": "8ea5f645f373f580930000000038345397330012a8ee836c5494cdffd5ee8054fdc70602766f8eed11a6c99a71c973d5659355507b843da6e327a28c11893db93df5349688a085b137b1eacf456a9e9e0f6d15ec0078ca60a7f83f2b10d213502927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #68: special case hash", + "NoBenchmark": false + }, + { + "Input": "660570d323e9f75fa734000000008792d65ce93eabb7d60d8d9c1bbdcb5ef305b516a314f2fce530d6537f6a6c49966c23456f63c643cf8e0dc738f7b876e675d39ffd033c92b6d717dd536fbc5efdf1967c4bd80954479ba66b0120cd16fff22927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #69: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"d0462673154cce587dde8800000000e98d35f1f45cf9c3bf46ada2de4c568c343b2cbf046eac45842ecb7984d475831582717bebb6492fd0a485c101e29ff0a84c9b7b47a98b0f82de512bc9313aaf51701099cac5f76e68c8595fc1c1d992582927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #70: special case hash", + "NoBenchmark": false + }, + { + "Input": "bd90640269a7822680cedfef000000000caef15a6171059ab83e7b4418d7278f30c87d35e636f540841f14af54e2f9edd79d0312cfa1ab656c3fb15bfde48dcf47c15a5a82d24b75c85a692bd6ecafeb71409ede23efd08e0db9abf6340677ed2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #71: special case hash", + "NoBenchmark": false + }, + { + "Input": "33239a52d72f1311512e41222a00000000d2dcceb301c54b4beae8e284788a7338686ff0fda2cef6bc43b58cfe6647b9e2e8176d168dec3c68ff262113760f52067ec3b651f422669601662167fa8717e976e2db5e6a4cf7c2ddabb3fde9d67d2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #72: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"b8d64fbcd4a1c10f1365d4e6d95c000000007ee4a21a1cbe1dc84c2d941ffaf144a3e23bf314f2b344fc25c7f2de8b6af3e17d27f5ee844b225985ab6e2775cf2d48e223205e98041ddc87be532abed584f0411f5729500493c9cc3f4dd15e862927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #73: special case hash", + "NoBenchmark": false + }, + { + "Input": "01603d3982bf77d7a3fef3183ed092000000003a227420db4088b20fe0e9d84a2ded5b7ec8e90e7bf11f967a3d95110c41b99db3b5aa8d330eb9d638781688e97d5792c53628155e1bfc46fb1a67e3088de049c328ae1f44ec69238a009808f92927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #74: special case hash", + "NoBenchmark": false + }, + { + "Input": "9ea6994f1e0384c8599aa02e6cf66d9c000000004d89ef50b7e9eb0cfbff7363bdae7bcb580bf335efd3bc3d31870f923eaccafcd40ec2f605976f15137d8b8ff6dfa12f19e525270b0106eecfe257499f373a4fb318994f24838122ce7ec3c72927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #75: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"d03215a8401bcf16693979371a01068a4700000000e2fa5bf692bc670905b18c50f9c4f0cd6940e162720957ffff513799209b78596956d21ece251c2401f1c6d7033a0a787d338e889defaaabb106b95a4355e411a59c32aa5167dfab2447262927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #76: special case hash", + "NoBenchmark": false + }, + { + "Input": "307bfaaffb650c889c84bf83f0300e5dc87e000000008408fd5f64b582e3bb14f612820687604fa01906066a378d67540982e29575d019aabe90924ead5c860d3f9367702dd7dd4f75ea98afd20e328a1a99f4857b316525328230ce294b0fef2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #77: special case hash", + "NoBenchmark": false + }, + { + "Input": "bab5c4f4df540d7b33324d36bb0c157551527c00000000e4af574bb4d54ea6b89505e407657d6e8bc93db5da7aa6f5081f61980c1949f56b0f2f507da5782a7ac60d31904e3669738ffbeccab6c3656c08e0ed5cb92b3cfa5e7f71784f9c50212927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #78: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"d4ba47f6ae28f274e4f58d8036f9c36ec2456f5b00000000c3b869197ef5e15ebbd16fbbb656b6d0d83e6a7787cd691b08735aed371732723e1c68a40404517d9d8e35dba96028b7787d91315be675877d2d097be5e8ee34560e3e7fd25c0f002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #79: special case hash", + "NoBenchmark": false + }, + { + "Input": "79fd19c7235ea212f29f1fa00984342afe0f10aafd00000000801e47f8c184e12ec9760122db98fd06ea76848d35a6da442d2ceef7559a30cf57c61e92df327e7ab271da90859479701fccf86e462ee3393fb6814c27b760c4963625c0a198782927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #80: special case hash", + "NoBenchmark": false + }, + { + "Input": "8c291e8eeaa45adbaf9aba5c0583462d79cbeb7ac97300000000a37ea6700cda54e76b7683b6650baa6a7fc49b1c51eed9ba9dd463221f7a4f1005a89fe00c592ea076886c773eb937ec1cc8374b7915cfd11b1c1ae1166152f2f7806a31c8fd2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #81: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"0eaae8641084fa979803efbfb8140732f4cdcf66c3f78a000000003c278a6b215291deaf24659ffbbce6e3c26f6021097a74abdbb69be4fb10419c0c496c946665d6fcf336d27cc7cdb982bb4e4ecef5827f84742f29f10abf83469270a03dc32927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #82: special case hash", + "NoBenchmark": false + }, + { + "Input": "e02716d01fb23a5a0068399bf01bab42ef17c6d96e13846c00000000afc0f89d207a3241812d75d947419dc58efb05e8003b33fc17eb50f9d15166a88479f107cdee749f2e492b213ce80b32d0574f62f1c5d70793cf55e382d5caadf75927672927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #83: special case hash", + "NoBenchmark": false + }, + { + "Input": "9eb0bf583a1a6b9a194e9a16bc7dab2a9061768af89d00659a00000000fc7de16554e49f82a855204328ac94913bf01bbe84437a355a0a37c0dee3cf81aa7728aea00de2507ddaf5c94e1e126980d3df16250a2eaebc8be486effe7f22b4f9292927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #84: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"62aac98818b3b84a2c214f0d5e72ef286e1030cb53d9a82b690e00000000cd15a54c5062648339d2bff06f71c88216c26c6e19b4d80a8c602990ac82707efdfce99bbe7fcfafae3e69fd016777517aa01056317f467ad09aff09be73c9731b0d2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #85: special case hash", + "NoBenchmark": false + }, + { + "Input": "3760a7f37cf96218f29ae43732e513efd2b6f552ea4b6895464b9300000000c8975bd7157a8d363b309f1f444012b1a1d23096593133e71b4ca8b059cff37eaf7faa7a28b1c822baa241793f2abc930bd4c69840fe090f2aacc46786bf9196222927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #86: special case hash", + "NoBenchmark": false + }, + { + "Input": "0da0a1d2851d33023834f2098c0880096b4320bea836cd9cbb6ff6c8000000005694a6f84b8f875c276afd2ebcfe4d61de9ec90305afb1357b95b3e0da43885e0dffad9ffd0b757d8051dec02ebdf70d8ee2dc5c7870c0823b6ccc7c679cbaa42927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #87: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"ffffffff293886d3086fd567aafd598f0fe975f735887194a764a231e82d289aa0c30e8026fdb2b4b4968a27d16a6d08f7098f1a98d21620d7454ba9790f1ba65e470453a8a399f15baf463f9deceb53acc5ca64459149688bd2760c654243392927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #88: special case hash", + "NoBenchmark": false + }, + { + "Input": "7bffffffff2376d1e3c03445a072e24326acdc4ce127ec2e0e8d9ca99527e7b7614ea84acf736527dd73602cd4bb4eea1dfebebd5ad8aca52aa0228cf7b99a88737cc85f5f2d2f60d1b8183f3ed490e4de14368e96a9482c2a4dd193195c902f2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #89: special case hash", + "NoBenchmark": false + }, + { + "Input": "a2b5ffffffffebb251b085377605a224bc80872602a6e467fd016807e97fa395bead6734ebe44b810d3fb2ea00b1732945377338febfd439a8d74dfbd0f942fa6bb18eae36616a7d3cad35919fd21a8af4bbe7a10f73b3e036a46b103ef56e2a2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #90: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"641227ffffffff6f1b96fa5f097fcf3cc1a3c256870d45a67b83d0967d4b20c0499625479e161dacd4db9d9ce64854c98d922cbf212703e9654fae182df9bad242c177cf37b8193a0131108d97819edd9439936028864ac195b64fca76d9d6932927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #91: special case hash", + "NoBenchmark": false + }, + { + "Input": "958415d8ffffffffabad03e2fc662dc3ba203521177502298df56f36600e0f8b08f16b8093a8fb4d66a2c8065b541b3d31e3bfe694f6b89c50fb1aaa6ff6c9b29d6455e2d5d1779748573b611cb95d4a21f967410399b39b535ba3e5af81ca2e2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #92: special case hash", + "NoBenchmark": false + }, + { + "Input": "f1d8de4858ffffffff1281093536f47fe13deb04e1fbe8fb954521b6975420f8be26231b6191658a19dd72ddb99ed8f8c579b6938d19bce8eed8dc2b338cb5f8e1d9a32ee56cffed37f0f22b2dcb57d5c943c14f79694a03b9c5e96952575c892927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #93: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"0927895f2802ffffffff10782dd14a3b32dc5d47c05ef6f1876b95c81fc31def15e76880898316b16204ac920a02d58045f36a229d4aa4f812638c455abe0443e74d357d3fcb5c8c5337bd6aba4178b455ca10e226e13f9638196506a19391232927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #94: special case hash", + "NoBenchmark": false + }, + { + "Input": "60907984aa7e8effffffff4f332862a10a57c3063fb5a30624cf6a0c3ac80589352ecb53f8df2c503a45f9846fc28d1d31e6307d3ddbffc1132315cc07f16dad1348dfa9c482c558e1d05c5242ca1c39436726ecd28258b1899792887dd0a3c62927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #95: special case hash", + "NoBenchmark": false + }, + { + "Input": "c6ff198484939170ffffffff0af42cda50f9a5f50636ea6942d6b9b8cd6ae1e24a40801a7e606ba78a0da9882ab23c7677b8642349ed3d652c5bfa5f2a9558fb3a49b64848d682ef7f605f2832f7384bdc24ed2925825bf8ea77dc59817257822927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #96: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"de030419345ca15c75ffffffff8074799b9e0956cc43135d16dfbe4d27d7e68deacc5e1a8304a74d2be412b078924b3bb3511bac855c05c9e5e9e44df3d61e967451cd8e18d6ed1885dd827714847f96ec4bb0ed4c36ce9808db8f714204f6d12927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #97: special case hash", + "NoBenchmark": false + }, + { + "Input": "6f0e3eeaf42b28132b88fffffffff6c8665604d34acb19037e1ab78caaaac6ff2f7a5e9e5771d424f30f67fdab61e8ce4f8cd1214882adb65f7de94c31577052ac4e69808345809b44acb0b2bd889175fb75dd050c5a449ab9528f8f78daa10c2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #98: special case hash", + "NoBenchmark": false + }, + { + "Input": "cdb549f773b3e62b3708d1ffffffffbe48f7c0591ddcae7d2cb222d1f8017ab9ffcda40f792ce4d93e7e0f0e95e1a2147dddd7f6487621c30a03d710b330021979938b55f8a17f7ed7ba9ade8f2065a1fa77618f0b67add8d58c422c2453a49a2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #99: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"2c3f26f96a3ac0051df4989bffffffff9fd64886c1dc4f9924d8fd6f0edb048481f2359c4faba6b53d3e8c8c3fcc16a948350f7ab3a588b28c17603a431e39a8cd6f6a5cc3b55ead0ff695d06c6860b509e46d99fccefb9f7f9e101857f743002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #100: special case hash", + "NoBenchmark": false + }, + { + "Input": "ac18f8418c55a2502cb7d53f9affffffff5c31d89fda6a6b8476397c04edf411dfc8bf520445cbb8ee1596fb073ea283ea130251a6fdffa5c3f5f2aaf75ca808048e33efce147c9dd92823640e338e68bfd7d0dc7a4905b3a7ac711e577e90e72927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #101: special case hash", + "NoBenchmark": false + }, + { + "Input": "4f9618f98e2d3a15b24094f72bb5ffffffffa2fd3e2893683e5a6ab8cf0ee610ad019f74c6941d20efda70b46c53db166503a0e393e932f688227688ba6a576293320eb7ca0710255346bdbb3102cdcf7964ef2e0988e712bc05efe16c1993452927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #102: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"422e82a3d56ed10a9cc21d31d37a25ffffffff67edf7c40204caae73ab0bc75aac8096842e8add68c34e78ce11dd71e4b54316bd3ebf7fffdeb7bd5a3ebc1883f5ca2f4f23d674502d4caf85d187215d36e3ce9f0ce219709f21a3aac003b7a82927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #103: special case hash", + "NoBenchmark": false + }, + { + "Input": "7075d245ccc3281b6e7b329ff738fbb417a5ffffffffa0842d9890b5cf95d018677b2d3a59b18a5ff939b70ea002250889ddcd7b7b9d776854b4943693fb92f76b4ba856ade7677bf30307b21f3ccda35d2f63aee81efd0bab6972cc0795db552927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #104: special case hash", + "NoBenchmark": false + }, + { + "Input": "3c80de54cd9226989443d593fa4fd6597e280ebeffffffffc1847eb76c217a95479e1ded14bcaed0379ba8e1b73d3115d84d31d4b7c30e1f05e1fc0d5957cfb0918f79e35b3d89487cf634a4f05b2e0c30857ca879f97c771e877027355b24432927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #105: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"de21754e29b85601980bef3d697ea2770ce891a8cdffffffffc7906aa794b39b43dfccd0edb9e280d9a58f01164d55c3d711e14b12ac5cf3b64840ead512a0a31dbe33fa8ba84533cd5c4934365b3442ca1174899b78ef9a3199f495843897722927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #106: special case hash", + "NoBenchmark": false + }, + { + "Input": "8f65d92927cfb86a84dd59623fb531bb599e4d5f7289ffffffff2f1f2f57881c5b09ab637bd4caf0f4c7c7e4bca592fea20e9087c259d26a38bb4085f0bbff1145b7eb467b6748af618e9d80d6fdcd6aa24964e5a13f885bca8101de08eb0d752927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #107: special case hash", + "NoBenchmark": false + }, + { + "Input": "6b63e9a74e092120160bea3877dace8a2cc7cd0e8426cbfffffffffafc8c3ca85e9b1c5a028070df5728c5c8af9b74e0667afa570a6cfa0114a5039ed15ee06fb1360907e2d9785ead362bb8d7bd661b6c29eeffd3c5037744edaeb9ad990c202927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #108: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"fc28259702a03845b6d75219444e8b43d094586e249c8699ffffffffe852512e0671a0a85c2b72d54a2fb0990e34538b4890050f5a5712f6d1a7a5fb8578f32edb1846bab6b7361479ab9c3285ca41291808f27fd5bd4fdac720e5854713694c2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #109: special case hash", + "NoBenchmark": false + }, + { + "Input": "1273b4502ea4e3bccee044ee8e8db7f774ecbcd52e8ceb571757ffffffffe20a7673f8526748446477dbbb0590a45492c5d7d69859d301abbaedb35b2095103a3dc70ddf9c6b524d886bed9e6af02e0e4dec0d417a414fed3807ef4422913d7c2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #110: special case hash", + "NoBenchmark": false + }, + { + "Input": "08fb565610a79baa0c566c66228d81814f8c53a15b96e602fb49ffffffffff6e7f085441070ecd2bb21285089ebb1aa6450d1a06c36d3ff39dfd657a796d12b5249712012029870a2459d18d47da9aa492a5e6cb4b2d8dafa9e4c5c54a2b9a8b2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #111: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"d59291cc2cf89f3087715fcb1aa4e79aa2403f748e97d7cd28ecaefeffffffff914c67fb61dd1e27c867398ea7322d5ab76df04bc5aa6683a8e0f30a5d287348fa07474031481dda4953e3ac1959ee8cea7e66ec412b38d6c96d28f6d37304ea2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #112: special case hash", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023000000000000000000000000000000004319055358e8617b0c46353d039cdaabffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc63254e0ad99500288d466940031d72a9f5445a4d43784640855bf0a69874d2de5fe103c5011e6ef2c42dcd50d5d3d29f99ae6eba2c80c9244f4c5422f0979ff0c3ba5e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #113: k*G has a large x-coordinate", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000000fffffffffffffffffffffffcffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc63254e0ad99500288d466940031d72a9f5445a4d43784640855bf0a69874d2de5fe103c5011e6ef2c42dcd50d5d3d29f99ae6eba2c80c9244f4c5422f0979ff0c3ba5e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #114: r too large", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc63254fffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc63254eab05fd9d0de26b9ce6f4819652d9fc69193d0aa398f0fba8013e09c58220455419235271228c786759095d12b75af0692dd4103f19f6a8c32f49435a1e9b8d45", + 
"Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #115: r,s are large", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd909135bdb6799286170f5ead2de4f6511453fe50914f3df2de54a36383df8dd480984f39a1ff38a86a68aa4201b6be5dfbfecf876219710b07badf6fdd4c6c5611feb97390d9826e7a06dfb41871c940d74415ed3cac2089f1445019bb55ed95", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #116: r and s^-1 have a large Hamming weight", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd27b4577ca009376f71303fd5dd227dcef5deb773ad5f5a84360644669ca249a54201b4272944201c3294f5baa9a3232b6dd687495fcc19a70a95bc602b4f7c0595c37eba9ee8171c1bb5ac6feaf753bc36f463e3aef16629572c0c0a8fb0800e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #117: r and s^-1 have a large Hamming weight", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca60502300000000000000000000000000000000000000000000000000000000000000050000000000000000000000000000000000000000000000000000000000000001a71af64de5126a4a4e02b7922d66ce9415ce88a4c9d25514d91082c8725ac9575d47723c8fbe580bb369fec9c2665d8e30a435b9932645482e7c9f11e872296b", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #118: small r and s", + "NoBenchmark": false + }, + 
{ + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023000000000000000000000000000000000000000000000000000000000000000500000000000000000000000000000000000000000000000000000000000000036627cec4f0731ea23fc2931f90ebe5b7572f597d20df08fc2b31ee8ef16b15726170ed77d8d0a14fc5c9c3c4c9be7f0d3ee18f709bb275eaf2073e258fe694a5", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #120: small r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023000000000000000000000000000000000000000000000000000000000000000500000000000000000000000000000000000000000000000000000000000000055a7c8825e85691cce1f5e7544c54e73f14afc010cb731343262ca7ec5a77f5bfef6edf62a4497c1bd7b147fb6c3d22af3c39bfce95f30e13a16d3d7b2812f813", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #122: small r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca60502300000000000000000000000000000000000000000000000000000000000000050000000000000000000000000000000000000000000000000000000000000006cbe0c29132cd738364fedd603152990c048e5e2fff996d883fa6caca7978c73770af6a8ce44cb41224b2603606f4c04d188e80bff7cc31ad5189d4ab0d70e8c1", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #124: small r and s", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325560000000000000000000000000000000000000000000000000000000000000006cbe0c29132cd738364fedd603152990c048e5e2fff996d883fa6caca7978c73770af6a8ce44cb41224b2603606f4c04d188e80bff7cc31ad5189d4ab0d70e8c1", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #126: r is larger than n", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050230000000000000000000000000000000000000000000000000000000000000005ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc75fbd84be4178097002f0deab68f0d9a130e0ed33a6795d02a20796db83444b037e13920f13051e0eecdcfce4dacea0f50d1f247caa669f193c1b4075b51ae296d2d56", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #127: s is larger than n", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca60502300000000000000000000000000000000000000000000000000000000000001008f1e3c7862c58b16bb76eddbb76eddbb516af4f63f2d74d76e0d28c9bb75ea88d0f73792203716afd4be4329faa48d269f15313ebbba379d7783c97bf3e890d9971f4a3206605bec21782bf5e275c714417e8f566549e6bc68690d2363c89cc1", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #128: small r and s^-1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023000000000000000000000000000000000000000000000000002d9b4d347952d6ef3043e7329581dbb3974497710ab11505ee1c87ff907beebadd195a0ffe6d7a4838b2be35a6276a80ef9e228140f9d9b96ce83b7a254f71ccdebbb8054ce05ffa9cbc123c919b19e00238198d04069043bd660a828814051fcb8aac738a6c6b", + "Expected": 
"0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #129: smallish r and s^-1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023000000000000000000000000000000000000001033e67e37b32b445580bf4eff8b748b74000000008b748b748b748b7466e769ad4a16d3dcd87129b8e91d1b4d7393983ca30a520bbc4783dc9960746aab444ef520c0a8e771119aa4e74b0f64e9d7be1ab01a0bf626e709863e6a486dbaf32793afccf774e2c6cd27b1857526", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #130: 100-bit r and small s^-1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050230000000000000000000000000000000000000000000000000000000000000100ef9f6ba4d97c09d03178fa20b4aaad83be3cf9cb824a879fec3270fc4b81ef5b5ac331a1103fe966697379f356a937f350588a05477e308851b8a502d5dfcdc5fe9993df4b57939b2b8da095bf6d794265204cfe03be995a02e65d408c871c0b", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #131: small r and 100 bit s^-1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca60502300000000000000000000000000000000000000062522bbd3ecbe7c39e93e7c25ef9f6ba4d97c09d03178fa20b4aaad83be3cf9cb824a879fec3270fc4b81ef5b1d209be8de2de877095a399d3904c74cc458d926e27bb8e58e5eae5767c41509dd59e04c214f7b18dce351fc2a549893a6860e80163f38cc60a4f2c9d040d8c9", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #132: 100-bit r and s^-1", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6324d5555555550000000055555555555555553ef7a8e48d07df81a693439654210c70083539fbee44625e3acaafa2fcb41349392cef0633a1b8fabecee0c133b10e99915c1ebe7bf00df8535196770a58047ae2a402f26326bb7d41d4d7616337911e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #133: r and s^-1 are close to n", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023555555550000000055555555555555553ef7a8e48d07df81a693439654210c7000000000000000000000000000000000000000000000000000000000000000018aeb368a7027a4d64abdea37390c0c1d6a26f399e2d9734de1eb3d0e1937387405bd13834715e1dbae9b875cf07bd55e1b6691c7f7536aef3b19bf7a4adf576d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #134: s == 1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023555555550000000055555555555555553ef7a8e48d07df81a693439654210c7000000000000000000000000000000000000000000000000000000000000000008aeb368a7027a4d64abdea37390c0c1d6a26f399e2d9734de1eb3d0e1937387405bd13834715e1dbae9b875cf07bd55e1b6691c7f7536aef3b19bf7a4adf576d", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #135: s == 0", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237fffffff800000007fffffffffffffffde737d56d38bcf4279dce5617e3192a8555555550000000055555555555555553ef7a8e48d07df81a693439654210c70b533d4695dd5b8c5e07757e55e6e516f7e2c88fa0239e23f60e8ec07dd70f2871b134ee58cc583278456863f33c3a85d881f7d4a39850143e29d4eaf009afe47", + "Expected": "", + 
"Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #136: point at infinity during verify", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237fffffff800000007fffffffffffffffde737d56d38bcf4279dce5617e3192a97fffffff800000007fffffffffffffffde737d56d38bcf4279dce5617e3192a8f50d371b91bfb1d7d14e1323523bc3aa8cbf2c57f9e284de628c8b4536787b86f94ad887ac94d527247cd2e7d0c8b1291c553c9730405380b14cbb209f5fa2dd", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #137: edge case for signature malleability", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237fffffff800000007fffffffffffffffde737d56d38bcf4279dce5617e3192a97fffffff800000007fffffffffffffffde737d56d38bcf4279dce5617e3192a968ec6e298eafe16539156ce57a14b04a7047c221bafc3a582eaeb0d857c4d94697bed1af17850117fdb39b2324f220a5698ed16c426a27335bb385ac8ca6fb30", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #138: edge case for signature malleability", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023555555550000000055555555555555553ef7a8e48d07df81a693439654210c70bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca60502369da0364734d2e530fece94019265fefb781a0f1b08f6c8897bdf6557927c8b866d2d3c7dcd518b23d726960f069ad71a933d86ef8abbcce8b20f71e2a847002", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #139: u1 == 1", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023555555550000000055555555555555553ef7a8e48d07df81a693439654210c7044a5ad0ad0636d9f12bc9e0a6bdd5e1cbcb012ea7bf091fcec15b0c43202d52ed8adc00023a8edc02576e2b63e3e30621a471e2b2320620187bf067a1ac1ff3233e2b50ec09807accb36131fff95ed12a09a86b4ea9690aa32861576ba2362e1", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #140: u1 == n - 1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023555555550000000055555555555555553ef7a8e48d07df81a693439654210c70555555550000000055555555555555553ef7a8e48d07df81a693439654210c703623ac973ced0a56fa6d882f03a7d5c7edca02cfc7b2401fab3690dbe75ab7858db06908e64b28613da7257e737f39793da8e713ba0643b92e9bb3252be7f8fe", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #141: u2 == 1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023555555550000000055555555555555553ef7a8e48d07df81a693439654210c70aaaaaaaa00000000aaaaaaaaaaaaaaaa7def51c91a0fbf034d26872ca84218e1cf04ea77e9622523d894b93ff52dc3027b31959503b6fa3890e5e04263f922f1e8528fb7c006b3983c8b8400e57b4ed71740c2f3975438821199bedeaecab2e9", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #142: u2 == n - 1", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffde91e1ba60fdedb76a46bcb51dc0b8b4b7e019f0a28721885fa5d3a8196623397db7a2c8a1ab573e5929dc24077b508d7e683d49227996bda3e9f78dbeff773504f417f3bc9a88075c2e0aadd5a13311730cf7cc76a82f11a36eaf08a6c99a206", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #143: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdfdea5843ffeb73af94313ba4831b53fe24f799e525b1e8e8c87b59b95b430ad9dead11c7a5b396862f21974dc4752fadeff994efe9bbd05ab413765ea80b6e1f1de3f0640e8ac6edcf89cff53c40e265bb94078a343736df07aa0318fc7fe1ff", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #144: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd03ffcabf2f1b4d2a65190db1680d62bb994e41c5251cd73b3c3dfc5e5bafc035d0bc472e0d7c81ebaed3a6ef96c18613bb1fea6f994326fbe80e00dfde67c7e9986c723ea4843d48389b946f64ad56c83ad70ff17ba85335667d1bb9fa619efd", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #145: edge case for u1", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd4dfbc401f971cd304b33dfdb17d0fed0fe4c1a88ae648e0d2847f74977534989a0a44ca947d66a2acb736008b9c08d1ab2ad03776e02640f78495d458dd51c326337fe5cf8c4604b1f1c409dc2d872d4294a4762420df43a30a2392e40426add", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #146: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdbc4024761cd2ffd43dfdb17d0fed112b988977055cd3a8e54971eba9cda5ca71c9c2115290d008b45fb65fad0f602389298c25420b775019d42b62c3ce8a96b73877d25a8080dc02d987ca730f0405c2c9dbefac46f9e601cc3f06e9713973fd", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #147: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd788048ed39a5ffa77bfb62fa1fda2257742bf35d128fb3459f2a0c909ee86f915eca1ef4c287dddc66b8bccf1b88e8a24c0018962f3c5e7efa83bc1a5ff6033e5e79c4cb2c245b8c45abdce8a8e4da758d92a607c32cd407ecaef22f1c934a71", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #148: edge case for u1", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd476d9131fd381bd917d0fed112bc9e0a5924b5ed5b11167edd8b23582b3cb15e5caaa030e7fdf0e4936bc7ab5a96353e0a01e4130c3f8bf22d473e317029a47adeb6adc462f7058f2a20d371e9702254e9b201642005b3ceda926b42b178bef9", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #149: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd8374253e3e21bd154448d0a8f640fe46fafa8b19ce78d538f6cc0a19662d3601c2fd20bac06e555bb8ac0ce69eb1ea20f83a1fc3501c8a66469b1a31f619b0986237050779f52b615bd7b8d76a25fc95ca2ed32525c75f27ffc87ac397e6cbaf", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #150: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd357cfd3be4d01d413c5b9ede36cba5452c11ee7fe14879e749ae6a2d897a52d63fd6a1ca7f77fb3b0bbe726c372010068426e11ea6ae78ce17bedae4bba86ced03ce5516406bf8cfaab8745eac1cd69018ad6f50b5461872ddfc56e0db3c8ff4", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #151: edge case for u1", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd29798c5c0ee287d4a5e8e6b799fd86b8df5225298e6ffc807cd2f2bc27a0a6d89cb8e51e27a5ae3b624a60d6dc32734e4989db20e9bca3ede1edf7b086911114b4c104ab3c677e4b36d6556e8ad5f523410a19f2e277aa895fc57322b4427544", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #152: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd0b70f22c781092452dca1a5711fa3a5a1f72add1bf52c2ff7cae4820b30078dda3e52c156dcaf10502620b7955bc2b40bc78ef3d569e1223c262512d8f49602a4a2039f31c1097024ad3cc86e57321de032355463486164cf192944977df147f", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #153: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd16e1e458f021248a5b9434ae23f474b43ee55ba37ea585fef95c90416600f1baf19b78928720d5bee8e670fb90010fb15c37bf91b58a5157c3f3c059b2655e88cf701ec962fb4a11dcf273f5dc357e58468560c7cfeb942d074abd4329260509", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #154: edge case for u1", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd2252d6856831b6cf895e4f0535eeaf0e5e5809753df848fe760ad86219016a9783a744459ecdfb01a5cf52b27a05bb7337482d242f235d7b4cb89345545c90a8c05d49337b9649813287de9ffe90355fd905df5f3c32945828121f37cc50de6e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #155: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd81ffe55f178da695b28c86d8b406b15dab1a9e39661a3ae017fbe390ac0972c3dd13c6b34c56982ddae124f039dfd23f4b19bbe88cee8e528ae51e5d6f3a21d7bfad4c2e6f263fe5eb59ca974d039fc0e4c3345692fb5320bdae4bd3b42a45ff", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #156: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd7fffffffaaaaaaaaffffffffffffffffe9a2538f37b28a2c513dee40fecbb71a67e6f659cdde869a2f65f094e94e5b4dfad636bbf95192feeed01b0f3deb7460a37e0a51f258b7aeb51dfe592f5cfd5685bbe58712c8d9233c62886437c38ba0", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #157: edge case for u2", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdb62f26b5f2a2b26f6de86d42ad8a13da3ab3cccd0459b201de009e526adf21f22eb6412505aec05c6545f029932087e490d05511e8ec1f599617bb367f9ecaaf805f51efcc4803403f9b1ae0124890f06a43fedcddb31830f6669af292895cb0", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #158: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdbb1d9ac949dd748cd02bbbe749bd351cd57b38bb61403d700686aa7b4c90851e84db645868eab35e3a9fd80e056e2e855435e3a6b68d75a50a854625fe0d7f356d2589ac655edc9a11ef3e075eddda9abf92e72171570ef7bf43a2ee39338cfe", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #159: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd66755a00638cdaec1c732513ca0234ece52545dac11f816e818f725b4f60aaf291b9e47c56278662d75c0983b22ca8ea6aa5059b7a2ff7637eb2975e386ad66349aa8ff283d0f77c18d6d11dc062165fd13c3c0310679c1408302a16854ecfbd", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #160: edge case for u2", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd55a00c9fcdaebb6032513ca0234ecfffe98ebe492fdf02e48ca48e982beb3669f3ec2f13caf04d0192b47fb4c5311fb6d4dc6b0a9e802e5327f7ec5ee8e4834df97e3e468b7d0db867d6ecfe81e2b0f9531df87efdb47c1338ac321fefe5a432", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #161: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdab40193f9b5d76c064a27940469d9fffd31d7c925fbe05c919491d3057d66cd2d92b200aefcab6ac7dafd9acaf2fa10b3180235b8f46b4503e4693c670fccc885ef2f3aebf5b317475336256768f7c19efb7352d27e4cccadc85b6b8ab922c72", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #162: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdca0234ebb5fdcb13ca0234ecffffffffcb0dadbbc7f549f8a26b4408d0dc86000a88361eb92ecca2625b38e5f98bbabb96bf179b3d76fc48140a3bcd881523cde6bdf56033f84a5054035597375d90866aa2c96b86a41ccf6edebf47298ad489", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #163: edge case for u2", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdbfffffff3ea3677e082b9310572620ae19933a9e65b285598711c77298815ad3d0fb17ccd8fafe827e0c1afc5d8d80366e2b20e7f14a563a2ba50469d84375e868612569d39e2bb9f554355564646de99ac602cc6349cf8c1e236a7de7637d93", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #164: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd266666663bbbbbbbe6666666666666665b37902e023fab7c8f055d86e5cc41f4836f33bbc1dc0d3d3abbcef0d91f11e2ac4181076c9af0a22b1e4309d3edb2769ab443ff6f901e30c773867582997c2bec2b0cb8120d760236f3a95bbe881f75", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #165: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdbfffffff36db6db7a492492492492492146c573f4c6dfc8d08a443e258970b0992f99fbe973ed4a299719baee4b432741237034dec8d72ba5103cb33e55feeb8033dd0e91134c734174889f3ebcf1b7a1ac05767289280ee7a794cebd6e69697", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #166: edge case for u2", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdbfffffff2aaaaaab7fffffffffffffffc815d0e60b3e596ecb1ad3a27cfd49c4d35ba58da30197d378e618ec0fa7e2e2d12cffd73ebbb2049d130bba434af09eff83986e6875e41ea432b7585a49b3a6c77cbb3c47919f8e82874c794635c1d2", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #167: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd7fffffff55555555ffffffffffffffffd344a71e6f651458a27bdc81fd976e378651ce490f1b46d73f3ff475149be29136697334a519d7ddab0725c8d0793224e11c65bd8ca92dc8bc9ae82911f0b52751ce21dd9003ae60900bd825f590cc28", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #168: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd3fffffff800000007fffffffffffffffde737d56d38bcf4279dce5617e3192aa6d8e1b12c831a0da8795650ff95f101ed921d9e2f72b15b1cdaca9826b9cfc6def6d63e2bc5c089570394a4bc9f892d5e6c7a6a637b20469a58c106ad486bf37", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #169: edge case for u2", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd5d8ecd64a4eeba466815ddf3a4de9a8e6abd9c5db0a01eb80343553da648428f0ae580bae933b4ef2997cbdbb0922328ca9a410f627a0f7dff24cb4d920e15428911e7f8cc365a8a88eb81421a361ccc2b99e309d8dcd9a98ba83c3949d893e3", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #170: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050236f2347cab7dd76858fe0555ac3bc99048c4aacafdfb6bcbe05ea6c42c4934569bb726660235793aa9957a61e76e00c2c435109cf9a15dd624d53f4301047856b5b812fd521aafa69835a849cce6fbdeb6983b442d2444fe70e134c027fc46963838a40f2a36092e9004e92d8d940cf5638550ce672ce8b8d4e15eba5499249e9", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #171: point duplication during verification", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050236f2347cab7dd76858fe0555ac3bc99048c4aacafdfb6bcbe05ea6c42c4934569bb726660235793aa9957a61e76e00c2c435109cf9a15dd624d53f4301047856b5b812fd521aafa69835a849cce6fbdeb6983b442d2444fe70e134c027fc469637c75bf0c5c9f6d17ffb16d2726bf30a9c7aaf31a8d317472b1ea145ab66db616", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #172: duplication bug", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050230000000000000000000000000000000000000000000000000000000000000001555555550000000055555555555555553ef7a8e48d07df81a693439654210c706adda82b90261b0f319faa0d878665a6b6da497f09c903176222c34acfef72a647e6f50dcc40ad5d9b59f7602bb222fad71a41bf5e1f9df4959a364c62e488d9", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #173: point with x-coordinate 0", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023555555550000000055555555555555553ef7a8e48d07df81a693439654210c703333333300000000333333333333333325c7cbbc549e52e763f1f55a327a3aa9dd86d3b5f4a13e8511083b78002081c53ff467f11ebd98a51a633db76665d25045d5c8200c89f2fa10d849349226d21d8dfaed6ff8d5cb3e1b7e17474ebc18f7", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #175: comparison with point at infinity ", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237cf27b188d034f7e8a52380304b51ac3c08969e277f21b35a60b48fc47669978555555550000000055555555555555553ef7a8e48d07df81a693439654210c704fea55b32cb32aca0c12c4cd0abfb4e64b0f5a516e578c016591a93f5a0fbcc5d7d3fd10b2be668c547b212f6bb14c88f0fecd38a8a4b2c785ed3be62ce4b280", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #176: extreme value for k and edgecase s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237cf27b188d034f7e8a52380304b51ac3c08969e277f21b35a60b48fc47669978b6db6db6249249254924924924924924625bd7a09bec4ca81bcdd9f8fd6b63ccc6a771527024227792170a6f8eee735bf32b7f98af669ead299802e32d7c3107bc3b4b5e65ab887bbd343572b3e5619261fe3a073e2ffd78412f726867db589e", + "Expected": 
"0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #177: extreme value for k and s^-1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237cf27b188d034f7e8a52380304b51ac3c08969e277f21b35a60b48fc47669978cccccccc00000000cccccccccccccccc971f2ef152794b9d8fc7d568c9e8eaa7851c2bbad08e54ec7a9af99f49f03644d6ec6d59b207fec98de85a7d15b956efcee9960283045075684b410be8d0f7494b91aa2379f60727319f10ddeb0fe9d6", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #178: extreme value for k and s^-1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237cf27b188d034f7e8a52380304b51ac3c08969e277f21b35a60b48fc476699783333333300000000333333333333333325c7cbbc549e52e763f1f55a327a3aaaf6417c8a670584e388676949e53da7fc55911ff68318d1bf3061205acb19c48f8f2b743df34ad0f72674acb7505929784779cd9ac916c3669ead43026ab6d43f", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #179: extreme value for k and s^-1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237cf27b188d034f7e8a52380304b51ac3c08969e277f21b35a60b48fc4766997849249248db6db6dbb6db6db6db6db6db5a8b230d0b2b51dcd7ebf0c9fef7c185501421277be45a5eefec6c639930d636032565af420cf3373f557faa7f8a06438673d6cb6076e1cfcdc7dfe7384c8e5cac08d74501f2ae6e89cad195d0aa1371", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #180: extreme value for k and s^-1", + "NoBenchmark": false + }, + { 
+ "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237cf27b188d034f7e8a52380304b51ac3c08969e277f21b35a60b48fc4766997816a4502e2781e11ac82cbc9d1edd8c981584d13e18411e2f6e0478c34416e3bb0d935bf9ffc115a527735f729ca8a4ca23ee01a4894adf0e3415ac84e808bb343195a3762fea29ed38912bd9ea6c4fde70c3050893a4375850ce61d82eba33c5", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #181: extreme value for k", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050236b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296555555550000000055555555555555553ef7a8e48d07df81a693439654210c705e59f50708646be8a589355014308e60b668fb670196206c41e748e64e4dca215de37fee5c97bcaf7144d5b459982f52eeeafbdf03aacbafef38e213624a01de", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #182: extreme value for k and edgecase s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050236b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296b6db6db6249249254924924924924924625bd7a09bec4ca81bcdd9f8fd6b63cc169fb797325843faff2f7a5b5445da9e2fd6226f7ef90ef0bfe924104b02db8e7bbb8de662c7b9b1cf9b22f7a2e582bd46d581d68878efb2b861b131d8a1d667", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #183: extreme value for k and s^-1", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050236b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296cccccccc00000000cccccccccccccccc971f2ef152794b9d8fc7d568c9e8eaa7271cd89c000143096b62d4e9e4ca885aef2f7023d18affdaf8b7b548981487540a1c6e954e32108435b55fa385b0f76481a609b9149ccb4b02b2ca47fe8e4da5", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #184: extreme value for k and s^-1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050236b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c2963333333300000000333333333333333325c7cbbc549e52e763f1f55a327a3aaa3d0bc7ed8f09d2cb7ddb46ebc1ed799ab1563a9ab84bf524587a220afe499c12e22dc3b3c103824a4f378d96adb0a408abf19ce7d68aa6244f78cb216fa3f8df", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #185: extreme value for k and s^-1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050236b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c29649249248db6db6dbb6db6db6db6db6db5a8b230d0b2b51dcd7ebf0c9fef7c185a6c885ade1a4c566f9bb010d066974abb281797fa701288c721bcbd23663a9b72e424b690957168d193a6096fc77a2b004a9c7d467e007e1f2058458f98af316", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #186: extreme value for k and s^-1", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050236b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c29616a4502e2781e11ac82cbc9d1edd8c981584d13e18411e2f6e0478c34416e3bb8d3c2c2c3b765ba8289e6ac3812572a25bf75df62d87ab7330c3bdbad9ebfa5c4c6845442d66935b238578d43aec54f7caa1621d1af241d4632e0b780c423f5d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #187: extreme value for k", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023249249246db6db6ddb6db6db6db6db6dad4591868595a8ee6bf5f864ff7be0c26b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c2964fe342e2fe1a7f9b8ee7eb4a7c0f9e162bce33576b315ececbb6406837bf51f5", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #188: testing point duplication", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca60502344a5ad0ad0636d9f12bc9e0a6bdd5e1cbcb012ea7bf091fcec15b0c43202d52e249249246db6db6ddb6db6db6db6db6dad4591868595a8ee6bf5f864ff7be0c26b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c2964fe342e2fe1a7f9b8ee7eb4a7c0f9e162bce33576b315ececbb6406837bf51f5", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #189: testing point duplication", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023249249246db6db6ddb6db6db6db6db6dad4591868595a8ee6bf5f864ff7be0c26b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296b01cbd1c01e58065711814b583f061e9d431cca994cea1313449bf97c840ae0a", + "Expected": "", + "Gas": 3450, + "Name": 
"wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #190: testing point duplication", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca60502344a5ad0ad0636d9f12bc9e0a6bdd5e1cbcb012ea7bf091fcec15b0c43202d52e249249246db6db6ddb6db6db6db6db6dad4591868595a8ee6bf5f864ff7be0c26b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296b01cbd1c01e58065711814b583f061e9d431cca994cea1313449bf97c840ae0a", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #191: testing point duplication", + "NoBenchmark": false + }, + { + "Input": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855b292a619339f6e567a305c951c0dcbcc42d16e47f219f9e98e76e09d8770b34a0177e60492c5a8242f76f07bfe3661bde59ec2a17ce5bd2dab2abebdf89a62e204aaec73635726f213fb8a9e64da3b8632e41495a944d0045b522eba7240fad587d9315798aaa3a5ba01775787ced05eaaf7b4e09fc81d6d1aa546e8365d525d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #192: pseudorandom signature", + "NoBenchmark": false + }, + { + "Input": "dc1921946f4af96a2856e7be399007c9e807bdf4c5332f19f59ec9dd1bb8c7b3530bd6b0c9af2d69ba897f6b5fb59695cfbf33afe66dbadcf5b8d2a2a6538e23d85e489cb7a161fd55ededcedbf4cc0c0987e3e3f0f242cae934c72caa3f43e904aaec73635726f213fb8a9e64da3b8632e41495a944d0045b522eba7240fad587d9315798aaa3a5ba01775787ced05eaaf7b4e09fc81d6d1aa546e8365d525d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #193: pseudorandom signature", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023a8ea150cb80125d7381c4c1f1da8e9de2711f9917060406a73d7904519e51388f3ab9fa68bd47973a73b2d40480c2ba50c22c9d76ec217257288293285449b8604aaec73635726f213fb8a9e64da3b8632e41495a944d0045b522eba7240fad587d9315798aaa3a5ba01775787ced05eaaf7b4e09fc81d6d1aa546e8365d525d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #194: pseudorandom signature", + "NoBenchmark": false + }, + { + "Input": "de47c9b27eb8d300dbb5f2c353e632c393262cf06340c4fa7f1b40c4cbd36f90986e65933ef2ed4ee5aada139f52b70539aaf63f00a91f29c69178490d57fb713dafedfb8da6189d372308cbf1489bbbdabf0c0217d1c0ff0f701aaa7a694b9c04aaec73635726f213fb8a9e64da3b8632e41495a944d0045b522eba7240fad587d9315798aaa3a5ba01775787ced05eaaf7b4e09fc81d6d1aa546e8365d525d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #195: pseudorandom signature", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91d434e262a49eab7781e353a3565e482550dd0fd5defa013c7f29745eff3569f19b0c0a93f267fb6052fd8077be769c2b98953195d7bc10de844218305c6ba17a4f337ccfd67726a805e4f1600ae2849df3807eca117380239fbd816900000000ed9dea124cc8c396416411e988c30f427eb504af43a3146cd5df7ea60666d685", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #196: x-coordinate of the public key has many trailing 0's", + "NoBenchmark": false + }, + { + "Input": 
"2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f910fe774355c04d060f76d79fd7a772e421463489221bf0a33add0be9b1979110b500dcba1c69a8fbd43fa4f57f743ce124ca8b91a1f325f3fac6181175df557374f337ccfd67726a805e4f1600ae2849df3807eca117380239fbd816900000000ed9dea124cc8c396416411e988c30f427eb504af43a3146cd5df7ea60666d685", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #197: x-coordinate of the public key has many trailing 0's", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91bb40bf217bed3fb3950c7d39f03d36dc8e3b2cd79693f125bfd06595ee1135e3541bf3532351ebb032710bdb6a1bf1bfc89a1e291ac692b3fa4780745bb556774f337ccfd67726a805e4f1600ae2849df3807eca117380239fbd816900000000ed9dea124cc8c396416411e988c30f427eb504af43a3146cd5df7ea60666d685", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #198: x-coordinate of the public key has many trailing 0's", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91664eb7ee6db84a34df3c86ea31389a5405badd5ca99231ff556d3e75a233e73a59f3c752e52eca46137642490a51560ce0badc678754b8f72e51a2901426a1bd3cf03d614d8939cfd499a07873fac281618f06b8ff87e8015c3f49726500493584fa174d791c72bf2ce3880a8960dd2a7c7a1338a82f85a9e59cdbde80000000", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #199: y-coordinate of the public key has many trailing 0's", + "NoBenchmark": false + }, + { + "Input": 
"2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f914cd0429bbabd2827009d6fcd843d4ce39c3e42e2d1631fd001985a79d1fd8b439638bf12dd682f60be7ef1d0e0d98f08b7bca77a1a2b869ae466189d2acdabe33cf03d614d8939cfd499a07873fac281618f06b8ff87e8015c3f49726500493584fa174d791c72bf2ce3880a8960dd2a7c7a1338a82f85a9e59cdbde80000000", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #200: y-coordinate of the public key has many trailing 0's", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91e56c6ea2d1b017091c44d8b6cb62b9f460e3ce9aed5e5fd41e8added97c56c04a308ec31f281e955be20b457e463440b4fcf2b80258078207fc1378180f89b553cf03d614d8939cfd499a07873fac281618f06b8ff87e8015c3f49726500493584fa174d791c72bf2ce3880a8960dd2a7c7a1338a82f85a9e59cdbde80000000", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #201: y-coordinate of the public key has many trailing 0's", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f911158a08d291500b4cabed3346d891eee57c176356a2624fb011f8fbbf3466830228a8c486a736006e082325b85290c5bc91f378b75d487dda46798c18f2855193cf03d614d8939cfd499a07873fac281618f06b8ff87e8015c3f4972650049357b05e8b186e38d41d31c77f5769f22d58385ecc857d07a561a6324217fffffff", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #202: y-coordinate of the public key has many trailing 1's", + "NoBenchmark": false + }, + { + "Input": 
"2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91b1db9289649f59410ea36b0c0fc8d6aa2687b29176939dd23e0dde56d309fa9d3e1535e4280559015b0dbd987366dcf43a6d1af5c23c7d584e1c3f48a12513363cf03d614d8939cfd499a07873fac281618f06b8ff87e8015c3f4972650049357b05e8b186e38d41d31c77f5769f22d58385ecc857d07a561a6324217fffffff", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #203: y-coordinate of the public key has many trailing 1's", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91b7b16e762286cb96446aa8d4e6e7578b0a341a79f2dd1a220ac6f0ca4e24ed86ddc60a700a139b04661c547d07bbb0721780146df799ccf55e55234ecb8f12bc3cf03d614d8939cfd499a07873fac281618f06b8ff87e8015c3f4972650049357b05e8b186e38d41d31c77f5769f22d58385ecc857d07a561a6324217fffffff", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #204: y-coordinate of the public key has many trailing 1's", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91d82a7c2717261187c8e00d8df963ff35d796edad36bc6e6bd1c91c670d9105b43dcabddaf8fcaa61f4603e7cbac0f3c0351ecd5988efb23f680d07debd1399292829c31faa2e400e344ed94bca3fcd0545956ebcfe8ad0f6dfa5ff8effffffffa01aafaf000e52585855afa7676ade284113099052df57e7eb3bd37ebeb9222e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #205: x-coordinate of the public key has many trailing 1's", + "NoBenchmark": false + }, + { + "Input": 
"2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f915eb9c8845de68eb13d5befe719f462d77787802baff30ce96a5cba063254af782c026ae9be2e2a5e7ca0ff9bbd92fb6e44972186228ee9a62b87ddbe2ef66fb52829c31faa2e400e344ed94bca3fcd0545956ebcfe8ad0f6dfa5ff8effffffffa01aafaf000e52585855afa7676ade284113099052df57e7eb3bd37ebeb9222e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #206: x-coordinate of the public key has many trailing 1's", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f9196843dd03c22abd2f3b782b170239f90f277921becc117d0404a8e4e36230c28f2be378f526f74a543f67165976de9ed9a31214eb4d7e6db19e1ede123dd991d2829c31faa2e400e344ed94bca3fcd0545956ebcfe8ad0f6dfa5ff8effffffffa01aafaf000e52585855afa7676ade284113099052df57e7eb3bd37ebeb9222e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #207: x-coordinate of the public key has many trailing 1's", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91766456dce1857c906f9996af729339464d27e9d98edc2d0e3b760297067421f6402385ecadae0d8081dccaf5d19037ec4e55376eced699e93646bfbbf19d0b41fffffff948081e6a0458dd8f9e738f2665ff9059ad6aac0708318c4ca9a7a4f55a8abcba2dda8474311ee54149b973cae0c0fb89557ad0bf78e6529a1663bd73", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #208: x-coordinate of the public key is large", + "NoBenchmark": false + }, + { + "Input": 
"2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91c605c4b2edeab20419e6518a11b2dbc2b97ed8b07cced0b19c34f777de7b9fd9edf0f612c5f46e03c719647bc8af1b29b2cde2eda700fb1cff5e159d47326dbafffffff948081e6a0458dd8f9e738f2665ff9059ad6aac0708318c4ca9a7a4f55a8abcba2dda8474311ee54149b973cae0c0fb89557ad0bf78e6529a1663bd73", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #209: x-coordinate of the public key is large", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91d48b68e6cabfe03cf6141c9ac54141f210e64485d9929ad7b732bfe3b7eb8a84feedae50c61bd00e19dc26f9b7e2265e4508c389109ad2f208f0772315b6c941fffffff948081e6a0458dd8f9e738f2665ff9059ad6aac0708318c4ca9a7a4f55a8abcba2dda8474311ee54149b973cae0c0fb89557ad0bf78e6529a1663bd73", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #210: x-coordinate of the public key is large", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91b7c81457d4aeb6aa65957098569f0479710ad7f6595d5874c35a93d12a5dd4c7b7961a0b652878c2d568069a432ca18a1a9199f2ca574dad4b9e3a05c0a1cdb300000003fa15f963949d5f03a6f5c7f86f9e0015eeb23aebbff1173937ba748e1099872070e8e87c555fa13659cca5d7fadcfcb0023ea889548ca48af2ba7e71", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #211: x-coordinate of the public key is small", + "NoBenchmark": false + }, + { + "Input": 
"2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f916b01332ddb6edfa9a30a1321d5858e1ee3cf97e263e669f8de5e9652e76ff3f75939545fced457309a6a04ace2bd0f70139c8f7d86b02cb1cc58f9e69e96cd5a00000003fa15f963949d5f03a6f5c7f86f9e0015eeb23aebbff1173937ba748e1099872070e8e87c555fa13659cca5d7fadcfcb0023ea889548ca48af2ba7e71", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #212: x-coordinate of the public key is small", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91efdb884720eaeadc349f9fc356b6c0344101cd2fd8436b7d0e6a4fb93f106361f24bee6ad5dc05f7613975473aadf3aacba9e77de7d69b6ce48cb60d8113385d00000003fa15f963949d5f03a6f5c7f86f9e0015eeb23aebbff1173937ba748e1099872070e8e87c555fa13659cca5d7fadcfcb0023ea889548ca48af2ba7e71", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #213: x-coordinate of the public key is small", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f9131230428405560dcb88fb5a646836aea9b23a23dd973dcbe8014c87b8b20eb070f9344d6e812ce166646747694a41b0aaf97374e19f3c5fb8bd7ae3d9bd0beffbcbb2914c79f045eaa6ecbbc612816b3be5d2d6796707d8125e9f851c18af015000000001352bb4a0fa2ea4cceb9ab63dd684ade5a1127bcf300a698a7193bc2", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #214: y-coordinate of the public key is small", + "NoBenchmark": false + }, + { + "Input": 
"2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91caa797da65b320ab0d5c470cda0b36b294359c7db9841d679174db34c4855743cf543a62f23e212745391aaf7505f345123d2685ee3b941d3de6d9b36242e5a0bcbb2914c79f045eaa6ecbbc612816b3be5d2d6796707d8125e9f851c18af015000000001352bb4a0fa2ea4cceb9ab63dd684ade5a1127bcf300a698a7193bc2", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #215: y-coordinate of the public key is small", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f917e5f0ab5d900d3d3d7867657e5d6d36519bc54084536e7d21c336ed8001859459450c07f201faec94b82dfb322e5ac676688294aad35aa72e727ff0b19b646aabcbb2914c79f045eaa6ecbbc612816b3be5d2d6796707d8125e9f851c18af015000000001352bb4a0fa2ea4cceb9ab63dd684ade5a1127bcf300a698a7193bc2", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #216: y-coordinate of the public key is small", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91d7d70c581ae9e3f66dc6a480bf037ae23f8a1e4a2136fe4b03aa69f0ca25b35689c460f8a5a5c2bbba962c8a3ee833a413e85658e62a59e2af41d9127cc47224bcbb2914c79f045eaa6ecbbc612816b3be5d2d6796707d8125e9f851c18af015fffffffeecad44b6f05d15b33146549c2297b522a5eed8430cff596758e6c43d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #217: y-coordinate of the public key is large", + "NoBenchmark": false + }, + { + "Input": 
"2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91341c1b9ff3c83dd5e0dfa0bf68bcdf4bb7aa20c625975e5eeee34bb396266b3472b69f061b750fd5121b22b11366fad549c634e77765a017902a67099e0a4469bcbb2914c79f045eaa6ecbbc612816b3be5d2d6796707d8125e9f851c18af015fffffffeecad44b6f05d15b33146549c2297b522a5eed8430cff596758e6c43d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #218: y-coordinate of the public key is large", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f9170bebe684cdcb5ca72a42f0d873879359bd1781a591809947628d313a3814f67aec03aca8f5587a4d535fa31027bbe9cc0e464b1c3577f4c2dcde6b2094798a9bcbb2914c79f045eaa6ecbbc612816b3be5d2d6796707d8125e9f851c18af015fffffffeecad44b6f05d15b33146549c2297b522a5eed8430cff596758e6c43d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_p1363_test.json EcdsaP1363Verify SHA-256 #219: y-coordinate of the public key is large", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050232ba3a8be6b94d5ec80a6d9d1190a436effe50d85a1eee859b8cc6af9bd5c2e184cd60b855d442f5b3c7b11eb6c4e0ae7525fe710fab9aa7c77a67f79e6fadd762927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #1: signature malleability", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050232ba3a8be6b94d5ec80a6d9d1190a436effe50d85a1eee859b8cc6af9bd5c2e18b329f479a2bbd0a5c384ee1493b1f5186a87139cac5df4087c134b49156847db2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #2: Legacy:ASN encoding of s misses leading 0", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050232ba3a8be6b94d5ec80a6d9d1190a436effe50d85a1eee859b8cc6af9bd5c2e18b329f479a2bbd0a5c384ee1493b1f5186a87139cac5df4087c134b49156847db2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #3: valid", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca60502329a3a8be6b94d5ec80a6d9d1190a436effe50d85a1eee859b8cc6af9bd5c2e18b329f479a2bbd0a5c384ee1493b1f5186a87139cac5df4087c134b49156847db2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #118: modify first byte of integer", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050232ba3a8be6b94d5ec80a6d9d1190a436effe50d85a1eee859b8cc6af9bd5c2e98b329f479a2bbd0a5c384ee1493b1f5186a87139cac5df4087c134b49156847db2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + 
"Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #120: modify last byte of integer", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050232ba3a8be6b94d5ec80a6d9d1190a436effe50d85a1eee859b8cc6af9bd5c2e18b329f479a2bbd0a5c384ee1493b1f5186a87139cac5df4087c134b491568475b2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #121: modify last byte of integer", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050232ba3a8be6b94d5ec80a6d9d1190a436effe50d85a1eee859b8cc6af9bd5c2e1800b329f479a2bbd0a5c384ee1493b1f5186a87139cac5df4087c134b491568472927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #124: truncated integer", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023d45c5741946b2a137f59262ee6f5bc91001af27a5e1117a64733950642a3d1e8b329f479a2bbd0a5c384ee1493b1f5186a87139cac5df4087c134b49156847db2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #133: Modified r or s, e.g. 
by adding or subtracting the order of the group", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023d45c5740946b2a147f59262ee6f5bc90bd01ed280528b62b3aed5fc93f06f739b329f479a2bbd0a5c384ee1493b1f5186a87139cac5df4087c134b49156847db2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #134: Modified r or s, e.g. by adding or subtracting the order of the group", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023d45c5741946b2a137f59262ee6f5bc91001af27a5e1117a64733950642a3d1e8b329f479a2bbd0a5c384ee1493b1f5186a87139cac5df4087c134b49156847db2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #137: Modified r or s, e.g. by adding or subtracting the order of the group", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050232ba3a8be6b94d5ec80a6d9d1190a436effe50d85a1eee859b8cc6af9bd5c2e18b329f47aa2bbd0a4c384ee1493b1f518ada018ef05465583885980861905228a2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #139: Modified r or s, e.g. 
by adding or subtracting the order of the group", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050232ba3a8be6b94d5ec80a6d9d1190a436effe50d85a1eee859b8cc6af9bd5c2e184cd60b865d442f5a3c7b11eb6c4e0ae79578ec6353a20bf783ecb4b6ea97b8252927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #143: Modified r or s, e.g. by adding or subtracting the order of the group", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325512927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #177: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325502927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #178: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325522927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #179: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551ffffffff00000001000000000000000000000000ffffffffffffffffffffffff2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #180: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551ffffffff000000010000000000000000000000010000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #181: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632550ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325512927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": 
"wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #187: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632550ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325502927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #188: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632550ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325522927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #189: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632550ffffffff00000001000000000000000000000000ffffffffffffffffffffffff2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #190: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632550ffffffff000000010000000000000000000000010000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #191: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632552ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325512927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #197: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632552ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325502927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #198: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632552ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325522927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": 
"wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #199: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632552ffffffff00000001000000000000000000000000ffffffffffffffffffffffff2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #200: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632552ffffffff000000010000000000000000000000010000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #201: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000000ffffffffffffffffffffffffffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325512927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #207: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000000ffffffffffffffffffffffffffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325502927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #208: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000000ffffffffffffffffffffffffffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325522927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #209: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000000ffffffffffffffffffffffffffffffff00000001000000000000000000000000ffffffffffffffffffffffff2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #210: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000000ffffffffffffffffffffffffffffffff000000010000000000000000000000010000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": 
"wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #211: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000001000000000000000000000000ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325512927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #217: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000001000000000000000000000000ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325502927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #218: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000001000000000000000000000000ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325522927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #219: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000001000000000000000000000000ffffffff00000001000000000000000000000000ffffffffffffffffffffffff2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #220: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000001000000000000000000000000ffffffff000000010000000000000000000000010000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #221: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "70239dd877f7c944c422f44dea4ed1a52f2627416faf2f072fa50c772ed6f80764a1aab5000d0e804f3e2fc02bdee9be8ff312334e2ba16d11547c97711c898e6af015971cc30be6d1a206d4e013e0997772a2f91d73286ffd683b9bb2cf4f1b2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #230: Edge case for Shamir multiplication", + "NoBenchmark": false + }, + { + "Input": "00000000690ed426ccf17803ebe2bd0884bcd58a1bb5e7477ead3645f356e7a916aea964a2f6506d6f78c81c91fc7e8bded7d397738448de1e19a0ec580bf266252cd762130c6667cfe8b7bc47d27d78391e8e80c578d1cd38c3ff033be928e92927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": 
"0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #231: special case hash", + "NoBenchmark": false + }, + { + "Input": "7300000000213f2a525c6035725235c2f696ad3ebb5ee47f140697ad25770d919cc98be2347d469bf476dfc26b9b733df2d26d6ef524af917c665baccb23c882093496459effe2d8d70727b82462f61d0ec1b7847929d10ea631dacb16b56c322927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #232: special case hash", + "NoBenchmark": false + }, + { + "Input": "ddf2000000005e0be0635b245f0b97978afd25daadeb3edb4a0161c27fe0604573b3c90ecd390028058164524dde892703dce3dea0d53fa8093999f07ab8aa432f67b0b8e20636695bb7d8bf0a651c802ed25a395387b5f4188c0c4075c886342927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #233: special case hash", + "NoBenchmark": false + }, + { + "Input": "67ab1900000000784769c4ecb9e164d6642b8499588b89855be1ec355d0841a0bfab3098252847b328fadf2f89b95c851a7f0eb390763378f37e90119d5ba3ddbdd64e234e832b1067c2d058ccb44d978195ccebb65c2aaf1e2da9b8b4987e3b2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #234: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"a2bf09460000000076d7dbeffe125eaf02095dff252ee905e296b6350fc311cf204a9784074b246d8bf8bf04a4ceb1c1f1c9aaab168b1596d17093c5cd21d2cd51cce41670636783dc06a759c8847868a406c2506fe17975582fe648d1d88b522927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #235: special case hash", + "NoBenchmark": false + }, + { + "Input": "3554e827c700000000e1e75e624a06b3a0a353171160858129e15c544e4f0e65ed66dc34f551ac82f63d4aa4f81fe2cb0031a91d1314f835027bca0f1ceeaa0399ca123aa09b13cd194a422e18d5fda167623c3f6e5d4d6abb8953d67c0c48c72927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #236: special case hash", + "NoBenchmark": false + }, + { + "Input": "9b6cd3b812610000000026941a0f0bb53255ea4c9fd0cb3426e3a54b9fc6965c060b700bef665c68899d44f2356a578d126b062023ccc3c056bf0f60a237012b8d186c027832965f4fcc78a3366ca95dedbb410cbef3f26d6be5d581c11d36102927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #237: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"883ae39f50bf0100000000e7561c26fc82a52baa51c71ca877162f93c4ae01869f6adfe8d5eb5b2c24d7aa7934b6cf29c93ea76cd313c9132bb0c8e38c96831db26a9c9e40e55ee0890c944cf271756c906a33e66b5bd15e051593883b5e99022927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #238: special case hash", + "NoBenchmark": false + }, + { + "Input": "a1ce5d6e5ecaf28b0000000000fa7cd010540f420fb4ff7401fe9fce011d0ba6a1af03ca91677b673ad2f33615e56174a1abf6da168cebfa8868f4ba273f16b720aa73ffe48afa6435cd258b173d0c2377d69022e7d098d75caf24c8c5e06b1c2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #239: special case hash", + "NoBenchmark": false + }, + { + "Input": "8ea5f645f373f580930000000038345397330012a8ee836c5494cdffd5ee8054fdc70602766f8eed11a6c99a71c973d5659355507b843da6e327a28c11893db93df5349688a085b137b1eacf456a9e9e0f6d15ec0078ca60a7f83f2b10d213502927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #240: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"660570d323e9f75fa734000000008792d65ce93eabb7d60d8d9c1bbdcb5ef305b516a314f2fce530d6537f6a6c49966c23456f63c643cf8e0dc738f7b876e675d39ffd033c92b6d717dd536fbc5efdf1967c4bd80954479ba66b0120cd16fff22927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #241: special case hash", + "NoBenchmark": false + }, + { + "Input": "d0462673154cce587dde8800000000e98d35f1f45cf9c3bf46ada2de4c568c343b2cbf046eac45842ecb7984d475831582717bebb6492fd0a485c101e29ff0a84c9b7b47a98b0f82de512bc9313aaf51701099cac5f76e68c8595fc1c1d992582927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #242: special case hash", + "NoBenchmark": false + }, + { + "Input": "bd90640269a7822680cedfef000000000caef15a6171059ab83e7b4418d7278f30c87d35e636f540841f14af54e2f9edd79d0312cfa1ab656c3fb15bfde48dcf47c15a5a82d24b75c85a692bd6ecafeb71409ede23efd08e0db9abf6340677ed2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #243: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"33239a52d72f1311512e41222a00000000d2dcceb301c54b4beae8e284788a7338686ff0fda2cef6bc43b58cfe6647b9e2e8176d168dec3c68ff262113760f52067ec3b651f422669601662167fa8717e976e2db5e6a4cf7c2ddabb3fde9d67d2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #244: special case hash", + "NoBenchmark": false + }, + { + "Input": "b8d64fbcd4a1c10f1365d4e6d95c000000007ee4a21a1cbe1dc84c2d941ffaf144a3e23bf314f2b344fc25c7f2de8b6af3e17d27f5ee844b225985ab6e2775cf2d48e223205e98041ddc87be532abed584f0411f5729500493c9cc3f4dd15e862927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #245: special case hash", + "NoBenchmark": false + }, + { + "Input": "01603d3982bf77d7a3fef3183ed092000000003a227420db4088b20fe0e9d84a2ded5b7ec8e90e7bf11f967a3d95110c41b99db3b5aa8d330eb9d638781688e97d5792c53628155e1bfc46fb1a67e3088de049c328ae1f44ec69238a009808f92927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #246: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"9ea6994f1e0384c8599aa02e6cf66d9c000000004d89ef50b7e9eb0cfbff7363bdae7bcb580bf335efd3bc3d31870f923eaccafcd40ec2f605976f15137d8b8ff6dfa12f19e525270b0106eecfe257499f373a4fb318994f24838122ce7ec3c72927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #247: special case hash", + "NoBenchmark": false + }, + { + "Input": "d03215a8401bcf16693979371a01068a4700000000e2fa5bf692bc670905b18c50f9c4f0cd6940e162720957ffff513799209b78596956d21ece251c2401f1c6d7033a0a787d338e889defaaabb106b95a4355e411a59c32aa5167dfab2447262927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #248: special case hash", + "NoBenchmark": false + }, + { + "Input": "307bfaaffb650c889c84bf83f0300e5dc87e000000008408fd5f64b582e3bb14f612820687604fa01906066a378d67540982e29575d019aabe90924ead5c860d3f9367702dd7dd4f75ea98afd20e328a1a99f4857b316525328230ce294b0fef2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #249: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"bab5c4f4df540d7b33324d36bb0c157551527c00000000e4af574bb4d54ea6b89505e407657d6e8bc93db5da7aa6f5081f61980c1949f56b0f2f507da5782a7ac60d31904e3669738ffbeccab6c3656c08e0ed5cb92b3cfa5e7f71784f9c50212927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #250: special case hash", + "NoBenchmark": false + }, + { + "Input": "d4ba47f6ae28f274e4f58d8036f9c36ec2456f5b00000000c3b869197ef5e15ebbd16fbbb656b6d0d83e6a7787cd691b08735aed371732723e1c68a40404517d9d8e35dba96028b7787d91315be675877d2d097be5e8ee34560e3e7fd25c0f002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #251: special case hash", + "NoBenchmark": false + }, + { + "Input": "79fd19c7235ea212f29f1fa00984342afe0f10aafd00000000801e47f8c184e12ec9760122db98fd06ea76848d35a6da442d2ceef7559a30cf57c61e92df327e7ab271da90859479701fccf86e462ee3393fb6814c27b760c4963625c0a198782927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #252: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"8c291e8eeaa45adbaf9aba5c0583462d79cbeb7ac97300000000a37ea6700cda54e76b7683b6650baa6a7fc49b1c51eed9ba9dd463221f7a4f1005a89fe00c592ea076886c773eb937ec1cc8374b7915cfd11b1c1ae1166152f2f7806a31c8fd2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #253: special case hash", + "NoBenchmark": false + }, + { + "Input": "0eaae8641084fa979803efbfb8140732f4cdcf66c3f78a000000003c278a6b215291deaf24659ffbbce6e3c26f6021097a74abdbb69be4fb10419c0c496c946665d6fcf336d27cc7cdb982bb4e4ecef5827f84742f29f10abf83469270a03dc32927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #254: special case hash", + "NoBenchmark": false + }, + { + "Input": "e02716d01fb23a5a0068399bf01bab42ef17c6d96e13846c00000000afc0f89d207a3241812d75d947419dc58efb05e8003b33fc17eb50f9d15166a88479f107cdee749f2e492b213ce80b32d0574f62f1c5d70793cf55e382d5caadf75927672927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #255: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"9eb0bf583a1a6b9a194e9a16bc7dab2a9061768af89d00659a00000000fc7de16554e49f82a855204328ac94913bf01bbe84437a355a0a37c0dee3cf81aa7728aea00de2507ddaf5c94e1e126980d3df16250a2eaebc8be486effe7f22b4f9292927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #256: special case hash", + "NoBenchmark": false + }, + { + "Input": "62aac98818b3b84a2c214f0d5e72ef286e1030cb53d9a82b690e00000000cd15a54c5062648339d2bff06f71c88216c26c6e19b4d80a8c602990ac82707efdfce99bbe7fcfafae3e69fd016777517aa01056317f467ad09aff09be73c9731b0d2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #257: special case hash", + "NoBenchmark": false + }, + { + "Input": "3760a7f37cf96218f29ae43732e513efd2b6f552ea4b6895464b9300000000c8975bd7157a8d363b309f1f444012b1a1d23096593133e71b4ca8b059cff37eaf7faa7a28b1c822baa241793f2abc930bd4c69840fe090f2aacc46786bf9196222927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #258: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"0da0a1d2851d33023834f2098c0880096b4320bea836cd9cbb6ff6c8000000005694a6f84b8f875c276afd2ebcfe4d61de9ec90305afb1357b95b3e0da43885e0dffad9ffd0b757d8051dec02ebdf70d8ee2dc5c7870c0823b6ccc7c679cbaa42927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #259: special case hash", + "NoBenchmark": false + }, + { + "Input": "ffffffff293886d3086fd567aafd598f0fe975f735887194a764a231e82d289aa0c30e8026fdb2b4b4968a27d16a6d08f7098f1a98d21620d7454ba9790f1ba65e470453a8a399f15baf463f9deceb53acc5ca64459149688bd2760c654243392927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #260: special case hash", + "NoBenchmark": false + }, + { + "Input": "7bffffffff2376d1e3c03445a072e24326acdc4ce127ec2e0e8d9ca99527e7b7614ea84acf736527dd73602cd4bb4eea1dfebebd5ad8aca52aa0228cf7b99a88737cc85f5f2d2f60d1b8183f3ed490e4de14368e96a9482c2a4dd193195c902f2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #261: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"a2b5ffffffffebb251b085377605a224bc80872602a6e467fd016807e97fa395bead6734ebe44b810d3fb2ea00b1732945377338febfd439a8d74dfbd0f942fa6bb18eae36616a7d3cad35919fd21a8af4bbe7a10f73b3e036a46b103ef56e2a2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #262: special case hash", + "NoBenchmark": false + }, + { + "Input": "641227ffffffff6f1b96fa5f097fcf3cc1a3c256870d45a67b83d0967d4b20c0499625479e161dacd4db9d9ce64854c98d922cbf212703e9654fae182df9bad242c177cf37b8193a0131108d97819edd9439936028864ac195b64fca76d9d6932927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #263: special case hash", + "NoBenchmark": false + }, + { + "Input": "958415d8ffffffffabad03e2fc662dc3ba203521177502298df56f36600e0f8b08f16b8093a8fb4d66a2c8065b541b3d31e3bfe694f6b89c50fb1aaa6ff6c9b29d6455e2d5d1779748573b611cb95d4a21f967410399b39b535ba3e5af81ca2e2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #264: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"f1d8de4858ffffffff1281093536f47fe13deb04e1fbe8fb954521b6975420f8be26231b6191658a19dd72ddb99ed8f8c579b6938d19bce8eed8dc2b338cb5f8e1d9a32ee56cffed37f0f22b2dcb57d5c943c14f79694a03b9c5e96952575c892927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #265: special case hash", + "NoBenchmark": false + }, + { + "Input": "0927895f2802ffffffff10782dd14a3b32dc5d47c05ef6f1876b95c81fc31def15e76880898316b16204ac920a02d58045f36a229d4aa4f812638c455abe0443e74d357d3fcb5c8c5337bd6aba4178b455ca10e226e13f9638196506a19391232927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #266: special case hash", + "NoBenchmark": false + }, + { + "Input": "60907984aa7e8effffffff4f332862a10a57c3063fb5a30624cf6a0c3ac80589352ecb53f8df2c503a45f9846fc28d1d31e6307d3ddbffc1132315cc07f16dad1348dfa9c482c558e1d05c5242ca1c39436726ecd28258b1899792887dd0a3c62927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #267: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"c6ff198484939170ffffffff0af42cda50f9a5f50636ea6942d6b9b8cd6ae1e24a40801a7e606ba78a0da9882ab23c7677b8642349ed3d652c5bfa5f2a9558fb3a49b64848d682ef7f605f2832f7384bdc24ed2925825bf8ea77dc59817257822927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #268: special case hash", + "NoBenchmark": false + }, + { + "Input": "de030419345ca15c75ffffffff8074799b9e0956cc43135d16dfbe4d27d7e68deacc5e1a8304a74d2be412b078924b3bb3511bac855c05c9e5e9e44df3d61e967451cd8e18d6ed1885dd827714847f96ec4bb0ed4c36ce9808db8f714204f6d12927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #269: special case hash", + "NoBenchmark": false + }, + { + "Input": "6f0e3eeaf42b28132b88fffffffff6c8665604d34acb19037e1ab78caaaac6ff2f7a5e9e5771d424f30f67fdab61e8ce4f8cd1214882adb65f7de94c31577052ac4e69808345809b44acb0b2bd889175fb75dd050c5a449ab9528f8f78daa10c2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #270: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"cdb549f773b3e62b3708d1ffffffffbe48f7c0591ddcae7d2cb222d1f8017ab9ffcda40f792ce4d93e7e0f0e95e1a2147dddd7f6487621c30a03d710b330021979938b55f8a17f7ed7ba9ade8f2065a1fa77618f0b67add8d58c422c2453a49a2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #271: special case hash", + "NoBenchmark": false + }, + { + "Input": "2c3f26f96a3ac0051df4989bffffffff9fd64886c1dc4f9924d8fd6f0edb048481f2359c4faba6b53d3e8c8c3fcc16a948350f7ab3a588b28c17603a431e39a8cd6f6a5cc3b55ead0ff695d06c6860b509e46d99fccefb9f7f9e101857f743002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #272: special case hash", + "NoBenchmark": false + }, + { + "Input": "ac18f8418c55a2502cb7d53f9affffffff5c31d89fda6a6b8476397c04edf411dfc8bf520445cbb8ee1596fb073ea283ea130251a6fdffa5c3f5f2aaf75ca808048e33efce147c9dd92823640e338e68bfd7d0dc7a4905b3a7ac711e577e90e72927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #273: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"4f9618f98e2d3a15b24094f72bb5ffffffffa2fd3e2893683e5a6ab8cf0ee610ad019f74c6941d20efda70b46c53db166503a0e393e932f688227688ba6a576293320eb7ca0710255346bdbb3102cdcf7964ef2e0988e712bc05efe16c1993452927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #274: special case hash", + "NoBenchmark": false + }, + { + "Input": "422e82a3d56ed10a9cc21d31d37a25ffffffff67edf7c40204caae73ab0bc75aac8096842e8add68c34e78ce11dd71e4b54316bd3ebf7fffdeb7bd5a3ebc1883f5ca2f4f23d674502d4caf85d187215d36e3ce9f0ce219709f21a3aac003b7a82927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #275: special case hash", + "NoBenchmark": false + }, + { + "Input": "7075d245ccc3281b6e7b329ff738fbb417a5ffffffffa0842d9890b5cf95d018677b2d3a59b18a5ff939b70ea002250889ddcd7b7b9d776854b4943693fb92f76b4ba856ade7677bf30307b21f3ccda35d2f63aee81efd0bab6972cc0795db552927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #276: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"3c80de54cd9226989443d593fa4fd6597e280ebeffffffffc1847eb76c217a95479e1ded14bcaed0379ba8e1b73d3115d84d31d4b7c30e1f05e1fc0d5957cfb0918f79e35b3d89487cf634a4f05b2e0c30857ca879f97c771e877027355b24432927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #277: special case hash", + "NoBenchmark": false + }, + { + "Input": "de21754e29b85601980bef3d697ea2770ce891a8cdffffffffc7906aa794b39b43dfccd0edb9e280d9a58f01164d55c3d711e14b12ac5cf3b64840ead512a0a31dbe33fa8ba84533cd5c4934365b3442ca1174899b78ef9a3199f495843897722927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #278: special case hash", + "NoBenchmark": false + }, + { + "Input": "8f65d92927cfb86a84dd59623fb531bb599e4d5f7289ffffffff2f1f2f57881c5b09ab637bd4caf0f4c7c7e4bca592fea20e9087c259d26a38bb4085f0bbff1145b7eb467b6748af618e9d80d6fdcd6aa24964e5a13f885bca8101de08eb0d752927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #279: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"6b63e9a74e092120160bea3877dace8a2cc7cd0e8426cbfffffffffafc8c3ca85e9b1c5a028070df5728c5c8af9b74e0667afa570a6cfa0114a5039ed15ee06fb1360907e2d9785ead362bb8d7bd661b6c29eeffd3c5037744edaeb9ad990c202927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #280: special case hash", + "NoBenchmark": false + }, + { + "Input": "fc28259702a03845b6d75219444e8b43d094586e249c8699ffffffffe852512e0671a0a85c2b72d54a2fb0990e34538b4890050f5a5712f6d1a7a5fb8578f32edb1846bab6b7361479ab9c3285ca41291808f27fd5bd4fdac720e5854713694c2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #281: special case hash", + "NoBenchmark": false + }, + { + "Input": "1273b4502ea4e3bccee044ee8e8db7f774ecbcd52e8ceb571757ffffffffe20a7673f8526748446477dbbb0590a45492c5d7d69859d301abbaedb35b2095103a3dc70ddf9c6b524d886bed9e6af02e0e4dec0d417a414fed3807ef4422913d7c2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #282: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"08fb565610a79baa0c566c66228d81814f8c53a15b96e602fb49ffffffffff6e7f085441070ecd2bb21285089ebb1aa6450d1a06c36d3ff39dfd657a796d12b5249712012029870a2459d18d47da9aa492a5e6cb4b2d8dafa9e4c5c54a2b9a8b2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #283: special case hash", + "NoBenchmark": false + }, + { + "Input": "d59291cc2cf89f3087715fcb1aa4e79aa2403f748e97d7cd28ecaefeffffffff914c67fb61dd1e27c867398ea7322d5ab76df04bc5aa6683a8e0f30a5d287348fa07474031481dda4953e3ac1959ee8cea7e66ec412b38d6c96d28f6d37304ea2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #284: special case hash", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000000fffffffffffffffffffffffcffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc63254e0ad99500288d466940031d72a9f5445a4d43784640855bf0a69874d2de5fe103c5011e6ef2c42dcd50d5d3d29f99ae6eba2c80c9244f4c5422f0979ff0c3ba5e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #286: r too large", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc63254fffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc63254eab05fd9d0de26b9ce6f4819652d9fc69193d0aa398f0fba8013e09c58220455419235271228c786759095d12b75af0692dd4103f19f6a8c32f49435a1e9b8d45", + "Expected": 
"0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #287: r,s are large", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd909135bdb6799286170f5ead2de4f6511453fe50914f3df2de54a36383df8dd480984f39a1ff38a86a68aa4201b6be5dfbfecf876219710b07badf6fdd4c6c5611feb97390d9826e7a06dfb41871c940d74415ed3cac2089f1445019bb55ed95", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #288: r and s^-1 have a large Hamming weight", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd27b4577ca009376f71303fd5dd227dcef5deb773ad5f5a84360644669ca249a54201b4272944201c3294f5baa9a3232b6dd687495fcc19a70a95bc602b4f7c0595c37eba9ee8171c1bb5ac6feaf753bc36f463e3aef16629572c0c0a8fb0800e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #289: r and s^-1 have a large Hamming weight", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6324d5555555550000000055555555555555553ef7a8e48d07df81a693439654210c70083539fbee44625e3acaafa2fcb41349392cef0633a1b8fabecee0c133b10e99915c1ebe7bf00df8535196770a58047ae2a402f26326bb7d41d4d7616337911e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #301: r and s^-1 are close to n", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237fffffff800000007fffffffffffffffde737d56d38bcf4279dce5617e3192a8555555550000000055555555555555553ef7a8e48d07df81a693439654210c70b533d4695dd5b8c5e07757e55e6e516f7e2c88fa0239e23f60e8ec07dd70f2871b134ee58cc583278456863f33c3a85d881f7d4a39850143e29d4eaf009afe47", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #304: point at infinity during verify", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237fffffff800000007fffffffffffffffde737d56d38bcf4279dce5617e3192a97fffffff800000007fffffffffffffffde737d56d38bcf4279dce5617e3192a8f50d371b91bfb1d7d14e1323523bc3aa8cbf2c57f9e284de628c8b4536787b86f94ad887ac94d527247cd2e7d0c8b1291c553c9730405380b14cbb209f5fa2dd", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #305: edge case for signature malleability", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237fffffff800000007fffffffffffffffde737d56d38bcf4279dce5617e3192a97fffffff800000007fffffffffffffffde737d56d38bcf4279dce5617e3192a968ec6e298eafe16539156ce57a14b04a7047c221bafc3a582eaeb0d857c4d94697bed1af17850117fdb39b2324f220a5698ed16c426a27335bb385ac8ca6fb30", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #306: edge case for signature malleability", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023555555550000000055555555555555553ef7a8e48d07df81a693439654210c70bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca60502369da0364734d2e530fece94019265fefb781a0f1b08f6c8897bdf6557927c8b866d2d3c7dcd518b23d726960f069ad71a933d86ef8abbcce8b20f71e2a847002", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #307: u1 == 1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023555555550000000055555555555555553ef7a8e48d07df81a693439654210c7044a5ad0ad0636d9f12bc9e0a6bdd5e1cbcb012ea7bf091fcec15b0c43202d52ed8adc00023a8edc02576e2b63e3e30621a471e2b2320620187bf067a1ac1ff3233e2b50ec09807accb36131fff95ed12a09a86b4ea9690aa32861576ba2362e1", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #308: u1 == n - 1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023555555550000000055555555555555553ef7a8e48d07df81a693439654210c70555555550000000055555555555555553ef7a8e48d07df81a693439654210c703623ac973ced0a56fa6d882f03a7d5c7edca02cfc7b2401fab3690dbe75ab7858db06908e64b28613da7257e737f39793da8e713ba0643b92e9bb3252be7f8fe", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #309: u2 == 1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023555555550000000055555555555555553ef7a8e48d07df81a693439654210c70aaaaaaaa00000000aaaaaaaaaaaaaaaa7def51c91a0fbf034d26872ca84218e1cf04ea77e9622523d894b93ff52dc3027b31959503b6fa3890e5e04263f922f1e8528fb7c006b3983c8b8400e57b4ed71740c2f3975438821199bedeaecab2e9", + 
"Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #310: u2 == n - 1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffde91e1ba60fdedb76a46bcb51dc0b8b4b7e019f0a28721885fa5d3a8196623397db7a2c8a1ab573e5929dc24077b508d7e683d49227996bda3e9f78dbeff773504f417f3bc9a88075c2e0aadd5a13311730cf7cc76a82f11a36eaf08a6c99a206", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #311: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdfdea5843ffeb73af94313ba4831b53fe24f799e525b1e8e8c87b59b95b430ad9dead11c7a5b396862f21974dc4752fadeff994efe9bbd05ab413765ea80b6e1f1de3f0640e8ac6edcf89cff53c40e265bb94078a343736df07aa0318fc7fe1ff", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #312: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd03ffcabf2f1b4d2a65190db1680d62bb994e41c5251cd73b3c3dfc5e5bafc035d0bc472e0d7c81ebaed3a6ef96c18613bb1fea6f994326fbe80e00dfde67c7e9986c723ea4843d48389b946f64ad56c83ad70ff17ba85335667d1bb9fa619efd", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #313: edge case for u1", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd4dfbc401f971cd304b33dfdb17d0fed0fe4c1a88ae648e0d2847f74977534989a0a44ca947d66a2acb736008b9c08d1ab2ad03776e02640f78495d458dd51c326337fe5cf8c4604b1f1c409dc2d872d4294a4762420df43a30a2392e40426add", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #314: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdbc4024761cd2ffd43dfdb17d0fed112b988977055cd3a8e54971eba9cda5ca71c9c2115290d008b45fb65fad0f602389298c25420b775019d42b62c3ce8a96b73877d25a8080dc02d987ca730f0405c2c9dbefac46f9e601cc3f06e9713973fd", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #315: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd788048ed39a5ffa77bfb62fa1fda2257742bf35d128fb3459f2a0c909ee86f915eca1ef4c287dddc66b8bccf1b88e8a24c0018962f3c5e7efa83bc1a5ff6033e5e79c4cb2c245b8c45abdce8a8e4da758d92a607c32cd407ecaef22f1c934a71", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #316: edge case for u1", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd476d9131fd381bd917d0fed112bc9e0a5924b5ed5b11167edd8b23582b3cb15e5caaa030e7fdf0e4936bc7ab5a96353e0a01e4130c3f8bf22d473e317029a47adeb6adc462f7058f2a20d371e9702254e9b201642005b3ceda926b42b178bef9", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #317: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd8374253e3e21bd154448d0a8f640fe46fafa8b19ce78d538f6cc0a19662d3601c2fd20bac06e555bb8ac0ce69eb1ea20f83a1fc3501c8a66469b1a31f619b0986237050779f52b615bd7b8d76a25fc95ca2ed32525c75f27ffc87ac397e6cbaf", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #318: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd357cfd3be4d01d413c5b9ede36cba5452c11ee7fe14879e749ae6a2d897a52d63fd6a1ca7f77fb3b0bbe726c372010068426e11ea6ae78ce17bedae4bba86ced03ce5516406bf8cfaab8745eac1cd69018ad6f50b5461872ddfc56e0db3c8ff4", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #319: edge case for u1", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd29798c5c0ee287d4a5e8e6b799fd86b8df5225298e6ffc807cd2f2bc27a0a6d89cb8e51e27a5ae3b624a60d6dc32734e4989db20e9bca3ede1edf7b086911114b4c104ab3c677e4b36d6556e8ad5f523410a19f2e277aa895fc57322b4427544", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #320: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd0b70f22c781092452dca1a5711fa3a5a1f72add1bf52c2ff7cae4820b30078dda3e52c156dcaf10502620b7955bc2b40bc78ef3d569e1223c262512d8f49602a4a2039f31c1097024ad3cc86e57321de032355463486164cf192944977df147f", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #321: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd16e1e458f021248a5b9434ae23f474b43ee55ba37ea585fef95c90416600f1baf19b78928720d5bee8e670fb90010fb15c37bf91b58a5157c3f3c059b2655e88cf701ec962fb4a11dcf273f5dc357e58468560c7cfeb942d074abd4329260509", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #322: edge case for u1", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd2252d6856831b6cf895e4f0535eeaf0e5e5809753df848fe760ad86219016a9783a744459ecdfb01a5cf52b27a05bb7337482d242f235d7b4cb89345545c90a8c05d49337b9649813287de9ffe90355fd905df5f3c32945828121f37cc50de6e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #323: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd81ffe55f178da695b28c86d8b406b15dab1a9e39661a3ae017fbe390ac0972c3dd13c6b34c56982ddae124f039dfd23f4b19bbe88cee8e528ae51e5d6f3a21d7bfad4c2e6f263fe5eb59ca974d039fc0e4c3345692fb5320bdae4bd3b42a45ff", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #324: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd7fffffffaaaaaaaaffffffffffffffffe9a2538f37b28a2c513dee40fecbb71a67e6f659cdde869a2f65f094e94e5b4dfad636bbf95192feeed01b0f3deb7460a37e0a51f258b7aeb51dfe592f5cfd5685bbe58712c8d9233c62886437c38ba0", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #325: edge case for u2", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdb62f26b5f2a2b26f6de86d42ad8a13da3ab3cccd0459b201de009e526adf21f22eb6412505aec05c6545f029932087e490d05511e8ec1f599617bb367f9ecaaf805f51efcc4803403f9b1ae0124890f06a43fedcddb31830f6669af292895cb0", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #326: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdbb1d9ac949dd748cd02bbbe749bd351cd57b38bb61403d700686aa7b4c90851e84db645868eab35e3a9fd80e056e2e855435e3a6b68d75a50a854625fe0d7f356d2589ac655edc9a11ef3e075eddda9abf92e72171570ef7bf43a2ee39338cfe", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #327: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd66755a00638cdaec1c732513ca0234ece52545dac11f816e818f725b4f60aaf291b9e47c56278662d75c0983b22ca8ea6aa5059b7a2ff7637eb2975e386ad66349aa8ff283d0f77c18d6d11dc062165fd13c3c0310679c1408302a16854ecfbd", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #328: edge case for u2", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd55a00c9fcdaebb6032513ca0234ecfffe98ebe492fdf02e48ca48e982beb3669f3ec2f13caf04d0192b47fb4c5311fb6d4dc6b0a9e802e5327f7ec5ee8e4834df97e3e468b7d0db867d6ecfe81e2b0f9531df87efdb47c1338ac321fefe5a432", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #329: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdab40193f9b5d76c064a27940469d9fffd31d7c925fbe05c919491d3057d66cd2d92b200aefcab6ac7dafd9acaf2fa10b3180235b8f46b4503e4693c670fccc885ef2f3aebf5b317475336256768f7c19efb7352d27e4cccadc85b6b8ab922c72", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #330: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdca0234ebb5fdcb13ca0234ecffffffffcb0dadbbc7f549f8a26b4408d0dc86000a88361eb92ecca2625b38e5f98bbabb96bf179b3d76fc48140a3bcd881523cde6bdf56033f84a5054035597375d90866aa2c96b86a41ccf6edebf47298ad489", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #331: edge case for u2", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdbfffffff3ea3677e082b9310572620ae19933a9e65b285598711c77298815ad3d0fb17ccd8fafe827e0c1afc5d8d80366e2b20e7f14a563a2ba50469d84375e868612569d39e2bb9f554355564646de99ac602cc6349cf8c1e236a7de7637d93", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #332: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd266666663bbbbbbbe6666666666666665b37902e023fab7c8f055d86e5cc41f4836f33bbc1dc0d3d3abbcef0d91f11e2ac4181076c9af0a22b1e4309d3edb2769ab443ff6f901e30c773867582997c2bec2b0cb8120d760236f3a95bbe881f75", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #333: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdbfffffff36db6db7a492492492492492146c573f4c6dfc8d08a443e258970b0992f99fbe973ed4a299719baee4b432741237034dec8d72ba5103cb33e55feeb8033dd0e91134c734174889f3ebcf1b7a1ac05767289280ee7a794cebd6e69697", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #334: edge case for u2", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdbfffffff2aaaaaab7fffffffffffffffc815d0e60b3e596ecb1ad3a27cfd49c4d35ba58da30197d378e618ec0fa7e2e2d12cffd73ebbb2049d130bba434af09eff83986e6875e41ea432b7585a49b3a6c77cbb3c47919f8e82874c794635c1d2", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #335: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd7fffffff55555555ffffffffffffffffd344a71e6f651458a27bdc81fd976e378651ce490f1b46d73f3ff475149be29136697334a519d7ddab0725c8d0793224e11c65bd8ca92dc8bc9ae82911f0b52751ce21dd9003ae60900bd825f590cc28", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #336: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd3fffffff800000007fffffffffffffffde737d56d38bcf4279dce5617e3192aa6d8e1b12c831a0da8795650ff95f101ed921d9e2f72b15b1cdaca9826b9cfc6def6d63e2bc5c089570394a4bc9f892d5e6c7a6a637b20469a58c106ad486bf37", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #337: edge case for u2", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd5d8ecd64a4eeba466815ddf3a4de9a8e6abd9c5db0a01eb80343553da648428f0ae580bae933b4ef2997cbdbb0922328ca9a410f627a0f7dff24cb4d920e15428911e7f8cc365a8a88eb81421a361ccc2b99e309d8dcd9a98ba83c3949d893e3", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #338: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050236f2347cab7dd76858fe0555ac3bc99048c4aacafdfb6bcbe05ea6c42c4934569bb726660235793aa9957a61e76e00c2c435109cf9a15dd624d53f4301047856b5b812fd521aafa69835a849cce6fbdeb6983b442d2444fe70e134c027fc46963838a40f2a36092e9004e92d8d940cf5638550ce672ce8b8d4e15eba5499249e9", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #339: point duplication during verification", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050236f2347cab7dd76858fe0555ac3bc99048c4aacafdfb6bcbe05ea6c42c4934569bb726660235793aa9957a61e76e00c2c435109cf9a15dd624d53f4301047856b5b812fd521aafa69835a849cce6fbdeb6983b442d2444fe70e134c027fc469637c75bf0c5c9f6d17ffb16d2726bf30a9c7aaf31a8d317472b1ea145ab66db616", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #340: duplication bug", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023555555550000000055555555555555553ef7a8e48d07df81a693439654210c703333333300000000333333333333333325c7cbbc549e52e763f1f55a327a3aa9dd86d3b5f4a13e8511083b78002081c53ff467f11ebd98a51a633db76665d25045d5c8200c89f2fa10d849349226d21d8dfaed6ff8d5cb3e1b7e17474ebc18f7", + "Expected": "", + "Gas": 
3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #343: comparison with point at infinity ", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237cf27b188d034f7e8a52380304b51ac3c08969e277f21b35a60b48fc47669978555555550000000055555555555555553ef7a8e48d07df81a693439654210c704fea55b32cb32aca0c12c4cd0abfb4e64b0f5a516e578c016591a93f5a0fbcc5d7d3fd10b2be668c547b212f6bb14c88f0fecd38a8a4b2c785ed3be62ce4b280", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #344: extreme value for k and edgecase s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237cf27b188d034f7e8a52380304b51ac3c08969e277f21b35a60b48fc47669978b6db6db6249249254924924924924924625bd7a09bec4ca81bcdd9f8fd6b63ccc6a771527024227792170a6f8eee735bf32b7f98af669ead299802e32d7c3107bc3b4b5e65ab887bbd343572b3e5619261fe3a073e2ffd78412f726867db589e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #345: extreme value for k and s^-1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237cf27b188d034f7e8a52380304b51ac3c08969e277f21b35a60b48fc47669978cccccccc00000000cccccccccccccccc971f2ef152794b9d8fc7d568c9e8eaa7851c2bbad08e54ec7a9af99f49f03644d6ec6d59b207fec98de85a7d15b956efcee9960283045075684b410be8d0f7494b91aa2379f60727319f10ddeb0fe9d6", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #346: extreme value for k and s^-1", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237cf27b188d034f7e8a52380304b51ac3c08969e277f21b35a60b48fc476699783333333300000000333333333333333325c7cbbc549e52e763f1f55a327a3aaaf6417c8a670584e388676949e53da7fc55911ff68318d1bf3061205acb19c48f8f2b743df34ad0f72674acb7505929784779cd9ac916c3669ead43026ab6d43f", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #347: extreme value for k and s^-1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237cf27b188d034f7e8a52380304b51ac3c08969e277f21b35a60b48fc4766997849249248db6db6dbb6db6db6db6db6db5a8b230d0b2b51dcd7ebf0c9fef7c185501421277be45a5eefec6c639930d636032565af420cf3373f557faa7f8a06438673d6cb6076e1cfcdc7dfe7384c8e5cac08d74501f2ae6e89cad195d0aa1371", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #348: extreme value for k and s^-1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050237cf27b188d034f7e8a52380304b51ac3c08969e277f21b35a60b48fc4766997816a4502e2781e11ac82cbc9d1edd8c981584d13e18411e2f6e0478c34416e3bb0d935bf9ffc115a527735f729ca8a4ca23ee01a4894adf0e3415ac84e808bb343195a3762fea29ed38912bd9ea6c4fde70c3050893a4375850ce61d82eba33c5", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #349: extreme value for k", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050236b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296555555550000000055555555555555553ef7a8e48d07df81a693439654210c705e59f50708646be8a589355014308e60b668fb670196206c41e748e64e4dca215de37fee5c97bcaf7144d5b459982f52eeeafbdf03aacbafef38e213624a01de", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #350: extreme value for k and edgecase s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050236b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296b6db6db6249249254924924924924924625bd7a09bec4ca81bcdd9f8fd6b63cc169fb797325843faff2f7a5b5445da9e2fd6226f7ef90ef0bfe924104b02db8e7bbb8de662c7b9b1cf9b22f7a2e582bd46d581d68878efb2b861b131d8a1d667", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #351: extreme value for k and s^-1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050236b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296cccccccc00000000cccccccccccccccc971f2ef152794b9d8fc7d568c9e8eaa7271cd89c000143096b62d4e9e4ca885aef2f7023d18affdaf8b7b548981487540a1c6e954e32108435b55fa385b0f76481a609b9149ccb4b02b2ca47fe8e4da5", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #352: extreme value for k and s^-1", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050236b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c2963333333300000000333333333333333325c7cbbc549e52e763f1f55a327a3aaa3d0bc7ed8f09d2cb7ddb46ebc1ed799ab1563a9ab84bf524587a220afe499c12e22dc3b3c103824a4f378d96adb0a408abf19ce7d68aa6244f78cb216fa3f8df", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #353: extreme value for k and s^-1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050236b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c29649249248db6db6dbb6db6db6db6db6db5a8b230d0b2b51dcd7ebf0c9fef7c185a6c885ade1a4c566f9bb010d066974abb281797fa701288c721bcbd23663a9b72e424b690957168d193a6096fc77a2b004a9c7d467e007e1f2058458f98af316", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #354: extreme value for k and s^-1", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050236b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c29616a4502e2781e11ac82cbc9d1edd8c981584d13e18411e2f6e0478c34416e3bb8d3c2c2c3b765ba8289e6ac3812572a25bf75df62d87ab7330c3bdbad9ebfa5c4c6845442d66935b238578d43aec54f7caa1621d1af241d4632e0b780c423f5d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #355: extreme value for k", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023249249246db6db6ddb6db6db6db6db6dad4591868595a8ee6bf5f864ff7be0c26b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c2964fe342e2fe1a7f9b8ee7eb4a7c0f9e162bce33576b315ececbb6406837bf51f5", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #356: testing point duplication", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca60502344a5ad0ad0636d9f12bc9e0a6bdd5e1cbcb012ea7bf091fcec15b0c43202d52e249249246db6db6ddb6db6db6db6db6dad4591868595a8ee6bf5f864ff7be0c26b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c2964fe342e2fe1a7f9b8ee7eb4a7c0f9e162bce33576b315ececbb6406837bf51f5", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #357: testing point duplication", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023249249246db6db6ddb6db6db6db6db6dad4591868595a8ee6bf5f864ff7be0c26b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296b01cbd1c01e58065711814b583f061e9d431cca994cea1313449bf97c840ae0a", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #358: testing point duplication", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca60502344a5ad0ad0636d9f12bc9e0a6bdd5e1cbcb012ea7bf091fcec15b0c43202d52e249249246db6db6ddb6db6db6db6db6dad4591868595a8ee6bf5f864ff7be0c26b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296b01cbd1c01e58065711814b583f061e9d431cca994cea1313449bf97c840ae0a", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #359: testing point duplication", + 
"NoBenchmark": false + }, + { + "Input": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855b292a619339f6e567a305c951c0dcbcc42d16e47f219f9e98e76e09d8770b34a0177e60492c5a8242f76f07bfe3661bde59ec2a17ce5bd2dab2abebdf89a62e204aaec73635726f213fb8a9e64da3b8632e41495a944d0045b522eba7240fad587d9315798aaa3a5ba01775787ced05eaaf7b4e09fc81d6d1aa546e8365d525d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #360: pseudorandom signature", + "NoBenchmark": false + }, + { + "Input": "dc1921946f4af96a2856e7be399007c9e807bdf4c5332f19f59ec9dd1bb8c7b3530bd6b0c9af2d69ba897f6b5fb59695cfbf33afe66dbadcf5b8d2a2a6538e23d85e489cb7a161fd55ededcedbf4cc0c0987e3e3f0f242cae934c72caa3f43e904aaec73635726f213fb8a9e64da3b8632e41495a944d0045b522eba7240fad587d9315798aaa3a5ba01775787ced05eaaf7b4e09fc81d6d1aa546e8365d525d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #361: pseudorandom signature", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023a8ea150cb80125d7381c4c1f1da8e9de2711f9917060406a73d7904519e51388f3ab9fa68bd47973a73b2d40480c2ba50c22c9d76ec217257288293285449b8604aaec73635726f213fb8a9e64da3b8632e41495a944d0045b522eba7240fad587d9315798aaa3a5ba01775787ced05eaaf7b4e09fc81d6d1aa546e8365d525d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #362: pseudorandom signature", + "NoBenchmark": false + }, + { + "Input": 
"de47c9b27eb8d300dbb5f2c353e632c393262cf06340c4fa7f1b40c4cbd36f90986e65933ef2ed4ee5aada139f52b70539aaf63f00a91f29c69178490d57fb713dafedfb8da6189d372308cbf1489bbbdabf0c0217d1c0ff0f701aaa7a694b9c04aaec73635726f213fb8a9e64da3b8632e41495a944d0045b522eba7240fad587d9315798aaa3a5ba01775787ced05eaaf7b4e09fc81d6d1aa546e8365d525d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #363: pseudorandom signature", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91d434e262a49eab7781e353a3565e482550dd0fd5defa013c7f29745eff3569f19b0c0a93f267fb6052fd8077be769c2b98953195d7bc10de844218305c6ba17a4f337ccfd67726a805e4f1600ae2849df3807eca117380239fbd816900000000ed9dea124cc8c396416411e988c30f427eb504af43a3146cd5df7ea60666d685", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #364: x-coordinate of the public key has many trailing 0's", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f910fe774355c04d060f76d79fd7a772e421463489221bf0a33add0be9b1979110b500dcba1c69a8fbd43fa4f57f743ce124ca8b91a1f325f3fac6181175df557374f337ccfd67726a805e4f1600ae2849df3807eca117380239fbd816900000000ed9dea124cc8c396416411e988c30f427eb504af43a3146cd5df7ea60666d685", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #365: x-coordinate of the public key has many trailing 0's", + "NoBenchmark": false + }, + { + "Input": 
"2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91bb40bf217bed3fb3950c7d39f03d36dc8e3b2cd79693f125bfd06595ee1135e3541bf3532351ebb032710bdb6a1bf1bfc89a1e291ac692b3fa4780745bb556774f337ccfd67726a805e4f1600ae2849df3807eca117380239fbd816900000000ed9dea124cc8c396416411e988c30f427eb504af43a3146cd5df7ea60666d685", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #366: x-coordinate of the public key has many trailing 0's", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91664eb7ee6db84a34df3c86ea31389a5405badd5ca99231ff556d3e75a233e73a59f3c752e52eca46137642490a51560ce0badc678754b8f72e51a2901426a1bd3cf03d614d8939cfd499a07873fac281618f06b8ff87e8015c3f49726500493584fa174d791c72bf2ce3880a8960dd2a7c7a1338a82f85a9e59cdbde80000000", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #367: y-coordinate of the public key has many trailing 0's", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f914cd0429bbabd2827009d6fcd843d4ce39c3e42e2d1631fd001985a79d1fd8b439638bf12dd682f60be7ef1d0e0d98f08b7bca77a1a2b869ae466189d2acdabe33cf03d614d8939cfd499a07873fac281618f06b8ff87e8015c3f49726500493584fa174d791c72bf2ce3880a8960dd2a7c7a1338a82f85a9e59cdbde80000000", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #368: y-coordinate of the public key has many trailing 0's", + "NoBenchmark": false + }, + { + "Input": 
"2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91e56c6ea2d1b017091c44d8b6cb62b9f460e3ce9aed5e5fd41e8added97c56c04a308ec31f281e955be20b457e463440b4fcf2b80258078207fc1378180f89b553cf03d614d8939cfd499a07873fac281618f06b8ff87e8015c3f49726500493584fa174d791c72bf2ce3880a8960dd2a7c7a1338a82f85a9e59cdbde80000000", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #369: y-coordinate of the public key has many trailing 0's", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f911158a08d291500b4cabed3346d891eee57c176356a2624fb011f8fbbf3466830228a8c486a736006e082325b85290c5bc91f378b75d487dda46798c18f2855193cf03d614d8939cfd499a07873fac281618f06b8ff87e8015c3f4972650049357b05e8b186e38d41d31c77f5769f22d58385ecc857d07a561a6324217fffffff", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #370: y-coordinate of the public key has many trailing 1's", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91b1db9289649f59410ea36b0c0fc8d6aa2687b29176939dd23e0dde56d309fa9d3e1535e4280559015b0dbd987366dcf43a6d1af5c23c7d584e1c3f48a12513363cf03d614d8939cfd499a07873fac281618f06b8ff87e8015c3f4972650049357b05e8b186e38d41d31c77f5769f22d58385ecc857d07a561a6324217fffffff", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #371: y-coordinate of the public key has many trailing 1's", + "NoBenchmark": false + }, + { + "Input": 
"2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91b7b16e762286cb96446aa8d4e6e7578b0a341a79f2dd1a220ac6f0ca4e24ed86ddc60a700a139b04661c547d07bbb0721780146df799ccf55e55234ecb8f12bc3cf03d614d8939cfd499a07873fac281618f06b8ff87e8015c3f4972650049357b05e8b186e38d41d31c77f5769f22d58385ecc857d07a561a6324217fffffff", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #372: y-coordinate of the public key has many trailing 1's", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91d82a7c2717261187c8e00d8df963ff35d796edad36bc6e6bd1c91c670d9105b43dcabddaf8fcaa61f4603e7cbac0f3c0351ecd5988efb23f680d07debd1399292829c31faa2e400e344ed94bca3fcd0545956ebcfe8ad0f6dfa5ff8effffffffa01aafaf000e52585855afa7676ade284113099052df57e7eb3bd37ebeb9222e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #373: x-coordinate of the public key has many trailing 1's", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f915eb9c8845de68eb13d5befe719f462d77787802baff30ce96a5cba063254af782c026ae9be2e2a5e7ca0ff9bbd92fb6e44972186228ee9a62b87ddbe2ef66fb52829c31faa2e400e344ed94bca3fcd0545956ebcfe8ad0f6dfa5ff8effffffffa01aafaf000e52585855afa7676ade284113099052df57e7eb3bd37ebeb9222e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #374: x-coordinate of the public key has many trailing 1's", + "NoBenchmark": false + }, + { + "Input": 
"2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f9196843dd03c22abd2f3b782b170239f90f277921becc117d0404a8e4e36230c28f2be378f526f74a543f67165976de9ed9a31214eb4d7e6db19e1ede123dd991d2829c31faa2e400e344ed94bca3fcd0545956ebcfe8ad0f6dfa5ff8effffffffa01aafaf000e52585855afa7676ade284113099052df57e7eb3bd37ebeb9222e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #375: x-coordinate of the public key has many trailing 1's", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91766456dce1857c906f9996af729339464d27e9d98edc2d0e3b760297067421f6402385ecadae0d8081dccaf5d19037ec4e55376eced699e93646bfbbf19d0b41fffffff948081e6a0458dd8f9e738f2665ff9059ad6aac0708318c4ca9a7a4f55a8abcba2dda8474311ee54149b973cae0c0fb89557ad0bf78e6529a1663bd73", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #376: x-coordinate of the public key is large", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91c605c4b2edeab20419e6518a11b2dbc2b97ed8b07cced0b19c34f777de7b9fd9edf0f612c5f46e03c719647bc8af1b29b2cde2eda700fb1cff5e159d47326dbafffffff948081e6a0458dd8f9e738f2665ff9059ad6aac0708318c4ca9a7a4f55a8abcba2dda8474311ee54149b973cae0c0fb89557ad0bf78e6529a1663bd73", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #377: x-coordinate of the public key is large", + "NoBenchmark": false + }, + { + "Input": 
"2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91d48b68e6cabfe03cf6141c9ac54141f210e64485d9929ad7b732bfe3b7eb8a84feedae50c61bd00e19dc26f9b7e2265e4508c389109ad2f208f0772315b6c941fffffff948081e6a0458dd8f9e738f2665ff9059ad6aac0708318c4ca9a7a4f55a8abcba2dda8474311ee54149b973cae0c0fb89557ad0bf78e6529a1663bd73", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #378: x-coordinate of the public key is large", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91b7c81457d4aeb6aa65957098569f0479710ad7f6595d5874c35a93d12a5dd4c7b7961a0b652878c2d568069a432ca18a1a9199f2ca574dad4b9e3a05c0a1cdb300000003fa15f963949d5f03a6f5c7f86f9e0015eeb23aebbff1173937ba748e1099872070e8e87c555fa13659cca5d7fadcfcb0023ea889548ca48af2ba7e71", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #379: x-coordinate of the public key is small", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f916b01332ddb6edfa9a30a1321d5858e1ee3cf97e263e669f8de5e9652e76ff3f75939545fced457309a6a04ace2bd0f70139c8f7d86b02cb1cc58f9e69e96cd5a00000003fa15f963949d5f03a6f5c7f86f9e0015eeb23aebbff1173937ba748e1099872070e8e87c555fa13659cca5d7fadcfcb0023ea889548ca48af2ba7e71", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #380: x-coordinate of the public key is small", + "NoBenchmark": false + }, + { + "Input": 
"2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91efdb884720eaeadc349f9fc356b6c0344101cd2fd8436b7d0e6a4fb93f106361f24bee6ad5dc05f7613975473aadf3aacba9e77de7d69b6ce48cb60d8113385d00000003fa15f963949d5f03a6f5c7f86f9e0015eeb23aebbff1173937ba748e1099872070e8e87c555fa13659cca5d7fadcfcb0023ea889548ca48af2ba7e71", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #381: x-coordinate of the public key is small", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f9131230428405560dcb88fb5a646836aea9b23a23dd973dcbe8014c87b8b20eb070f9344d6e812ce166646747694a41b0aaf97374e19f3c5fb8bd7ae3d9bd0beffbcbb2914c79f045eaa6ecbbc612816b3be5d2d6796707d8125e9f851c18af015000000001352bb4a0fa2ea4cceb9ab63dd684ade5a1127bcf300a698a7193bc2", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #382: y-coordinate of the public key is small", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91caa797da65b320ab0d5c470cda0b36b294359c7db9841d679174db34c4855743cf543a62f23e212745391aaf7505f345123d2685ee3b941d3de6d9b36242e5a0bcbb2914c79f045eaa6ecbbc612816b3be5d2d6796707d8125e9f851c18af015000000001352bb4a0fa2ea4cceb9ab63dd684ade5a1127bcf300a698a7193bc2", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #383: y-coordinate of the public key is small", + "NoBenchmark": false + }, + { + "Input": 
"2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f917e5f0ab5d900d3d3d7867657e5d6d36519bc54084536e7d21c336ed8001859459450c07f201faec94b82dfb322e5ac676688294aad35aa72e727ff0b19b646aabcbb2914c79f045eaa6ecbbc612816b3be5d2d6796707d8125e9f851c18af015000000001352bb4a0fa2ea4cceb9ab63dd684ade5a1127bcf300a698a7193bc2", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #384: y-coordinate of the public key is small", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91d7d70c581ae9e3f66dc6a480bf037ae23f8a1e4a2136fe4b03aa69f0ca25b35689c460f8a5a5c2bbba962c8a3ee833a413e85658e62a59e2af41d9127cc47224bcbb2914c79f045eaa6ecbbc612816b3be5d2d6796707d8125e9f851c18af015fffffffeecad44b6f05d15b33146549c2297b522a5eed8430cff596758e6c43d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #385: y-coordinate of the public key is large", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91341c1b9ff3c83dd5e0dfa0bf68bcdf4bb7aa20c625975e5eeee34bb396266b3472b69f061b750fd5121b22b11366fad549c634e77765a017902a67099e0a4469bcbb2914c79f045eaa6ecbbc612816b3be5d2d6796707d8125e9f851c18af015fffffffeecad44b6f05d15b33146549c2297b522a5eed8430cff596758e6c43d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #386: y-coordinate of the public key is large", + "NoBenchmark": false + }, + { + "Input": 
"2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f9170bebe684cdcb5ca72a42f0d873879359bd1781a591809947628d313a3814f67aec03aca8f5587a4d535fa31027bbe9cc0e464b1c3577f4c2dcde6b2094798a9bcbb2914c79f045eaa6ecbbc612816b3be5d2d6796707d8125e9f851c18af015fffffffeecad44b6f05d15b33146549c2297b522a5eed8430cff596758e6c43d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_secp256r1_sha256_test.json EcdsaVerify SHA-256 #387: y-coordinate of the public key is large", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050232ba3a8be6b94d5ec80a6d9d1190a436effe50d85a1eee859b8cc6af9bd5c2e184cd60b855d442f5b3c7b11eb6c4e0ae7525fe710fab9aa7c77a67f79e6fadd762927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #1: signature malleability", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050232ba3a8be6b94d5ec80a6d9d1190a436effe50d85a1eee859b8cc6af9bd5c2e18b329f479a2bbd0a5c384ee1493b1f5186a87139cac5df4087c134b49156847db2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #2: Legacy:ASN encoding of s misses leading 0", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050232ba3a8be6b94d5ec80a6d9d1190a436effe50d85a1eee859b8cc6af9bd5c2e18b329f479a2bbd0a5c384ee1493b1f5186a87139cac5df4087c134b49156847db2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #3: valid", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca60502329a3a8be6b94d5ec80a6d9d1190a436effe50d85a1eee859b8cc6af9bd5c2e18b329f479a2bbd0a5c384ee1493b1f5186a87139cac5df4087c134b49156847db2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #118: modify first byte of integer", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050232ba3a8be6b94d5ec80a6d9d1190a436effe50d85a1eee859b8cc6af9bd5c2e98b329f479a2bbd0a5c384ee1493b1f5186a87139cac5df4087c134b49156847db2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #120: modify last byte of integer", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050232ba3a8be6b94d5ec80a6d9d1190a436effe50d85a1eee859b8cc6af9bd5c2e18b329f479a2bbd0a5c384ee1493b1f5186a87139cac5df4087c134b491568475b2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #121: modify last byte of integer", + "NoBenchmark": false + 
}, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050232ba3a8be6b94d5ec80a6d9d1190a436effe50d85a1eee859b8cc6af9bd5c2e1800b329f479a2bbd0a5c384ee1493b1f5186a87139cac5df4087c134b491568472927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #124: truncated integer", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023d45c5741946b2a137f59262ee6f5bc91001af27a5e1117a64733950642a3d1e8b329f479a2bbd0a5c384ee1493b1f5186a87139cac5df4087c134b49156847db2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #133: Modified r or s, e.g. by adding or subtracting the order of the group", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023d45c5740946b2a147f59262ee6f5bc90bd01ed280528b62b3aed5fc93f06f739b329f479a2bbd0a5c384ee1493b1f5186a87139cac5df4087c134b49156847db2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #134: Modified r or s, e.g. 
by adding or subtracting the order of the group", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023d45c5741946b2a137f59262ee6f5bc91001af27a5e1117a64733950642a3d1e8b329f479a2bbd0a5c384ee1493b1f5186a87139cac5df4087c134b49156847db2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #137: Modified r or s, e.g. by adding or subtracting the order of the group", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050232ba3a8be6b94d5ec80a6d9d1190a436effe50d85a1eee859b8cc6af9bd5c2e18b329f47aa2bbd0a4c384ee1493b1f518ada018ef05465583885980861905228a2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #139: Modified r or s, e.g. by adding or subtracting the order of the group", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050232ba3a8be6b94d5ec80a6d9d1190a436effe50d85a1eee859b8cc6af9bd5c2e184cd60b865d442f5a3c7b11eb6c4e0ae79578ec6353a20bf783ecb4b6ea97b8252927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #143: Modified r or s, e.g. 
by adding or subtracting the order of the group", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325512927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #177: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325502927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #178: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325522927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #179: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551ffffffff00000001000000000000000000000000ffffffffffffffffffffffff2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": 
"wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #180: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551ffffffff000000010000000000000000000000010000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #181: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632550ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325512927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #187: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632550ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325502927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #188: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632550ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325522927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #189: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632550ffffffff00000001000000000000000000000000ffffffffffffffffffffffff2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #190: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632550ffffffff000000010000000000000000000000010000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #191: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632552ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325512927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #197: Signature with special case values for 
r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632552ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325502927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #198: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632552ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325522927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #199: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632552ffffffff00000001000000000000000000000000ffffffffffffffffffffffff2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #200: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632552ffffffff000000010000000000000000000000010000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify 
SHA-256 #201: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000000ffffffffffffffffffffffffffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325512927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #207: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000000ffffffffffffffffffffffffffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325502927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #208: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000000ffffffffffffffffffffffffffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325522927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #209: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000000ffffffffffffffffffffffffffffffff00000001000000000000000000000000ffffffffffffffffffffffff2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 
3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #210: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000000ffffffffffffffffffffffffffffffff000000010000000000000000000000010000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #211: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000001000000000000000000000000ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325512927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #217: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000001000000000000000000000000ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325502927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #218: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000001000000000000000000000000ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325522927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #219: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000001000000000000000000000000ffffffff00000001000000000000000000000000ffffffffffffffffffffffff2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #220: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000001000000000000000000000000ffffffff000000010000000000000000000000010000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #221: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "70239dd877f7c944c422f44dea4ed1a52f2627416faf2f072fa50c772ed6f80764a1aab5000d0e804f3e2fc02bdee9be8ff312334e2ba16d11547c97711c898e6af015971cc30be6d1a206d4e013e0997772a2f91d73286ffd683b9bb2cf4f1b2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json 
EcdsaVerify SHA-256 #230: Edge case for Shamir multiplication", + "NoBenchmark": false + }, + { + "Input": "00000000690ed426ccf17803ebe2bd0884bcd58a1bb5e7477ead3645f356e7a916aea964a2f6506d6f78c81c91fc7e8bded7d397738448de1e19a0ec580bf266252cd762130c6667cfe8b7bc47d27d78391e8e80c578d1cd38c3ff033be928e92927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #231: special case hash", + "NoBenchmark": false + }, + { + "Input": "7300000000213f2a525c6035725235c2f696ad3ebb5ee47f140697ad25770d919cc98be2347d469bf476dfc26b9b733df2d26d6ef524af917c665baccb23c882093496459effe2d8d70727b82462f61d0ec1b7847929d10ea631dacb16b56c322927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #232: special case hash", + "NoBenchmark": false + }, + { + "Input": "ddf2000000005e0be0635b245f0b97978afd25daadeb3edb4a0161c27fe0604573b3c90ecd390028058164524dde892703dce3dea0d53fa8093999f07ab8aa432f67b0b8e20636695bb7d8bf0a651c802ed25a395387b5f4188c0c4075c886342927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #233: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"67ab1900000000784769c4ecb9e164d6642b8499588b89855be1ec355d0841a0bfab3098252847b328fadf2f89b95c851a7f0eb390763378f37e90119d5ba3ddbdd64e234e832b1067c2d058ccb44d978195ccebb65c2aaf1e2da9b8b4987e3b2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #234: special case hash", + "NoBenchmark": false + }, + { + "Input": "a2bf09460000000076d7dbeffe125eaf02095dff252ee905e296b6350fc311cf204a9784074b246d8bf8bf04a4ceb1c1f1c9aaab168b1596d17093c5cd21d2cd51cce41670636783dc06a759c8847868a406c2506fe17975582fe648d1d88b522927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #235: special case hash", + "NoBenchmark": false + }, + { + "Input": "3554e827c700000000e1e75e624a06b3a0a353171160858129e15c544e4f0e65ed66dc34f551ac82f63d4aa4f81fe2cb0031a91d1314f835027bca0f1ceeaa0399ca123aa09b13cd194a422e18d5fda167623c3f6e5d4d6abb8953d67c0c48c72927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #236: special case hash", + "NoBenchmark": false + }, + { + "Input": "9b6cd3b812610000000026941a0f0bb53255ea4c9fd0cb3426e3a54b9fc6965c060b700bef665c68899d44f2356a578d126b062023ccc3c056bf0f60a237012b8d186c027832965f4fcc78a3366ca95dedbb410cbef3f26d6be5d581c11d36102927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": 
"0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #237: special case hash", + "NoBenchmark": false + }, + { + "Input": "883ae39f50bf0100000000e7561c26fc82a52baa51c71ca877162f93c4ae01869f6adfe8d5eb5b2c24d7aa7934b6cf29c93ea76cd313c9132bb0c8e38c96831db26a9c9e40e55ee0890c944cf271756c906a33e66b5bd15e051593883b5e99022927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #238: special case hash", + "NoBenchmark": false + }, + { + "Input": "a1ce5d6e5ecaf28b0000000000fa7cd010540f420fb4ff7401fe9fce011d0ba6a1af03ca91677b673ad2f33615e56174a1abf6da168cebfa8868f4ba273f16b720aa73ffe48afa6435cd258b173d0c2377d69022e7d098d75caf24c8c5e06b1c2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #239: special case hash", + "NoBenchmark": false + }, + { + "Input": "8ea5f645f373f580930000000038345397330012a8ee836c5494cdffd5ee8054fdc70602766f8eed11a6c99a71c973d5659355507b843da6e327a28c11893db93df5349688a085b137b1eacf456a9e9e0f6d15ec0078ca60a7f83f2b10d213502927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #240: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"660570d323e9f75fa734000000008792d65ce93eabb7d60d8d9c1bbdcb5ef305b516a314f2fce530d6537f6a6c49966c23456f63c643cf8e0dc738f7b876e675d39ffd033c92b6d717dd536fbc5efdf1967c4bd80954479ba66b0120cd16fff22927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #241: special case hash", + "NoBenchmark": false + }, + { + "Input": "d0462673154cce587dde8800000000e98d35f1f45cf9c3bf46ada2de4c568c343b2cbf046eac45842ecb7984d475831582717bebb6492fd0a485c101e29ff0a84c9b7b47a98b0f82de512bc9313aaf51701099cac5f76e68c8595fc1c1d992582927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #242: special case hash", + "NoBenchmark": false + }, + { + "Input": "bd90640269a7822680cedfef000000000caef15a6171059ab83e7b4418d7278f30c87d35e636f540841f14af54e2f9edd79d0312cfa1ab656c3fb15bfde48dcf47c15a5a82d24b75c85a692bd6ecafeb71409ede23efd08e0db9abf6340677ed2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #243: special case hash", + "NoBenchmark": false + }, + { + "Input": "33239a52d72f1311512e41222a00000000d2dcceb301c54b4beae8e284788a7338686ff0fda2cef6bc43b58cfe6647b9e2e8176d168dec3c68ff262113760f52067ec3b651f422669601662167fa8717e976e2db5e6a4cf7c2ddabb3fde9d67d2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": 
"0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #244: special case hash", + "NoBenchmark": false + }, + { + "Input": "b8d64fbcd4a1c10f1365d4e6d95c000000007ee4a21a1cbe1dc84c2d941ffaf144a3e23bf314f2b344fc25c7f2de8b6af3e17d27f5ee844b225985ab6e2775cf2d48e223205e98041ddc87be532abed584f0411f5729500493c9cc3f4dd15e862927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #245: special case hash", + "NoBenchmark": false + }, + { + "Input": "01603d3982bf77d7a3fef3183ed092000000003a227420db4088b20fe0e9d84a2ded5b7ec8e90e7bf11f967a3d95110c41b99db3b5aa8d330eb9d638781688e97d5792c53628155e1bfc46fb1a67e3088de049c328ae1f44ec69238a009808f92927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #246: special case hash", + "NoBenchmark": false + }, + { + "Input": "9ea6994f1e0384c8599aa02e6cf66d9c000000004d89ef50b7e9eb0cfbff7363bdae7bcb580bf335efd3bc3d31870f923eaccafcd40ec2f605976f15137d8b8ff6dfa12f19e525270b0106eecfe257499f373a4fb318994f24838122ce7ec3c72927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #247: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"d03215a8401bcf16693979371a01068a4700000000e2fa5bf692bc670905b18c50f9c4f0cd6940e162720957ffff513799209b78596956d21ece251c2401f1c6d7033a0a787d338e889defaaabb106b95a4355e411a59c32aa5167dfab2447262927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #248: special case hash", + "NoBenchmark": false + }, + { + "Input": "307bfaaffb650c889c84bf83f0300e5dc87e000000008408fd5f64b582e3bb14f612820687604fa01906066a378d67540982e29575d019aabe90924ead5c860d3f9367702dd7dd4f75ea98afd20e328a1a99f4857b316525328230ce294b0fef2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #249: special case hash", + "NoBenchmark": false + }, + { + "Input": "bab5c4f4df540d7b33324d36bb0c157551527c00000000e4af574bb4d54ea6b89505e407657d6e8bc93db5da7aa6f5081f61980c1949f56b0f2f507da5782a7ac60d31904e3669738ffbeccab6c3656c08e0ed5cb92b3cfa5e7f71784f9c50212927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #250: special case hash", + "NoBenchmark": false + }, + { + "Input": "d4ba47f6ae28f274e4f58d8036f9c36ec2456f5b00000000c3b869197ef5e15ebbd16fbbb656b6d0d83e6a7787cd691b08735aed371732723e1c68a40404517d9d8e35dba96028b7787d91315be675877d2d097be5e8ee34560e3e7fd25c0f002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": 
"0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #251: special case hash", + "NoBenchmark": false + }, + { + "Input": "79fd19c7235ea212f29f1fa00984342afe0f10aafd00000000801e47f8c184e12ec9760122db98fd06ea76848d35a6da442d2ceef7559a30cf57c61e92df327e7ab271da90859479701fccf86e462ee3393fb6814c27b760c4963625c0a198782927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #252: special case hash", + "NoBenchmark": false + }, + { + "Input": "8c291e8eeaa45adbaf9aba5c0583462d79cbeb7ac97300000000a37ea6700cda54e76b7683b6650baa6a7fc49b1c51eed9ba9dd463221f7a4f1005a89fe00c592ea076886c773eb937ec1cc8374b7915cfd11b1c1ae1166152f2f7806a31c8fd2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #253: special case hash", + "NoBenchmark": false + }, + { + "Input": "0eaae8641084fa979803efbfb8140732f4cdcf66c3f78a000000003c278a6b215291deaf24659ffbbce6e3c26f6021097a74abdbb69be4fb10419c0c496c946665d6fcf336d27cc7cdb982bb4e4ecef5827f84742f29f10abf83469270a03dc32927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #254: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"e02716d01fb23a5a0068399bf01bab42ef17c6d96e13846c00000000afc0f89d207a3241812d75d947419dc58efb05e8003b33fc17eb50f9d15166a88479f107cdee749f2e492b213ce80b32d0574f62f1c5d70793cf55e382d5caadf75927672927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #255: special case hash", + "NoBenchmark": false + }, + { + "Input": "9eb0bf583a1a6b9a194e9a16bc7dab2a9061768af89d00659a00000000fc7de16554e49f82a855204328ac94913bf01bbe84437a355a0a37c0dee3cf81aa7728aea00de2507ddaf5c94e1e126980d3df16250a2eaebc8be486effe7f22b4f9292927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #256: special case hash", + "NoBenchmark": false + }, + { + "Input": "62aac98818b3b84a2c214f0d5e72ef286e1030cb53d9a82b690e00000000cd15a54c5062648339d2bff06f71c88216c26c6e19b4d80a8c602990ac82707efdfce99bbe7fcfafae3e69fd016777517aa01056317f467ad09aff09be73c9731b0d2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #257: special case hash", + "NoBenchmark": false + }, + { + "Input": "3760a7f37cf96218f29ae43732e513efd2b6f552ea4b6895464b9300000000c8975bd7157a8d363b309f1f444012b1a1d23096593133e71b4ca8b059cff37eaf7faa7a28b1c822baa241793f2abc930bd4c69840fe090f2aacc46786bf9196222927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": 
"0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #258: special case hash", + "NoBenchmark": false + }, + { + "Input": "0da0a1d2851d33023834f2098c0880096b4320bea836cd9cbb6ff6c8000000005694a6f84b8f875c276afd2ebcfe4d61de9ec90305afb1357b95b3e0da43885e0dffad9ffd0b757d8051dec02ebdf70d8ee2dc5c7870c0823b6ccc7c679cbaa42927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #259: special case hash", + "NoBenchmark": false + }, + { + "Input": "ffffffff293886d3086fd567aafd598f0fe975f735887194a764a231e82d289aa0c30e8026fdb2b4b4968a27d16a6d08f7098f1a98d21620d7454ba9790f1ba65e470453a8a399f15baf463f9deceb53acc5ca64459149688bd2760c654243392927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #260: special case hash", + "NoBenchmark": false + }, + { + "Input": "7bffffffff2376d1e3c03445a072e24326acdc4ce127ec2e0e8d9ca99527e7b7614ea84acf736527dd73602cd4bb4eea1dfebebd5ad8aca52aa0228cf7b99a88737cc85f5f2d2f60d1b8183f3ed490e4de14368e96a9482c2a4dd193195c902f2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #261: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"a2b5ffffffffebb251b085377605a224bc80872602a6e467fd016807e97fa395bead6734ebe44b810d3fb2ea00b1732945377338febfd439a8d74dfbd0f942fa6bb18eae36616a7d3cad35919fd21a8af4bbe7a10f73b3e036a46b103ef56e2a2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #262: special case hash", + "NoBenchmark": false + }, + { + "Input": "641227ffffffff6f1b96fa5f097fcf3cc1a3c256870d45a67b83d0967d4b20c0499625479e161dacd4db9d9ce64854c98d922cbf212703e9654fae182df9bad242c177cf37b8193a0131108d97819edd9439936028864ac195b64fca76d9d6932927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #263: special case hash", + "NoBenchmark": false + }, + { + "Input": "958415d8ffffffffabad03e2fc662dc3ba203521177502298df56f36600e0f8b08f16b8093a8fb4d66a2c8065b541b3d31e3bfe694f6b89c50fb1aaa6ff6c9b29d6455e2d5d1779748573b611cb95d4a21f967410399b39b535ba3e5af81ca2e2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #264: special case hash", + "NoBenchmark": false + }, + { + "Input": "f1d8de4858ffffffff1281093536f47fe13deb04e1fbe8fb954521b6975420f8be26231b6191658a19dd72ddb99ed8f8c579b6938d19bce8eed8dc2b338cb5f8e1d9a32ee56cffed37f0f22b2dcb57d5c943c14f79694a03b9c5e96952575c892927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": 
"0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #265: special case hash", + "NoBenchmark": false + }, + { + "Input": "0927895f2802ffffffff10782dd14a3b32dc5d47c05ef6f1876b95c81fc31def15e76880898316b16204ac920a02d58045f36a229d4aa4f812638c455abe0443e74d357d3fcb5c8c5337bd6aba4178b455ca10e226e13f9638196506a19391232927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #266: special case hash", + "NoBenchmark": false + }, + { + "Input": "60907984aa7e8effffffff4f332862a10a57c3063fb5a30624cf6a0c3ac80589352ecb53f8df2c503a45f9846fc28d1d31e6307d3ddbffc1132315cc07f16dad1348dfa9c482c558e1d05c5242ca1c39436726ecd28258b1899792887dd0a3c62927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #267: special case hash", + "NoBenchmark": false + }, + { + "Input": "c6ff198484939170ffffffff0af42cda50f9a5f50636ea6942d6b9b8cd6ae1e24a40801a7e606ba78a0da9882ab23c7677b8642349ed3d652c5bfa5f2a9558fb3a49b64848d682ef7f605f2832f7384bdc24ed2925825bf8ea77dc59817257822927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #268: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"de030419345ca15c75ffffffff8074799b9e0956cc43135d16dfbe4d27d7e68deacc5e1a8304a74d2be412b078924b3bb3511bac855c05c9e5e9e44df3d61e967451cd8e18d6ed1885dd827714847f96ec4bb0ed4c36ce9808db8f714204f6d12927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #269: special case hash", + "NoBenchmark": false + }, + { + "Input": "6f0e3eeaf42b28132b88fffffffff6c8665604d34acb19037e1ab78caaaac6ff2f7a5e9e5771d424f30f67fdab61e8ce4f8cd1214882adb65f7de94c31577052ac4e69808345809b44acb0b2bd889175fb75dd050c5a449ab9528f8f78daa10c2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #270: special case hash", + "NoBenchmark": false + }, + { + "Input": "cdb549f773b3e62b3708d1ffffffffbe48f7c0591ddcae7d2cb222d1f8017ab9ffcda40f792ce4d93e7e0f0e95e1a2147dddd7f6487621c30a03d710b330021979938b55f8a17f7ed7ba9ade8f2065a1fa77618f0b67add8d58c422c2453a49a2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #271: special case hash", + "NoBenchmark": false + }, + { + "Input": "2c3f26f96a3ac0051df4989bffffffff9fd64886c1dc4f9924d8fd6f0edb048481f2359c4faba6b53d3e8c8c3fcc16a948350f7ab3a588b28c17603a431e39a8cd6f6a5cc3b55ead0ff695d06c6860b509e46d99fccefb9f7f9e101857f743002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": 
"0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #272: special case hash", + "NoBenchmark": false + }, + { + "Input": "ac18f8418c55a2502cb7d53f9affffffff5c31d89fda6a6b8476397c04edf411dfc8bf520445cbb8ee1596fb073ea283ea130251a6fdffa5c3f5f2aaf75ca808048e33efce147c9dd92823640e338e68bfd7d0dc7a4905b3a7ac711e577e90e72927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #273: special case hash", + "NoBenchmark": false + }, + { + "Input": "4f9618f98e2d3a15b24094f72bb5ffffffffa2fd3e2893683e5a6ab8cf0ee610ad019f74c6941d20efda70b46c53db166503a0e393e932f688227688ba6a576293320eb7ca0710255346bdbb3102cdcf7964ef2e0988e712bc05efe16c1993452927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #274: special case hash", + "NoBenchmark": false + }, + { + "Input": "422e82a3d56ed10a9cc21d31d37a25ffffffff67edf7c40204caae73ab0bc75aac8096842e8add68c34e78ce11dd71e4b54316bd3ebf7fffdeb7bd5a3ebc1883f5ca2f4f23d674502d4caf85d187215d36e3ce9f0ce219709f21a3aac003b7a82927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #275: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"7075d245ccc3281b6e7b329ff738fbb417a5ffffffffa0842d9890b5cf95d018677b2d3a59b18a5ff939b70ea002250889ddcd7b7b9d776854b4943693fb92f76b4ba856ade7677bf30307b21f3ccda35d2f63aee81efd0bab6972cc0795db552927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #276: special case hash", + "NoBenchmark": false + }, + { + "Input": "3c80de54cd9226989443d593fa4fd6597e280ebeffffffffc1847eb76c217a95479e1ded14bcaed0379ba8e1b73d3115d84d31d4b7c30e1f05e1fc0d5957cfb0918f79e35b3d89487cf634a4f05b2e0c30857ca879f97c771e877027355b24432927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #277: special case hash", + "NoBenchmark": false + }, + { + "Input": "de21754e29b85601980bef3d697ea2770ce891a8cdffffffffc7906aa794b39b43dfccd0edb9e280d9a58f01164d55c3d711e14b12ac5cf3b64840ead512a0a31dbe33fa8ba84533cd5c4934365b3442ca1174899b78ef9a3199f495843897722927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #278: special case hash", + "NoBenchmark": false + }, + { + "Input": "8f65d92927cfb86a84dd59623fb531bb599e4d5f7289ffffffff2f1f2f57881c5b09ab637bd4caf0f4c7c7e4bca592fea20e9087c259d26a38bb4085f0bbff1145b7eb467b6748af618e9d80d6fdcd6aa24964e5a13f885bca8101de08eb0d752927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": 
"0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #279: special case hash", + "NoBenchmark": false + }, + { + "Input": "6b63e9a74e092120160bea3877dace8a2cc7cd0e8426cbfffffffffafc8c3ca85e9b1c5a028070df5728c5c8af9b74e0667afa570a6cfa0114a5039ed15ee06fb1360907e2d9785ead362bb8d7bd661b6c29eeffd3c5037744edaeb9ad990c202927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #280: special case hash", + "NoBenchmark": false + }, + { + "Input": "fc28259702a03845b6d75219444e8b43d094586e249c8699ffffffffe852512e0671a0a85c2b72d54a2fb0990e34538b4890050f5a5712f6d1a7a5fb8578f32edb1846bab6b7361479ab9c3285ca41291808f27fd5bd4fdac720e5854713694c2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #281: special case hash", + "NoBenchmark": false + }, + { + "Input": "1273b4502ea4e3bccee044ee8e8db7f774ecbcd52e8ceb571757ffffffffe20a7673f8526748446477dbbb0590a45492c5d7d69859d301abbaedb35b2095103a3dc70ddf9c6b524d886bed9e6af02e0e4dec0d417a414fed3807ef4422913d7c2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #282: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"08fb565610a79baa0c566c66228d81814f8c53a15b96e602fb49ffffffffff6e7f085441070ecd2bb21285089ebb1aa6450d1a06c36d3ff39dfd657a796d12b5249712012029870a2459d18d47da9aa492a5e6cb4b2d8dafa9e4c5c54a2b9a8b2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #283: special case hash", + "NoBenchmark": false + }, + { + "Input": "d59291cc2cf89f3087715fcb1aa4e79aa2403f748e97d7cd28ecaefeffffffff914c67fb61dd1e27c867398ea7322d5ab76df04bc5aa6683a8e0f30a5d287348fa07474031481dda4953e3ac1959ee8cea7e66ec412b38d6c96d28f6d37304ea2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #284: special case hash", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25ffffffff00000001000000000000000000000000fffffffffffffffffffffffcffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc63254ed705d16f80987e2d9b1a6957d29ce22febf7d10fa515153182415c8361baaca4b1fc105ee5ce80d514ec1238beae2037a6f83625593620d460819e8682160926", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #636: r too large", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc63254fffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc63254e3cd8d2f81d6953b0844c09d7b560d527cd2ef67056893eadafa52c8501387d59ee41fdb4d10402ce7a0c5e3b747adfa3a490b62a6b7719068903485c0bb6dc2d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + 
"Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #637: r,s are large", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd909135bdb6799286170f5ead2de4f6511453fe50914f3df2de54a36383df8dd48240cd81edd91cb6936133508c3915100e81f332c4545d41189b481196851378e05b06e72d4a1bff80ea5db514aa2f93ea6dd6d9c0ae27b7837dc432f9ce89d9", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #638: r and s^-1 have a large Hamming weight", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd27b4577ca009376f71303fd5dd227dcef5deb773ad5f5a84360644669ca249a5b062947356748b0fc17f1704c65aa1dca6e1bfe6779756fa616d91eaad13df2c0b38c17f3d0672e7409cfc5992a99fff12b84a4f8432293b431113f1b2fb579d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #639: r and s^-1 have a large Hamming weight", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6324d5555555550000000055555555555555553ef7a8e48d07df81a693439654210c707a736d8e326a9ca62bbe25a34ea4e3633b499a96afa7aaa3fcf3fd88f8e07edeb3e45879d8622b93e818443a686e869eeda7bf9ae46aa3eafcc48a5934864627", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #651: r and s^-1 are close to n", + "NoBenchmark": false + }, + { + "Input": 
"532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257fffffff800000007fffffffffffffffde737d56d38bcf4279dce5617e3192a8555555550000000055555555555555553ef7a8e48d07df81a693439654210c700203736fcb198b15d8d7a0c80f66dddd15259240aa78d08aae67c467de04503434383438d5041ea9a387ee8e4d4e84b4471b160c6bcf2568b072f8f20e87a996", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #654: point at infinity during verify", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257fffffff800000007fffffffffffffffde737d56d38bcf4279dce5617e3192a97fffffff800000007fffffffffffffffde737d56d38bcf4279dce5617e3192a878d844dc7f16b73b1f2a39730da5d8cd99fe2e70a18482384e37dcd2bfea02e1ed6572e01eb7a8d113d02c666c45ef22d3b9a6a6dea99aa43a8183c26e75d336", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #655: edge case for signature malleability", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257fffffff800000007fffffffffffffffde737d56d38bcf4279dce5617e3192a97fffffff800000007fffffffffffffffde737d56d38bcf4279dce5617e3192a9dec6c8257dde94110eacc8c09d2e5789cc5beb81a958b02b4d62da9599a7401466fae1614174be63970b83f6524421067b06dd6f4e9c56baca4e344fdd690f1d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #656: edge case for signature malleability", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25555555550000000055555555555555553ef7a8e48d07df81a693439654210c70532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25a17f5b75a35ed64623ca5cbf1f91951292db0c23f0c2ea24c3d0cad0988cabc083a7a618625c228940730b4fa3ee64faecbb2fc20fdde7c58b3a3f6300424dc6", + "Expected": 
"0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #657: u1 == 1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25555555550000000055555555555555553ef7a8e48d07df81a693439654210c70acd155416a8b77f34089464733ff7cd39c400e9c69af7beb9eac5054ed2ec72c04ba0cba291a37db13f33bf90dab628c04ec8393a0200419e9eaa1ebcc9fb5c31f3a0a0e6823a49b625ad57b12a32d4047970fc3428f0f0049ecf4265dc12f62", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #658: u1 == n - 1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25555555550000000055555555555555553ef7a8e48d07df81a693439654210c70555555550000000055555555555555553ef7a8e48d07df81a693439654210c70692b6c828e0feed63d8aeaa2b7322f9ccbe8723a1ed39f229f204a434b8900efa1f6f6abcb38ea3b8fde38b98c7c271f274af56a8c5628dc3329069ae4dd5716", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #659: u2 == 1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25555555550000000055555555555555553ef7a8e48d07df81a693439654210c70aaaaaaaa00000000aaaaaaaaaaaaaaaa7def51c91a0fbf034d26872ca84218e100cefd9162d13e64cb93687a9cd8f9755ebb5a3ef7632f800f84871874ccef09543ecbeaf7e8044ef721be2fb5f549e4b8480d2587404ebf7dbbef2c54bc0cb1", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #660: u2 == n - 1", + "NoBenchmark": false + }, + { + "Input": 
"532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd710f8e3edc7c2d5a3fd23de844002bb949d9f794f6d5405f6d97c1bb03dd2bd2b975183b42551cf52f291d5c1921fd5e12f50c8c85a4beb9de03efa3f0f244862243018e6866df922dc313612020311ff21e242ce3fb15bc78c406b25ab43091", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #661: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdedffbc270f722c243069a7e5f40335a61a58525c7b4db2e7a8e269274ffe4e1bc25f1d166f3e211cdf042a26f8abf6094d48b8d17191d74ed71714927446699965d06dd6a88abfa49e8b4c5da6bb922851969adf9604b5accfb52a114e77ccdb", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #662: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffda25adcae105ed7ff4f95d2344e24ee523314c3e178525d007904b68919ba4d538fe5e88243a76e41a004236218a3c3a2d6eee398a23c3a0b008d7f0164cbc0ca98a20d1bdcf573513c7cfd9b83c63e3a82d40127c897697c86b8cb387af7f240", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #663: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd2e4348c645707dce6760d773de3f3e87346924b2f64bd3dd0297e766b5805ebb02148256b530fbc470c7b341970b38243ecee6d5a840a37beca2efb37e8dff2cc0adbea0882482a7489ca703a399864ba987eeb6ddb738af53a83573473cb30d", + "Expected": 
"0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #664: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd348c673b07dce3920d773de3f3e87408869e916dbcf797d8f9684fb67753d1dca34db012ce6eda1e9c7375c5fcf3e54ed698e19615124273b3a621d021c76f8e777458d6f55a364c221e39e1205d5510bb4fbb7ddf08d8d8fdde13d1d6df7f14", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #665: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd6918ce760fb9c7241aee7bc7e7d0e8110d3d22db79ef2fb1f2d09f6ceea7a3b8b97af3fe78be15f2912b6271dd8a43badb6dd2a1b315b2ce7ae37b4e7778041d930d71ee1992d2466495c42102d08e81154c305307d1dcd52d0fa4c479b278e7", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #666: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd73b3c694391d8eadde3f3e874089464715ac20e4c126bbf6d864d648969f5b5a81e7198a3c3f23901cedc7a1d6eff6e9bf81108e6c35cd8559139af3135dbcbb9ef1568530291a8061b90c9f4285eefcba990d4570a4e3b7b737525b5d580034", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #667: edge case for u1", + "NoBenchmark": false + }, + { + "Input": 
"532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdbb07ac7a86948c2c2989a16db1930ef1b89ce112595197656877e53c41457f28ab4d792ca121d1dba39cb9de645149c2ab573e8becc6ddff3cc9960f188ddf737f90ba23664153e93262ff73355415195858d7be1315a69456386de68285a3c8", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #668: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd27e4d82cb6c061dd9337c69bf9332ed3d198662d6f2299443f62c861187db648518412b69af43aae084476a68d59bbde51fbfa9e5be80563f587c9c2652f88ef2d3b90d25baa6bdb7b0c55e5240a3a98fbc24afed8523edec1c70503fc10f233", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #669: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffde7c5cf3aac2e88923b77850515fff6a12d13b356dfe9ec275c3dd81ae94609a4a08f14a644b9a935dffea4761ebaf592d1f66fe6cd373aa7f5d370af34f8352da54b5bc4025cf335900a914c2934ec2fec7a396d0a7affcad732a5741c7aaaf5", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #670: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdc77838df91c1e953e016e10bddffea2317f9fee32bacfe553cede9e57a748f68ccf2296a6a89b62b90739d38af4ae3a20e9f45715b90044639241061e33f8f8caace0046491eeaa1c6e9a472b96d88f4af83e7ff1bb84438c7e058034412ae08", + "Expected": 
"0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #671: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd8ef071c02383d2a6c02dc217bbffd446730d0318b0425e2586220907f885f97f94b0fc1525bcabf82b1f34895e5819a06c02b23e04002276e165f962c86e3927be7c2ab4d0b25303204fb32a1f8292902792225e16a6d2dbfb29fbc89a9c3376", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #672: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd5668aaa0b545bbf9a044a32399ffbe69ce20074e34d7bdf5cf56282a769763965351f37e1de0c88c508527d89882d183ccdcf2efca407edb0627cadfd16de6ec44b4b57cdf960d32ebcc4c97847eed218425853b5b675eb781b766a1a1300349", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #673: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdd12d6e56882f6c0027cae91a27127728f7fddf478fb4fdc2b65f40a60b0eb952748bbafc320e6735cb64019710a269c6c2b5d147bdc831325cb2fb276ac971a69d655e9a755bc9d800ad21ee3fd4d980d93a7a49a8c5ccd37005177578f51163", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #674: edge case for u1", + "NoBenchmark": false + }, + { + "Input": 
"532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd7fffffffaaaaaaaaffffffffffffffffe9a2538f37b28a2c513dee40fecbb71a14b3bbd75c5e1c0c36535a934d4ab85112410b3b90fa97a31c33038964fd85cc112f7d837f8f9c36b460d636c965a5f818f2b50c5d00fb3f9705561dd6631883", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #675: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdb62f26b5f2a2b26f6de86d42ad8a13da3ab3cccd0459b201de009e526adf21f2d823533c04cd8edc6d6f950a8e08ade04a9bafa2f14a590356935671ae9305bf43178d1f88b6a57a96924c265f0ddb75b58312907b195acb59d7797303123775", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #676: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdbb1d9ac949dd748cd02bbbe749bd351cd57b38bb61403d700686aa7b4c90851edb2b3408b3167d91030624c6328e8ce3ec108c105575c2f3d209b92e654bab69c34318139c50b0802c6e612f0fd3189d800df7c996d5d7b7c3d6be82836fa258", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #677: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd66755a00638cdaec1c732513ca0234ece52545dac11f816e818f725b4f60aaf209179ce7c59225392216453b2ac1e9d178c24837dfae26bc1dd7ab60638527425556b42e330289f3b826b2db7a86d19d45c2860a59f2be1ddcc3b691f95a9255", + "Expected": 
"0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #678: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd55a00c9fcdaebb6032513ca0234ecfffe98ebe492fdf02e48ca48e982beb366901959fb8deda56e5467b7e4b214ea4c2d0c2fb29d70ff19b6b1eccebd6568d7ed9dbd77a918297fd970bff01e1343f6925167db5a14d098a211c39cc3a413398", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #679: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdab40193f9b5d76c064a27940469d9fffd31d7c925fbe05c919491d3057d66cd2567f1fdc387e5350c852b4e8f8ba9d6d947e1c5dd7ccc61a5938245dd6bcab3a9960bebaf919514f9535c22eaaf0b5812857970e26662267b1f3eb1011130a11", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #680: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdca0234ebb5fdcb13ca0234ecffffffffcb0dadbbc7f549f8a26b4408d0dc86003499f974ff4ca6bbb2f51682fd5f51762f9dd6dd2855262660b36d46d3e4bec2f498fae2487807e220119152f0122476c64d4fa46ddce85c4546630f0d5c5e81", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #681: edge case for u2", + "NoBenchmark": false + }, + { + "Input": 
"532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdbfffffff3ea3677e082b9310572620ae19933a9e65b285598711c77298815ad32c5c01662cf00c1929596257db13b26ecf30d0f3ec4b9f0351b0f27094473426e986a086060d086eee822ddd2fc744247a0154b57f7a69c51d9fdafa484e4ac7", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #682: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd266666663bbbbbbbe6666666666666665b37902e023fab7c8f055d86e5cc41f491d4cba813a04d86dbae94c23be6f52c15774183be7ba5b2d9f3cf010b160501900b8adfea6491019a9ac080d516025a541bf4b952b0ad7be4b1874b02fd544a", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #683: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdbfffffff36db6db7a492492492492492146c573f4c6dfc8d08a443e258970b09ef7fd0a3a36386638330ecad41e1a3b302af36960831d0210c614b948e8aa124ef0d6d800e4047d6d3c1be0fdeaf11fcd8cab5ab59c730eb34116e35a8c7d098", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #684: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdbfffffff2aaaaaab7fffffffffffffffc815d0e60b3e596ecb1ad3a27cfd49c4a521dab13cc9152d8ca77035a607fea06c55cc3ca5dbeb868cea92eafe93df2a7bfb9b28531996635e6a5ccaa2826a406ce1111bdb9c2e0ca36500418a2f43de", + "Expected": 
"0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #685: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd7fffffff55555555ffffffffffffffffd344a71e6f651458a27bdc81fd976e37474d58a4eec16e0d565f2187fe11d4e8e7a2683a12f38b4fc01d1237a81a10976e55f73bb7cdda46bdb67ef77f6fd2969df2b67920fb5945fde3a517a6ded4cd", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #686: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd3fffffff800000007fffffffffffffffde737d56d38bcf4279dce5617e3192aa692da5cd4309d9a6e5cb525c37da8fa0879f7b57208cdabbf47d223a5b23a62140e0daa78cfdd207a7389aaed61738b17fc5fc3e6a5ed3397d2902e9125e6ab4", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #687: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd5d8ecd64a4eeba466815ddf3a4de9a8e6abd9c5db0a01eb80343553da648428f85689b3e0775c7718a90279f14a8082cfcd4d1f1679274f4e9b8805c570a0670167fcc5ca734552e09afa3640f4a034e15b9b7ca661ec7ff70d3f240ebe705b1", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #688: edge case for u2", + "NoBenchmark": false + }, + { + "Input": 
"532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e256f2347cab7dd76858fe0555ac3bc99048c4aacafdfb6bcbe05ea6c42c4934569f21d907e3890916dc4fa1f4703c1e50d3f54ddf7383e44023a41de562aa18ed80158137755b901f797a90d4ca8887e023cb2ef63b2ba2c0d455edaef42cf237e2a964fc00d377a8592b8b61aafa7a4aaa7c7b9fd2b41d6e0e17bd1ba5677edcd", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #689: point duplication during verification", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e256f2347cab7dd76858fe0555ac3bc99048c4aacafdfb6bcbe05ea6c42c4934569f21d907e3890916dc4fa1f4703c1e50d3f54ddf7383e44023a41de562aa18ed80158137755b901f797a90d4ca8887e023cb2ef63b2ba2c0d455edaef42cf237ed569b03ef2c8857b6d4749e550585b5558384603d4be291f1e842e45a9881232", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #690: duplication bug", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25555555550000000055555555555555553ef7a8e48d07df81a693439654210c703333333300000000333333333333333325c7cbbc549e52e763f1f55a327a3aa9664ce273320d918d8bdb2e61201b4549b36b7cdc54e33b84adb6f2c10aac831e49e68831f18bda2973ac3d76bfbc8c5ee1cceed2dd862e2dc7c915c736cef1f4", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #693: comparison with point at infinity ", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257cf27b188d034f7e8a52380304b51ac3c08969e277f21b35a60b48fc47669978555555550000000055555555555555553ef7a8e48d07df81a693439654210c70961691a5e960d07a301dbbad4d86247ec27d7089faeb3ddd1add395efff1e0fe7254622cc371866cdf990d2c5377790e37d1f1519817f09a231bd260a9e78aeb", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": 
"wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #694: extreme value for k and edgecase s", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257cf27b188d034f7e8a52380304b51ac3c08969e277f21b35a60b48fc47669978b6db6db6249249254924924924924924625bd7a09bec4ca81bcdd9f8fd6b63cc5d283e13ce8ca60da868e3b0fb33e6b4f1074793274e2928250e71e2aca63e9c214dc74fa25371fb4d9e506d418ed9a1bfd6d0c8bb6591d3e0f44505a84886ce", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #695: extreme value for k and s^-1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257cf27b188d034f7e8a52380304b51ac3c08969e277f21b35a60b48fc47669978cccccccc00000000cccccccccccccccc971f2ef152794b9d8fc7d568c9e8eaa70fc351da038ae0803bd1d86514ae0462f9f8216551d9315aa9d297f792eef6a341c74eed786f2d33da35360ca7aa925e753f00d6077a1e9e5fc339d634019c73", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #696: extreme value for k and s^-1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257cf27b188d034f7e8a52380304b51ac3c08969e277f21b35a60b48fc476699783333333300000000333333333333333325c7cbbc549e52e763f1f55a327a3aaaa1e34c8f16d138673fee55c080547c2bfd4de7550065f638322bba9430ce4b60662be9bb512663aa4d7df8ab3f3b4181c5d44a7bdf42436620b7d8a6b81ac936", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #697: extreme value for k and s^-1", + "NoBenchmark": false + }, + { + "Input": 
"532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257cf27b188d034f7e8a52380304b51ac3c08969e277f21b35a60b48fc4766997849249248db6db6dbb6db6db6db6db6db5a8b230d0b2b51dcd7ebf0c9fef7c1857e1a8a8338d7fd8cf41d322a302d2078a87a23c7186150ed7cda6e52817c1bdfd0a9135a89d21ce821e29014b2898349254d748272b2d4eb8d59ee34c615377f", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #698: extreme value for k and s^-1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257cf27b188d034f7e8a52380304b51ac3c08969e277f21b35a60b48fc4766997816a4502e2781e11ac82cbc9d1edd8c981584d13e18411e2f6e0478c34416e3bb5c19fe227a61abc65c61ee7a018cc9571b2c6f663ea33583f76a686f64be078b7b4a0d734940f613d52bc48673b457c2cf78492490a5cc5606c0541d17b24ddb", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #699: extreme value for k", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e256b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296555555550000000055555555555555553ef7a8e48d07df81a693439654210c70db02d1f3421d600e9d9ef9e47419dba3208eed08c2d4189a5db63abeb2739666e0ed26967b9ada9ed7ffe480827f90a0d210d5fd8ec628e31715e6b24125512a", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #700: extreme value for k and edgecase s", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e256b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296b6db6db6249249254924924924924924625bd7a09bec4ca81bcdd9f8fd6b63cc6222d1962655501893c29e441395b6c05711bd3ed5a0ef72cfab338b88229c4baaae079cb44a1af070362aaa520ee24cac2626423b0bf81af1c54311d8e2fd23", + 
"Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #701: extreme value for k and s^-1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e256b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296cccccccc00000000cccccccccccccccc971f2ef152794b9d8fc7d568c9e8eaa74ccfa24c67f3def7fa81bc99c70bb0419c0952ba599f4c03361da184b04cdca5db76b797f7f41d9c729a2219478a7e629728df870800be8cf6ca7a0a82153bfa", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #702: extreme value for k and s^-1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e256b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c2963333333300000000333333333333333325c7cbbc549e52e763f1f55a327a3aaaea1c72c91034036bac71402b6e9ecc4af3dbde7a99dc574061e99fefff9d84dab7dd057e75b78ac6f56e34eb048f0a9d29d5d055408c90d02bc2ea918c18cb63", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #703: extreme value for k and s^-1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e256b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c29649249248db6db6dbb6db6db6db6db6db5a8b230d0b2b51dcd7ebf0c9fef7c185c2879a66d86cb20b820b7795da2da62b38924f7817d1cd350d936988e90e79bc5431a7268ff6931c7a759de024eff90bcb0177216db6fd1f3aaaa11fa3b6a083", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #704: extreme value for k and s^-1", + "NoBenchmark": false + }, + { + "Input": 
"532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e256b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c29616a4502e2781e11ac82cbc9d1edd8c981584d13e18411e2f6e0478c34416e3bbab1c0f273f74abc2b848c75006f2ef3c54c26df27711b06558f455079aee0ba3df510f2ecef6d9a05997c776f14ad6456c179f0a13af1771e4d6c37fa48b47f2", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #705: extreme value for k", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25249249246db6db6ddb6db6db6db6db6dad4591868595a8ee6bf5f864ff7be0c26b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c2964fe342e2fe1a7f9b8ee7eb4a7c0f9e162bce33576b315ececbb6406837bf51f5", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #706: testing point duplication", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25acd155416a8b77f34089464733ff7cd39c400e9c69af7beb9eac5054ed2ec72c249249246db6db6ddb6db6db6db6db6dad4591868595a8ee6bf5f864ff7be0c26b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c2964fe342e2fe1a7f9b8ee7eb4a7c0f9e162bce33576b315ececbb6406837bf51f5", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #707: testing point duplication", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25249249246db6db6ddb6db6db6db6db6dad4591868595a8ee6bf5f864ff7be0c26b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296b01cbd1c01e58065711814b583f061e9d431cca994cea1313449bf97c840ae0a", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #708: testing point duplication", + "NoBenchmark": 
false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25acd155416a8b77f34089464733ff7cd39c400e9c69af7beb9eac5054ed2ec72c249249246db6db6ddb6db6db6db6db6dad4591868595a8ee6bf5f864ff7be0c26b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296b01cbd1c01e58065711814b583f061e9d431cca994cea1313449bf97c840ae0a", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #709: testing point duplication", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023a8ea150cb80125d7381c4c1f1da8e9de2711f9917060406a73d7904519e51388f3ab9fa68bd47973a73b2d40480c2ba50c22c9d76ec217257288293285449b8604aaec73635726f213fb8a9e64da3b8632e41495a944d0045b522eba7240fad587d9315798aaa3a5ba01775787ced05eaaf7b4e09fc81d6d1aa546e8365d525d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #1210: pseudorandom signature", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e2530e782f964b2e2ff065a051bc7adc20615d8c43a1365713c88268822c253bcce5b16df652aa1ecb2dc8b46c515f9604e2e84cacfa7c6eec30428d2d3f4e08ed504aaec73635726f213fb8a9e64da3b8632e41495a944d0045b522eba7240fad587d9315798aaa3a5ba01775787ced05eaaf7b4e09fc81d6d1aa546e8365d525d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #1211: pseudorandom signature", + "NoBenchmark": false + }, + { + "Input": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855b292a619339f6e567a305c951c0dcbcc42d16e47f219f9e98e76e09d8770b34a0177e60492c5a8242f76f07bfe3661bde59ec2a17ce5bd2dab2abebdf89a62e204aaec73635726f213fb8a9e64da3b8632e41495a944d0045b522eba7240fad587d9315798aaa3a5ba01775787ced05eaaf7b4e09fc81d6d1aa546e8365d525d", + "Expected": 
"0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #1212: pseudorandom signature", + "NoBenchmark": false + }, + { + "Input": "de47c9b27eb8d300dbb5f2c353e632c393262cf06340c4fa7f1b40c4cbd36f90986e65933ef2ed4ee5aada139f52b70539aaf63f00a91f29c69178490d57fb713dafedfb8da6189d372308cbf1489bbbdabf0c0217d1c0ff0f701aaa7a694b9c04aaec73635726f213fb8a9e64da3b8632e41495a944d0045b522eba7240fad587d9315798aaa3a5ba01775787ced05eaaf7b4e09fc81d6d1aa546e8365d525d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #1213: pseudorandom signature", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91d434e262a49eab7781e353a3565e482550dd0fd5defa013c7f29745eff3569f19b0c0a93f267fb6052fd8077be769c2b98953195d7bc10de844218305c6ba17a4f337ccfd67726a805e4f1600ae2849df3807eca117380239fbd816900000000ed9dea124cc8c396416411e988c30f427eb504af43a3146cd5df7ea60666d685", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #1303: x-coordinate of the public key has many trailing 0's", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f910fe774355c04d060f76d79fd7a772e421463489221bf0a33add0be9b1979110b500dcba1c69a8fbd43fa4f57f743ce124ca8b91a1f325f3fac6181175df557374f337ccfd67726a805e4f1600ae2849df3807eca117380239fbd816900000000ed9dea124cc8c396416411e988c30f427eb504af43a3146cd5df7ea60666d685", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #1304: x-coordinate of the public key has many trailing 0's", + "NoBenchmark": false + }, + { + "Input": 
"2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91bb40bf217bed3fb3950c7d39f03d36dc8e3b2cd79693f125bfd06595ee1135e3541bf3532351ebb032710bdb6a1bf1bfc89a1e291ac692b3fa4780745bb556774f337ccfd67726a805e4f1600ae2849df3807eca117380239fbd816900000000ed9dea124cc8c396416411e988c30f427eb504af43a3146cd5df7ea60666d685", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #1305: x-coordinate of the public key has many trailing 0's", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91664eb7ee6db84a34df3c86ea31389a5405badd5ca99231ff556d3e75a233e73a59f3c752e52eca46137642490a51560ce0badc678754b8f72e51a2901426a1bd3cf03d614d8939cfd499a07873fac281618f06b8ff87e8015c3f49726500493584fa174d791c72bf2ce3880a8960dd2a7c7a1338a82f85a9e59cdbde80000000", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #1306: y-coordinate of the public key has many trailing 0's", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f914cd0429bbabd2827009d6fcd843d4ce39c3e42e2d1631fd001985a79d1fd8b439638bf12dd682f60be7ef1d0e0d98f08b7bca77a1a2b869ae466189d2acdabe33cf03d614d8939cfd499a07873fac281618f06b8ff87e8015c3f49726500493584fa174d791c72bf2ce3880a8960dd2a7c7a1338a82f85a9e59cdbde80000000", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #1307: y-coordinate of the public key has many trailing 0's", + "NoBenchmark": false + }, + { + "Input": 
"2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91e56c6ea2d1b017091c44d8b6cb62b9f460e3ce9aed5e5fd41e8added97c56c04a308ec31f281e955be20b457e463440b4fcf2b80258078207fc1378180f89b553cf03d614d8939cfd499a07873fac281618f06b8ff87e8015c3f49726500493584fa174d791c72bf2ce3880a8960dd2a7c7a1338a82f85a9e59cdbde80000000", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #1308: y-coordinate of the public key has many trailing 0's", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f911158a08d291500b4cabed3346d891eee57c176356a2624fb011f8fbbf3466830228a8c486a736006e082325b85290c5bc91f378b75d487dda46798c18f2855193cf03d614d8939cfd499a07873fac281618f06b8ff87e8015c3f4972650049357b05e8b186e38d41d31c77f5769f22d58385ecc857d07a561a6324217fffffff", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #1309: y-coordinate of the public key has many trailing 1's", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91b1db9289649f59410ea36b0c0fc8d6aa2687b29176939dd23e0dde56d309fa9d3e1535e4280559015b0dbd987366dcf43a6d1af5c23c7d584e1c3f48a12513363cf03d614d8939cfd499a07873fac281618f06b8ff87e8015c3f4972650049357b05e8b186e38d41d31c77f5769f22d58385ecc857d07a561a6324217fffffff", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #1310: y-coordinate of the public key has many trailing 1's", + "NoBenchmark": false + }, + { + "Input": 
"2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91b7b16e762286cb96446aa8d4e6e7578b0a341a79f2dd1a220ac6f0ca4e24ed86ddc60a700a139b04661c547d07bbb0721780146df799ccf55e55234ecb8f12bc3cf03d614d8939cfd499a07873fac281618f06b8ff87e8015c3f4972650049357b05e8b186e38d41d31c77f5769f22d58385ecc857d07a561a6324217fffffff", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #1311: y-coordinate of the public key has many trailing 1's", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91d82a7c2717261187c8e00d8df963ff35d796edad36bc6e6bd1c91c670d9105b43dcabddaf8fcaa61f4603e7cbac0f3c0351ecd5988efb23f680d07debd1399292829c31faa2e400e344ed94bca3fcd0545956ebcfe8ad0f6dfa5ff8effffffffa01aafaf000e52585855afa7676ade284113099052df57e7eb3bd37ebeb9222e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #1312: x-coordinate of the public key has many trailing 1's", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f915eb9c8845de68eb13d5befe719f462d77787802baff30ce96a5cba063254af782c026ae9be2e2a5e7ca0ff9bbd92fb6e44972186228ee9a62b87ddbe2ef66fb52829c31faa2e400e344ed94bca3fcd0545956ebcfe8ad0f6dfa5ff8effffffffa01aafaf000e52585855afa7676ade284113099052df57e7eb3bd37ebeb9222e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #1313: x-coordinate of the public key has many trailing 1's", + "NoBenchmark": false + }, + { + "Input": 
"2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f9196843dd03c22abd2f3b782b170239f90f277921becc117d0404a8e4e36230c28f2be378f526f74a543f67165976de9ed9a31214eb4d7e6db19e1ede123dd991d2829c31faa2e400e344ed94bca3fcd0545956ebcfe8ad0f6dfa5ff8effffffffa01aafaf000e52585855afa7676ade284113099052df57e7eb3bd37ebeb9222e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #1314: x-coordinate of the public key has many trailing 1's", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91766456dce1857c906f9996af729339464d27e9d98edc2d0e3b760297067421f6402385ecadae0d8081dccaf5d19037ec4e55376eced699e93646bfbbf19d0b41fffffff948081e6a0458dd8f9e738f2665ff9059ad6aac0708318c4ca9a7a4f55a8abcba2dda8474311ee54149b973cae0c0fb89557ad0bf78e6529a1663bd73", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #1315: x-coordinate of the public key is large", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91c605c4b2edeab20419e6518a11b2dbc2b97ed8b07cced0b19c34f777de7b9fd9edf0f612c5f46e03c719647bc8af1b29b2cde2eda700fb1cff5e159d47326dbafffffff948081e6a0458dd8f9e738f2665ff9059ad6aac0708318c4ca9a7a4f55a8abcba2dda8474311ee54149b973cae0c0fb89557ad0bf78e6529a1663bd73", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #1316: x-coordinate of the public key is large", + "NoBenchmark": false + }, + { + "Input": 
"2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91d48b68e6cabfe03cf6141c9ac54141f210e64485d9929ad7b732bfe3b7eb8a84feedae50c61bd00e19dc26f9b7e2265e4508c389109ad2f208f0772315b6c941fffffff948081e6a0458dd8f9e738f2665ff9059ad6aac0708318c4ca9a7a4f55a8abcba2dda8474311ee54149b973cae0c0fb89557ad0bf78e6529a1663bd73", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #1317: x-coordinate of the public key is large", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91b7c81457d4aeb6aa65957098569f0479710ad7f6595d5874c35a93d12a5dd4c7b7961a0b652878c2d568069a432ca18a1a9199f2ca574dad4b9e3a05c0a1cdb300000003fa15f963949d5f03a6f5c7f86f9e0015eeb23aebbff1173937ba748e1099872070e8e87c555fa13659cca5d7fadcfcb0023ea889548ca48af2ba7e71", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #1318: x-coordinate of the public key is small", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f916b01332ddb6edfa9a30a1321d5858e1ee3cf97e263e669f8de5e9652e76ff3f75939545fced457309a6a04ace2bd0f70139c8f7d86b02cb1cc58f9e69e96cd5a00000003fa15f963949d5f03a6f5c7f86f9e0015eeb23aebbff1173937ba748e1099872070e8e87c555fa13659cca5d7fadcfcb0023ea889548ca48af2ba7e71", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #1319: x-coordinate of the public key is small", + "NoBenchmark": false + }, + { + "Input": 
"2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91efdb884720eaeadc349f9fc356b6c0344101cd2fd8436b7d0e6a4fb93f106361f24bee6ad5dc05f7613975473aadf3aacba9e77de7d69b6ce48cb60d8113385d00000003fa15f963949d5f03a6f5c7f86f9e0015eeb23aebbff1173937ba748e1099872070e8e87c555fa13659cca5d7fadcfcb0023ea889548ca48af2ba7e71", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #1320: x-coordinate of the public key is small", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f9131230428405560dcb88fb5a646836aea9b23a23dd973dcbe8014c87b8b20eb070f9344d6e812ce166646747694a41b0aaf97374e19f3c5fb8bd7ae3d9bd0beffbcbb2914c79f045eaa6ecbbc612816b3be5d2d6796707d8125e9f851c18af015000000001352bb4a0fa2ea4cceb9ab63dd684ade5a1127bcf300a698a7193bc2", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #1321: y-coordinate of the public key is small", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91caa797da65b320ab0d5c470cda0b36b294359c7db9841d679174db34c4855743cf543a62f23e212745391aaf7505f345123d2685ee3b941d3de6d9b36242e5a0bcbb2914c79f045eaa6ecbbc612816b3be5d2d6796707d8125e9f851c18af015000000001352bb4a0fa2ea4cceb9ab63dd684ade5a1127bcf300a698a7193bc2", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #1322: y-coordinate of the public key is small", + "NoBenchmark": false + }, + { + "Input": 
"2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f917e5f0ab5d900d3d3d7867657e5d6d36519bc54084536e7d21c336ed8001859459450c07f201faec94b82dfb322e5ac676688294aad35aa72e727ff0b19b646aabcbb2914c79f045eaa6ecbbc612816b3be5d2d6796707d8125e9f851c18af015000000001352bb4a0fa2ea4cceb9ab63dd684ade5a1127bcf300a698a7193bc2", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #1323: y-coordinate of the public key is small", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91d7d70c581ae9e3f66dc6a480bf037ae23f8a1e4a2136fe4b03aa69f0ca25b35689c460f8a5a5c2bbba962c8a3ee833a413e85658e62a59e2af41d9127cc47224bcbb2914c79f045eaa6ecbbc612816b3be5d2d6796707d8125e9f851c18af015fffffffeecad44b6f05d15b33146549c2297b522a5eed8430cff596758e6c43d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #1324: y-coordinate of the public key is large", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91341c1b9ff3c83dd5e0dfa0bf68bcdf4bb7aa20c625975e5eeee34bb396266b3472b69f061b750fd5121b22b11366fad549c634e77765a017902a67099e0a4469bcbb2914c79f045eaa6ecbbc612816b3be5d2d6796707d8125e9f851c18af015fffffffeecad44b6f05d15b33146549c2297b522a5eed8430cff596758e6c43d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #1325: y-coordinate of the public key is large", + "NoBenchmark": false + }, + { + "Input": 
"2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f9170bebe684cdcb5ca72a42f0d873879359bd1781a591809947628d313a3814f67aec03aca8f5587a4d535fa31027bbe9cc0e464b1c3577f4c2dcde6b2094798a9bcbb2914c79f045eaa6ecbbc612816b3be5d2d6796707d8125e9f851c18af015fffffffeecad44b6f05d15b33146549c2297b522a5eed8430cff596758e6c43d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_test.json EcdsaVerify SHA-256 #1326: y-coordinate of the public key is large", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050232ba3a8be6b94d5ec80a6d9d1190a436effe50d85a1eee859b8cc6af9bd5c2e184cd60b855d442f5b3c7b11eb6c4e0ae7525fe710fab9aa7c77a67f79e6fadd762927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #1: signature malleability", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023d45c5740946b2a147f59262ee6f5bc90bd01ed280528b62b3aed5fc93f06f739b329f479a2bbd0a5c384ee1493b1f5186a87139cac5df4087c134b49156847db2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #3: Modified r or s, e.g. 
by adding or subtracting the order of the group", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023d45c5741946b2a137f59262ee6f5bc91001af27a5e1117a64733950642a3d1e8b329f479a2bbd0a5c384ee1493b1f5186a87139cac5df4087c134b49156847db2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #5: Modified r or s, e.g. by adding or subtracting the order of the group", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050232ba3a8be6b94d5ec80a6d9d1190a436effe50d85a1eee859b8cc6af9bd5c2e184cd60b865d442f5a3c7b11eb6c4e0ae79578ec6353a20bf783ecb4b6ea97b8252927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #8: Modified r or s, e.g. 
by adding or subtracting the order of the group", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #9: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000012927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #10: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050230000000000000000000000000000000000000000000000000000000000000000ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325512927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #11: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050230000000000000000000000000000000000000000000000000000000000000000ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325502927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + 
"Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #12: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050230000000000000000000000000000000000000000000000000000000000000000ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325522927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #13: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050230000000000000000000000000000000000000000000000000000000000000000ffffffff00000001000000000000000000000000ffffffffffffffffffffffff2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #14: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050230000000000000000000000000000000000000000000000000000000000000000ffffffff000000010000000000000000000000010000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #15: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #16: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000012927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #17: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050230000000000000000000000000000000000000000000000000000000000000001ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325512927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #18: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050230000000000000000000000000000000000000000000000000000000000000001ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325502927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json 
EcdsaP1363Verify SHA-256 #19: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050230000000000000000000000000000000000000000000000000000000000000001ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325522927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #20: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050230000000000000000000000000000000000000000000000000000000000000001ffffffff00000001000000000000000000000000ffffffffffffffffffffffff2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #21: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca6050230000000000000000000000000000000000000000000000000000000000000001ffffffff000000010000000000000000000000010000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #22: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc63255100000000000000000000000000000000000000000000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #23: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc63255100000000000000000000000000000000000000000000000000000000000000012927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #24: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325512927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #25: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325502927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json 
EcdsaP1363Verify SHA-256 #26: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325522927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #27: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551ffffffff00000001000000000000000000000000ffffffffffffffffffffffff2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #28: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551ffffffff000000010000000000000000000000010000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #29: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc63255000000000000000000000000000000000000000000000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #30: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc63255000000000000000000000000000000000000000000000000000000000000000012927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #31: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632550ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325512927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #32: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632550ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325502927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json 
EcdsaP1363Verify SHA-256 #33: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632550ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325522927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #34: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632550ffffffff00000001000000000000000000000000ffffffffffffffffffffffff2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #35: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632550ffffffff000000010000000000000000000000010000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #36: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc63255200000000000000000000000000000000000000000000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #37: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc63255200000000000000000000000000000000000000000000000000000000000000012927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #38: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632552ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325512927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #39: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632552ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325502927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json 
EcdsaP1363Verify SHA-256 #40: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632552ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325522927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #41: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632552ffffffff00000001000000000000000000000000ffffffffffffffffffffffff2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #42: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632552ffffffff000000010000000000000000000000010000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #43: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000000ffffffffffffffffffffffff00000000000000000000000000000000000000000000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #44: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000000ffffffffffffffffffffffff00000000000000000000000000000000000000000000000000000000000000012927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #45: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000000ffffffffffffffffffffffffffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325512927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #46: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000000ffffffffffffffffffffffffffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325502927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json 
EcdsaP1363Verify SHA-256 #47: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000000ffffffffffffffffffffffffffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325522927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #48: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000000ffffffffffffffffffffffffffffffff00000001000000000000000000000000ffffffffffffffffffffffff2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #49: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000000ffffffffffffffffffffffffffffffff000000010000000000000000000000010000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #50: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": 
"bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff0000000100000000000000000000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #51: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff0000000100000000000000000000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000012927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #52: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000001000000000000000000000000ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325512927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #53: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000001000000000000000000000000ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325502927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json 
EcdsaP1363Verify SHA-256 #54: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000001000000000000000000000000ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6325522927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #55: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000001000000000000000000000000ffffffff00000001000000000000000000000000ffffffffffffffffffffffff2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #56: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023ffffffff00000001000000000000000000000001000000000000000000000000ffffffff000000010000000000000000000000010000000000000000000000002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #57: Signature with special case values for r and s", + "NoBenchmark": false + }, + { + "Input": 
"70239dd877f7c944c422f44dea4ed1a52f2627416faf2f072fa50c772ed6f80764a1aab5000d0e804f3e2fc02bdee9be8ff312334e2ba16d11547c97711c898e6af015971cc30be6d1a206d4e013e0997772a2f91d73286ffd683b9bb2cf4f1b2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #58: Edge case for Shamir multiplication", + "NoBenchmark": false + }, + { + "Input": "00000000690ed426ccf17803ebe2bd0884bcd58a1bb5e7477ead3645f356e7a916aea964a2f6506d6f78c81c91fc7e8bded7d397738448de1e19a0ec580bf266252cd762130c6667cfe8b7bc47d27d78391e8e80c578d1cd38c3ff033be928e92927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #59: special case hash", + "NoBenchmark": false + }, + { + "Input": "7300000000213f2a525c6035725235c2f696ad3ebb5ee47f140697ad25770d919cc98be2347d469bf476dfc26b9b733df2d26d6ef524af917c665baccb23c882093496459effe2d8d70727b82462f61d0ec1b7847929d10ea631dacb16b56c322927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #60: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"ddf2000000005e0be0635b245f0b97978afd25daadeb3edb4a0161c27fe0604573b3c90ecd390028058164524dde892703dce3dea0d53fa8093999f07ab8aa432f67b0b8e20636695bb7d8bf0a651c802ed25a395387b5f4188c0c4075c886342927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #61: special case hash", + "NoBenchmark": false + }, + { + "Input": "67ab1900000000784769c4ecb9e164d6642b8499588b89855be1ec355d0841a0bfab3098252847b328fadf2f89b95c851a7f0eb390763378f37e90119d5ba3ddbdd64e234e832b1067c2d058ccb44d978195ccebb65c2aaf1e2da9b8b4987e3b2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #62: special case hash", + "NoBenchmark": false + }, + { + "Input": "a2bf09460000000076d7dbeffe125eaf02095dff252ee905e296b6350fc311cf204a9784074b246d8bf8bf04a4ceb1c1f1c9aaab168b1596d17093c5cd21d2cd51cce41670636783dc06a759c8847868a406c2506fe17975582fe648d1d88b522927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #63: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"3554e827c700000000e1e75e624a06b3a0a353171160858129e15c544e4f0e65ed66dc34f551ac82f63d4aa4f81fe2cb0031a91d1314f835027bca0f1ceeaa0399ca123aa09b13cd194a422e18d5fda167623c3f6e5d4d6abb8953d67c0c48c72927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #64: special case hash", + "NoBenchmark": false + }, + { + "Input": "9b6cd3b812610000000026941a0f0bb53255ea4c9fd0cb3426e3a54b9fc6965c060b700bef665c68899d44f2356a578d126b062023ccc3c056bf0f60a237012b8d186c027832965f4fcc78a3366ca95dedbb410cbef3f26d6be5d581c11d36102927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #65: special case hash", + "NoBenchmark": false + }, + { + "Input": "883ae39f50bf0100000000e7561c26fc82a52baa51c71ca877162f93c4ae01869f6adfe8d5eb5b2c24d7aa7934b6cf29c93ea76cd313c9132bb0c8e38c96831db26a9c9e40e55ee0890c944cf271756c906a33e66b5bd15e051593883b5e99022927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #66: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"a1ce5d6e5ecaf28b0000000000fa7cd010540f420fb4ff7401fe9fce011d0ba6a1af03ca91677b673ad2f33615e56174a1abf6da168cebfa8868f4ba273f16b720aa73ffe48afa6435cd258b173d0c2377d69022e7d098d75caf24c8c5e06b1c2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #67: special case hash", + "NoBenchmark": false + }, + { + "Input": "8ea5f645f373f580930000000038345397330012a8ee836c5494cdffd5ee8054fdc70602766f8eed11a6c99a71c973d5659355507b843da6e327a28c11893db93df5349688a085b137b1eacf456a9e9e0f6d15ec0078ca60a7f83f2b10d213502927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #68: special case hash", + "NoBenchmark": false + }, + { + "Input": "660570d323e9f75fa734000000008792d65ce93eabb7d60d8d9c1bbdcb5ef305b516a314f2fce530d6537f6a6c49966c23456f63c643cf8e0dc738f7b876e675d39ffd033c92b6d717dd536fbc5efdf1967c4bd80954479ba66b0120cd16fff22927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #69: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"d0462673154cce587dde8800000000e98d35f1f45cf9c3bf46ada2de4c568c343b2cbf046eac45842ecb7984d475831582717bebb6492fd0a485c101e29ff0a84c9b7b47a98b0f82de512bc9313aaf51701099cac5f76e68c8595fc1c1d992582927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #70: special case hash", + "NoBenchmark": false + }, + { + "Input": "bd90640269a7822680cedfef000000000caef15a6171059ab83e7b4418d7278f30c87d35e636f540841f14af54e2f9edd79d0312cfa1ab656c3fb15bfde48dcf47c15a5a82d24b75c85a692bd6ecafeb71409ede23efd08e0db9abf6340677ed2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #71: special case hash", + "NoBenchmark": false + }, + { + "Input": "33239a52d72f1311512e41222a00000000d2dcceb301c54b4beae8e284788a7338686ff0fda2cef6bc43b58cfe6647b9e2e8176d168dec3c68ff262113760f52067ec3b651f422669601662167fa8717e976e2db5e6a4cf7c2ddabb3fde9d67d2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #72: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"b8d64fbcd4a1c10f1365d4e6d95c000000007ee4a21a1cbe1dc84c2d941ffaf144a3e23bf314f2b344fc25c7f2de8b6af3e17d27f5ee844b225985ab6e2775cf2d48e223205e98041ddc87be532abed584f0411f5729500493c9cc3f4dd15e862927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #73: special case hash", + "NoBenchmark": false + }, + { + "Input": "01603d3982bf77d7a3fef3183ed092000000003a227420db4088b20fe0e9d84a2ded5b7ec8e90e7bf11f967a3d95110c41b99db3b5aa8d330eb9d638781688e97d5792c53628155e1bfc46fb1a67e3088de049c328ae1f44ec69238a009808f92927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #74: special case hash", + "NoBenchmark": false + }, + { + "Input": "9ea6994f1e0384c8599aa02e6cf66d9c000000004d89ef50b7e9eb0cfbff7363bdae7bcb580bf335efd3bc3d31870f923eaccafcd40ec2f605976f15137d8b8ff6dfa12f19e525270b0106eecfe257499f373a4fb318994f24838122ce7ec3c72927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #75: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"d03215a8401bcf16693979371a01068a4700000000e2fa5bf692bc670905b18c50f9c4f0cd6940e162720957ffff513799209b78596956d21ece251c2401f1c6d7033a0a787d338e889defaaabb106b95a4355e411a59c32aa5167dfab2447262927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #76: special case hash", + "NoBenchmark": false + }, + { + "Input": "307bfaaffb650c889c84bf83f0300e5dc87e000000008408fd5f64b582e3bb14f612820687604fa01906066a378d67540982e29575d019aabe90924ead5c860d3f9367702dd7dd4f75ea98afd20e328a1a99f4857b316525328230ce294b0fef2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #77: special case hash", + "NoBenchmark": false + }, + { + "Input": "bab5c4f4df540d7b33324d36bb0c157551527c00000000e4af574bb4d54ea6b89505e407657d6e8bc93db5da7aa6f5081f61980c1949f56b0f2f507da5782a7ac60d31904e3669738ffbeccab6c3656c08e0ed5cb92b3cfa5e7f71784f9c50212927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #78: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"d4ba47f6ae28f274e4f58d8036f9c36ec2456f5b00000000c3b869197ef5e15ebbd16fbbb656b6d0d83e6a7787cd691b08735aed371732723e1c68a40404517d9d8e35dba96028b7787d91315be675877d2d097be5e8ee34560e3e7fd25c0f002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #79: special case hash", + "NoBenchmark": false + }, + { + "Input": "79fd19c7235ea212f29f1fa00984342afe0f10aafd00000000801e47f8c184e12ec9760122db98fd06ea76848d35a6da442d2ceef7559a30cf57c61e92df327e7ab271da90859479701fccf86e462ee3393fb6814c27b760c4963625c0a198782927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #80: special case hash", + "NoBenchmark": false + }, + { + "Input": "8c291e8eeaa45adbaf9aba5c0583462d79cbeb7ac97300000000a37ea6700cda54e76b7683b6650baa6a7fc49b1c51eed9ba9dd463221f7a4f1005a89fe00c592ea076886c773eb937ec1cc8374b7915cfd11b1c1ae1166152f2f7806a31c8fd2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #81: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"0eaae8641084fa979803efbfb8140732f4cdcf66c3f78a000000003c278a6b215291deaf24659ffbbce6e3c26f6021097a74abdbb69be4fb10419c0c496c946665d6fcf336d27cc7cdb982bb4e4ecef5827f84742f29f10abf83469270a03dc32927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #82: special case hash", + "NoBenchmark": false + }, + { + "Input": "e02716d01fb23a5a0068399bf01bab42ef17c6d96e13846c00000000afc0f89d207a3241812d75d947419dc58efb05e8003b33fc17eb50f9d15166a88479f107cdee749f2e492b213ce80b32d0574f62f1c5d70793cf55e382d5caadf75927672927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #83: special case hash", + "NoBenchmark": false + }, + { + "Input": "9eb0bf583a1a6b9a194e9a16bc7dab2a9061768af89d00659a00000000fc7de16554e49f82a855204328ac94913bf01bbe84437a355a0a37c0dee3cf81aa7728aea00de2507ddaf5c94e1e126980d3df16250a2eaebc8be486effe7f22b4f9292927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #84: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"62aac98818b3b84a2c214f0d5e72ef286e1030cb53d9a82b690e00000000cd15a54c5062648339d2bff06f71c88216c26c6e19b4d80a8c602990ac82707efdfce99bbe7fcfafae3e69fd016777517aa01056317f467ad09aff09be73c9731b0d2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #85: special case hash", + "NoBenchmark": false + }, + { + "Input": "3760a7f37cf96218f29ae43732e513efd2b6f552ea4b6895464b9300000000c8975bd7157a8d363b309f1f444012b1a1d23096593133e71b4ca8b059cff37eaf7faa7a28b1c822baa241793f2abc930bd4c69840fe090f2aacc46786bf9196222927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #86: special case hash", + "NoBenchmark": false + }, + { + "Input": "0da0a1d2851d33023834f2098c0880096b4320bea836cd9cbb6ff6c8000000005694a6f84b8f875c276afd2ebcfe4d61de9ec90305afb1357b95b3e0da43885e0dffad9ffd0b757d8051dec02ebdf70d8ee2dc5c7870c0823b6ccc7c679cbaa42927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #87: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"ffffffff293886d3086fd567aafd598f0fe975f735887194a764a231e82d289aa0c30e8026fdb2b4b4968a27d16a6d08f7098f1a98d21620d7454ba9790f1ba65e470453a8a399f15baf463f9deceb53acc5ca64459149688bd2760c654243392927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #88: special case hash", + "NoBenchmark": false + }, + { + "Input": "7bffffffff2376d1e3c03445a072e24326acdc4ce127ec2e0e8d9ca99527e7b7614ea84acf736527dd73602cd4bb4eea1dfebebd5ad8aca52aa0228cf7b99a88737cc85f5f2d2f60d1b8183f3ed490e4de14368e96a9482c2a4dd193195c902f2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #89: special case hash", + "NoBenchmark": false + }, + { + "Input": "a2b5ffffffffebb251b085377605a224bc80872602a6e467fd016807e97fa395bead6734ebe44b810d3fb2ea00b1732945377338febfd439a8d74dfbd0f942fa6bb18eae36616a7d3cad35919fd21a8af4bbe7a10f73b3e036a46b103ef56e2a2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #90: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"641227ffffffff6f1b96fa5f097fcf3cc1a3c256870d45a67b83d0967d4b20c0499625479e161dacd4db9d9ce64854c98d922cbf212703e9654fae182df9bad242c177cf37b8193a0131108d97819edd9439936028864ac195b64fca76d9d6932927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #91: special case hash", + "NoBenchmark": false + }, + { + "Input": "958415d8ffffffffabad03e2fc662dc3ba203521177502298df56f36600e0f8b08f16b8093a8fb4d66a2c8065b541b3d31e3bfe694f6b89c50fb1aaa6ff6c9b29d6455e2d5d1779748573b611cb95d4a21f967410399b39b535ba3e5af81ca2e2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #92: special case hash", + "NoBenchmark": false + }, + { + "Input": "f1d8de4858ffffffff1281093536f47fe13deb04e1fbe8fb954521b6975420f8be26231b6191658a19dd72ddb99ed8f8c579b6938d19bce8eed8dc2b338cb5f8e1d9a32ee56cffed37f0f22b2dcb57d5c943c14f79694a03b9c5e96952575c892927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #93: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"0927895f2802ffffffff10782dd14a3b32dc5d47c05ef6f1876b95c81fc31def15e76880898316b16204ac920a02d58045f36a229d4aa4f812638c455abe0443e74d357d3fcb5c8c5337bd6aba4178b455ca10e226e13f9638196506a19391232927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #94: special case hash", + "NoBenchmark": false + }, + { + "Input": "60907984aa7e8effffffff4f332862a10a57c3063fb5a30624cf6a0c3ac80589352ecb53f8df2c503a45f9846fc28d1d31e6307d3ddbffc1132315cc07f16dad1348dfa9c482c558e1d05c5242ca1c39436726ecd28258b1899792887dd0a3c62927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #95: special case hash", + "NoBenchmark": false + }, + { + "Input": "c6ff198484939170ffffffff0af42cda50f9a5f50636ea6942d6b9b8cd6ae1e24a40801a7e606ba78a0da9882ab23c7677b8642349ed3d652c5bfa5f2a9558fb3a49b64848d682ef7f605f2832f7384bdc24ed2925825bf8ea77dc59817257822927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #96: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"de030419345ca15c75ffffffff8074799b9e0956cc43135d16dfbe4d27d7e68deacc5e1a8304a74d2be412b078924b3bb3511bac855c05c9e5e9e44df3d61e967451cd8e18d6ed1885dd827714847f96ec4bb0ed4c36ce9808db8f714204f6d12927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #97: special case hash", + "NoBenchmark": false + }, + { + "Input": "6f0e3eeaf42b28132b88fffffffff6c8665604d34acb19037e1ab78caaaac6ff2f7a5e9e5771d424f30f67fdab61e8ce4f8cd1214882adb65f7de94c31577052ac4e69808345809b44acb0b2bd889175fb75dd050c5a449ab9528f8f78daa10c2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #98: special case hash", + "NoBenchmark": false + }, + { + "Input": "cdb549f773b3e62b3708d1ffffffffbe48f7c0591ddcae7d2cb222d1f8017ab9ffcda40f792ce4d93e7e0f0e95e1a2147dddd7f6487621c30a03d710b330021979938b55f8a17f7ed7ba9ade8f2065a1fa77618f0b67add8d58c422c2453a49a2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #99: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"2c3f26f96a3ac0051df4989bffffffff9fd64886c1dc4f9924d8fd6f0edb048481f2359c4faba6b53d3e8c8c3fcc16a948350f7ab3a588b28c17603a431e39a8cd6f6a5cc3b55ead0ff695d06c6860b509e46d99fccefb9f7f9e101857f743002927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #100: special case hash", + "NoBenchmark": false + }, + { + "Input": "ac18f8418c55a2502cb7d53f9affffffff5c31d89fda6a6b8476397c04edf411dfc8bf520445cbb8ee1596fb073ea283ea130251a6fdffa5c3f5f2aaf75ca808048e33efce147c9dd92823640e338e68bfd7d0dc7a4905b3a7ac711e577e90e72927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #101: special case hash", + "NoBenchmark": false + }, + { + "Input": "4f9618f98e2d3a15b24094f72bb5ffffffffa2fd3e2893683e5a6ab8cf0ee610ad019f74c6941d20efda70b46c53db166503a0e393e932f688227688ba6a576293320eb7ca0710255346bdbb3102cdcf7964ef2e0988e712bc05efe16c1993452927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #102: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"422e82a3d56ed10a9cc21d31d37a25ffffffff67edf7c40204caae73ab0bc75aac8096842e8add68c34e78ce11dd71e4b54316bd3ebf7fffdeb7bd5a3ebc1883f5ca2f4f23d674502d4caf85d187215d36e3ce9f0ce219709f21a3aac003b7a82927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #103: special case hash", + "NoBenchmark": false + }, + { + "Input": "7075d245ccc3281b6e7b329ff738fbb417a5ffffffffa0842d9890b5cf95d018677b2d3a59b18a5ff939b70ea002250889ddcd7b7b9d776854b4943693fb92f76b4ba856ade7677bf30307b21f3ccda35d2f63aee81efd0bab6972cc0795db552927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #104: special case hash", + "NoBenchmark": false + }, + { + "Input": "3c80de54cd9226989443d593fa4fd6597e280ebeffffffffc1847eb76c217a95479e1ded14bcaed0379ba8e1b73d3115d84d31d4b7c30e1f05e1fc0d5957cfb0918f79e35b3d89487cf634a4f05b2e0c30857ca879f97c771e877027355b24432927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #105: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"de21754e29b85601980bef3d697ea2770ce891a8cdffffffffc7906aa794b39b43dfccd0edb9e280d9a58f01164d55c3d711e14b12ac5cf3b64840ead512a0a31dbe33fa8ba84533cd5c4934365b3442ca1174899b78ef9a3199f495843897722927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #106: special case hash", + "NoBenchmark": false + }, + { + "Input": "8f65d92927cfb86a84dd59623fb531bb599e4d5f7289ffffffff2f1f2f57881c5b09ab637bd4caf0f4c7c7e4bca592fea20e9087c259d26a38bb4085f0bbff1145b7eb467b6748af618e9d80d6fdcd6aa24964e5a13f885bca8101de08eb0d752927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #107: special case hash", + "NoBenchmark": false + }, + { + "Input": "6b63e9a74e092120160bea3877dace8a2cc7cd0e8426cbfffffffffafc8c3ca85e9b1c5a028070df5728c5c8af9b74e0667afa570a6cfa0114a5039ed15ee06fb1360907e2d9785ead362bb8d7bd661b6c29eeffd3c5037744edaeb9ad990c202927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #108: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"fc28259702a03845b6d75219444e8b43d094586e249c8699ffffffffe852512e0671a0a85c2b72d54a2fb0990e34538b4890050f5a5712f6d1a7a5fb8578f32edb1846bab6b7361479ab9c3285ca41291808f27fd5bd4fdac720e5854713694c2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #109: special case hash", + "NoBenchmark": false + }, + { + "Input": "1273b4502ea4e3bccee044ee8e8db7f774ecbcd52e8ceb571757ffffffffe20a7673f8526748446477dbbb0590a45492c5d7d69859d301abbaedb35b2095103a3dc70ddf9c6b524d886bed9e6af02e0e4dec0d417a414fed3807ef4422913d7c2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #110: special case hash", + "NoBenchmark": false + }, + { + "Input": "08fb565610a79baa0c566c66228d81814f8c53a15b96e602fb49ffffffffff6e7f085441070ecd2bb21285089ebb1aa6450d1a06c36d3ff39dfd657a796d12b5249712012029870a2459d18d47da9aa492a5e6cb4b2d8dafa9e4c5c54a2b9a8b2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #111: special case hash", + "NoBenchmark": false + }, + { + "Input": 
"d59291cc2cf89f3087715fcb1aa4e79aa2403f748e97d7cd28ecaefeffffffff914c67fb61dd1e27c867398ea7322d5ab76df04bc5aa6683a8e0f30a5d287348fa07474031481dda4953e3ac1959ee8cea7e66ec412b38d6c96d28f6d37304ea2927b10512bae3eddcfe467828128bad2903269919f7086069c8c4df6c732838c7787964eaac00e5921fb1498a60f4606766b3d9685001558d1a974e7341513e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #112: special case hash", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25000000000000000000000000000000004319055358e8617b0c46353d039cdaabffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc63254ed705d16f80987e2d9b1a6957d29ce22febf7d10fa515153182415c8361baaca4b1fc105ee5ce80d514ec1238beae2037a6f83625593620d460819e8682160926", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #113: k*G has a large x-coordinate", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25ffffffff00000001000000000000000000000000fffffffffffffffffffffffcffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc63254ed705d16f80987e2d9b1a6957d29ce22febf7d10fa515153182415c8361baaca4b1fc105ee5ce80d514ec1238beae2037a6f83625593620d460819e8682160926", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #114: r too large", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc63254fffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc63254e3cd8d2f81d6953b0844c09d7b560d527cd2ef67056893eadafa52c8501387d59ee41fdb4d10402ce7a0c5e3b747adfa3a490b62a6b7719068903485c0bb6dc2d", + "Expected": 
"0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #115: r,s are large", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd909135bdb6799286170f5ead2de4f6511453fe50914f3df2de54a36383df8dd48240cd81edd91cb6936133508c3915100e81f332c4545d41189b481196851378e05b06e72d4a1bff80ea5db514aa2f93ea6dd6d9c0ae27b7837dc432f9ce89d9", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #116: r and s^-1 have a large Hamming weight", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd27b4577ca009376f71303fd5dd227dcef5deb773ad5f5a84360644669ca249a5b062947356748b0fc17f1704c65aa1dca6e1bfe6779756fa616d91eaad13df2c0b38c17f3d0672e7409cfc5992a99fff12b84a4f8432293b431113f1b2fb579d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #117: r and s^-1 have a large Hamming weight", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25000000000000000000000000000000000000000000000000000000000000000500000000000000000000000000000000000000000000000000000000000000014a03ef9f92eb268cafa601072489a56380fa0dc43171d7712813b3a19a1eb5e53e213e28a608ce9a2f4a17fd830c6654018a79b3e0263d91a8ba90622df6f2f0", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #118: small r and s", + "NoBenchmark": false + }, + { + "Input": 
"532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e2500000000000000000000000000000000000000000000000000000000000000050000000000000000000000000000000000000000000000000000000000000003091194c1cba17f34e286b4833701606a41cef26177ada8850b601ea1f859e70127242fcec708828758403ce2fe501983a7984e6209f4d6b95db9ad77767f55eb", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #120: small r and s", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e2500000000000000000000000000000000000000000000000000000000000000050000000000000000000000000000000000000000000000000000000000000005103c6ecceff59e71ea8f56fee3a4b2b148e81c2bdbdd39c195812c96dcfb41a72303a193dc591be150b883d770ec51ebb4ebce8b09042c2ecb16c448d8e57bf5", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #122: small r and s", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25000000000000000000000000000000000000000000000000000000000000000500000000000000000000000000000000000000000000000000000000000000063b66b829fe604638bcb2bfe8c22228be67390c20111bd2b451468927e87fb6eabc8e59c009361758b274ba2cad36b58fde485a3ed09dade76712fa9e9c4ac212", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #124: small r and s", + "NoBenchmark": false + }, + { + "Input": 
"532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc63255600000000000000000000000000000000000000000000000000000000000000063b66b829fe604638bcb2bfe8c22228be67390c20111bd2b451468927e87fb6eabc8e59c009361758b274ba2cad36b58fde485a3ed09dade76712fa9e9c4ac212", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #126: r is larger than n", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e250000000000000000000000000000000000000000000000000000000000000005ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc75fbd84ff2f6c24e4a33cd71c09fdcbc74a6233961b874b8c8e0eb94582092cbc50c3084fa9547afda5c66335f3f937d4c79afa120486b534139d59ae82d61ead26420", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #127: s is larger than n", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e2500000000000000000000000000000000000000000000000000000000000001008f1e3c7862c58b16bb76eddbb76eddbb516af4f63f2d74d76e0d28c9bb75ea8884b959080bb30859cd53c2fb973cf14d60cdaa8ee00587889b5bc657ac588175a02ce5c1e53cb196113c78b4cb8dc7d360e5ea7850b0f6650b0c45af2c3cd7ca", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #128: small r and s^-1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25000000000000000000000000000000000000000000000000002d9b4d347952d6ef3043e7329581dbb3974497710ab11505ee1c87ff907beebadd195a0ffe6d7adf4083bd6ecbda5a77ae578e5d835fa7f74a07ebb91e0570e1ff32a563354e9925af80b09a167d9ef647df28e2d9acd0d4bc4f2deec5723818edaf9071e311f8", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": 
"wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #129: smallish r and s^-1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25000000000000000000000000000000000000001033e67e37b32b445580bf4eff8b748b74000000008b748b748b748b7466e769ad4a16d3dcd87129b8e91d1b4dc2569a3c9bf8c1838ca821f7ba6f000cc8679d278f3736b414a34a7c956a03770387ea85bc4f28804b4a91c9b7d65bc6434c975806795ab7d441a4e9683aeb09", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #130: 100-bit r and small s^-1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e250000000000000000000000000000000000000000000000000000000000000100ef9f6ba4d97c09d03178fa20b4aaad83be3cf9cb824a879fec3270fc4b81ef5b4a9f7da2a6c359a16540c271774a6bf1c586357c978256f44a6496d80670968ac496e73a44563f8d56fbd7bb9e4e3ae304c86f2c508eb777b03924755beb40d4", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #131: small r and 100 bit s^-1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e2500000000000000000000000000000000000000062522bbd3ecbe7c39e93e7c25ef9f6ba4d97c09d03178fa20b4aaad83be3cf9cb824a879fec3270fc4b81ef5b874146432b3cd2c9e26204c0a34136996067d466dde4917a8ff23a8e95ca106b709b3d50976ef8b385a813bc35f3a20710bdc6edd465e6f43ac4866703a6608c", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #132: 100-bit r and s^-1", + "NoBenchmark": false + }, + { + "Input": 
"532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc6324d5555555550000000055555555555555553ef7a8e48d07df81a693439654210c707a736d8e326a9ca62bbe25a34ea4e3633b499a96afa7aaa3fcf3fd88f8e07edeb3e45879d8622b93e818443a686e869eeda7bf9ae46aa3eafcc48a5934864627", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #133: r and s^-1 are close to n", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25555555550000000055555555555555553ef7a8e48d07df81a693439654210c700000000000000000000000000000000000000000000000000000000000000001e84d9b232e971a43382630f99725e423ec1ecb41e55172e9c69748a03f0d5988618b15b427ad83363bd041ff75fac98ef2ee923714e7d1dfe31753793c7588d4", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #134: s == 1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25555555550000000055555555555555553ef7a8e48d07df81a693439654210c700000000000000000000000000000000000000000000000000000000000000000e84d9b232e971a43382630f99725e423ec1ecb41e55172e9c69748a03f0d5988618b15b427ad83363bd041ff75fac98ef2ee923714e7d1dfe31753793c7588d4", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #135: s == 0", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257fffffff800000007fffffffffffffffde737d56d38bcf4279dce5617e3192a8555555550000000055555555555555553ef7a8e48d07df81a693439654210c700203736fcb198b15d8d7a0c80f66dddd15259240aa78d08aae67c467de04503434383438d5041ea9a387ee8e4d4e84b4471b160c6bcf2568b072f8f20e87a996", + "Expected": "", + "Gas": 3450, + "Name": 
"wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #136: point at infinity during verify", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257fffffff800000007fffffffffffffffde737d56d38bcf4279dce5617e3192a97fffffff800000007fffffffffffffffde737d56d38bcf4279dce5617e3192a878d844dc7f16b73b1f2a39730da5d8cd99fe2e70a18482384e37dcd2bfea02e1ed6572e01eb7a8d113d02c666c45ef22d3b9a6a6dea99aa43a8183c26e75d336", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #137: edge case for signature malleability", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257fffffff800000007fffffffffffffffde737d56d38bcf4279dce5617e3192a97fffffff800000007fffffffffffffffde737d56d38bcf4279dce5617e3192a9dec6c8257dde94110eacc8c09d2e5789cc5beb81a958b02b4d62da9599a7401466fae1614174be63970b83f6524421067b06dd6f4e9c56baca4e344fdd690f1d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #138: edge case for signature malleability", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25555555550000000055555555555555553ef7a8e48d07df81a693439654210c70532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25a17f5b75a35ed64623ca5cbf1f91951292db0c23f0c2ea24c3d0cad0988cabc083a7a618625c228940730b4fa3ee64faecbb2fc20fdde7c58b3a3f6300424dc6", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #139: u1 == 1", + "NoBenchmark": false + }, + { + "Input": 
"532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25555555550000000055555555555555553ef7a8e48d07df81a693439654210c70acd155416a8b77f34089464733ff7cd39c400e9c69af7beb9eac5054ed2ec72c04ba0cba291a37db13f33bf90dab628c04ec8393a0200419e9eaa1ebcc9fb5c31f3a0a0e6823a49b625ad57b12a32d4047970fc3428f0f0049ecf4265dc12f62", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #140: u1 == n - 1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25555555550000000055555555555555553ef7a8e48d07df81a693439654210c70555555550000000055555555555555553ef7a8e48d07df81a693439654210c70692b6c828e0feed63d8aeaa2b7322f9ccbe8723a1ed39f229f204a434b8900efa1f6f6abcb38ea3b8fde38b98c7c271f274af56a8c5628dc3329069ae4dd5716", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #141: u2 == 1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25555555550000000055555555555555553ef7a8e48d07df81a693439654210c70aaaaaaaa00000000aaaaaaaaaaaaaaaa7def51c91a0fbf034d26872ca84218e100cefd9162d13e64cb93687a9cd8f9755ebb5a3ef7632f800f84871874ccef09543ecbeaf7e8044ef721be2fb5f549e4b8480d2587404ebf7dbbef2c54bc0cb1", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #142: u2 == n - 1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd710f8e3edc7c2d5a3fd23de844002bb949d9f794f6d5405f6d97c1bb03dd2bd2b975183b42551cf52f291d5c1921fd5e12f50c8c85a4beb9de03efa3f0f244862243018e6866df922dc313612020311ff21e242ce3fb15bc78c406b25ab43091", + 
"Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #143: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdedffbc270f722c243069a7e5f40335a61a58525c7b4db2e7a8e269274ffe4e1bc25f1d166f3e211cdf042a26f8abf6094d48b8d17191d74ed71714927446699965d06dd6a88abfa49e8b4c5da6bb922851969adf9604b5accfb52a114e77ccdb", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #144: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffda25adcae105ed7ff4f95d2344e24ee523314c3e178525d007904b68919ba4d538fe5e88243a76e41a004236218a3c3a2d6eee398a23c3a0b008d7f0164cbc0ca98a20d1bdcf573513c7cfd9b83c63e3a82d40127c897697c86b8cb387af7f240", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #145: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd2e4348c645707dce6760d773de3f3e87346924b2f64bd3dd0297e766b5805ebb02148256b530fbc470c7b341970b38243ecee6d5a840a37beca2efb37e8dff2cc0adbea0882482a7489ca703a399864ba987eeb6ddb738af53a83573473cb30d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #146: edge case for u1", + "NoBenchmark": false + }, + { + "Input": 
"532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd348c673b07dce3920d773de3f3e87408869e916dbcf797d8f9684fb67753d1dca34db012ce6eda1e9c7375c5fcf3e54ed698e19615124273b3a621d021c76f8e777458d6f55a364c221e39e1205d5510bb4fbb7ddf08d8d8fdde13d1d6df7f14", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #147: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd6918ce760fb9c7241aee7bc7e7d0e8110d3d22db79ef2fb1f2d09f6ceea7a3b8b97af3fe78be15f2912b6271dd8a43badb6dd2a1b315b2ce7ae37b4e7778041d930d71ee1992d2466495c42102d08e81154c305307d1dcd52d0fa4c479b278e7", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #148: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd73b3c694391d8eadde3f3e874089464715ac20e4c126bbf6d864d648969f5b5a81e7198a3c3f23901cedc7a1d6eff6e9bf81108e6c35cd8559139af3135dbcbb9ef1568530291a8061b90c9f4285eefcba990d4570a4e3b7b737525b5d580034", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #149: edge case for u1", + "NoBenchmark": false + }, + { + "Input": 
"532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdbb07ac7a86948c2c2989a16db1930ef1b89ce112595197656877e53c41457f28ab4d792ca121d1dba39cb9de645149c2ab573e8becc6ddff3cc9960f188ddf737f90ba23664153e93262ff73355415195858d7be1315a69456386de68285a3c8", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #150: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd27e4d82cb6c061dd9337c69bf9332ed3d198662d6f2299443f62c861187db648518412b69af43aae084476a68d59bbde51fbfa9e5be80563f587c9c2652f88ef2d3b90d25baa6bdb7b0c55e5240a3a98fbc24afed8523edec1c70503fc10f233", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #151: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffde7c5cf3aac2e88923b77850515fff6a12d13b356dfe9ec275c3dd81ae94609a4a08f14a644b9a935dffea4761ebaf592d1f66fe6cd373aa7f5d370af34f8352da54b5bc4025cf335900a914c2934ec2fec7a396d0a7affcad732a5741c7aaaf5", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #152: edge case for u1", + "NoBenchmark": false + }, + { + "Input": 
"532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdc77838df91c1e953e016e10bddffea2317f9fee32bacfe553cede9e57a748f68ccf2296a6a89b62b90739d38af4ae3a20e9f45715b90044639241061e33f8f8caace0046491eeaa1c6e9a472b96d88f4af83e7ff1bb84438c7e058034412ae08", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #153: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd8ef071c02383d2a6c02dc217bbffd446730d0318b0425e2586220907f885f97f94b0fc1525bcabf82b1f34895e5819a06c02b23e04002276e165f962c86e3927be7c2ab4d0b25303204fb32a1f8292902792225e16a6d2dbfb29fbc89a9c3376", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #154: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd5668aaa0b545bbf9a044a32399ffbe69ce20074e34d7bdf5cf56282a769763965351f37e1de0c88c508527d89882d183ccdcf2efca407edb0627cadfd16de6ec44b4b57cdf960d32ebcc4c97847eed218425853b5b675eb781b766a1a1300349", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #155: edge case for u1", + "NoBenchmark": false + }, + { + "Input": 
"532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdd12d6e56882f6c0027cae91a27127728f7fddf478fb4fdc2b65f40a60b0eb952748bbafc320e6735cb64019710a269c6c2b5d147bdc831325cb2fb276ac971a69d655e9a755bc9d800ad21ee3fd4d980d93a7a49a8c5ccd37005177578f51163", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #156: edge case for u1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd7fffffffaaaaaaaaffffffffffffffffe9a2538f37b28a2c513dee40fecbb71a14b3bbd75c5e1c0c36535a934d4ab85112410b3b90fa97a31c33038964fd85cc112f7d837f8f9c36b460d636c965a5f818f2b50c5d00fb3f9705561dd6631883", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #157: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdb62f26b5f2a2b26f6de86d42ad8a13da3ab3cccd0459b201de009e526adf21f2d823533c04cd8edc6d6f950a8e08ade04a9bafa2f14a590356935671ae9305bf43178d1f88b6a57a96924c265f0ddb75b58312907b195acb59d7797303123775", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #158: edge case for u2", + "NoBenchmark": false + }, + { + "Input": 
"532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdbb1d9ac949dd748cd02bbbe749bd351cd57b38bb61403d700686aa7b4c90851edb2b3408b3167d91030624c6328e8ce3ec108c105575c2f3d209b92e654bab69c34318139c50b0802c6e612f0fd3189d800df7c996d5d7b7c3d6be82836fa258", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #159: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd66755a00638cdaec1c732513ca0234ece52545dac11f816e818f725b4f60aaf209179ce7c59225392216453b2ac1e9d178c24837dfae26bc1dd7ab60638527425556b42e330289f3b826b2db7a86d19d45c2860a59f2be1ddcc3b691f95a9255", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #160: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd55a00c9fcdaebb6032513ca0234ecfffe98ebe492fdf02e48ca48e982beb366901959fb8deda56e5467b7e4b214ea4c2d0c2fb29d70ff19b6b1eccebd6568d7ed9dbd77a918297fd970bff01e1343f6925167db5a14d098a211c39cc3a413398", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #161: edge case for u2", + "NoBenchmark": false + }, + { + "Input": 
"532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdab40193f9b5d76c064a27940469d9fffd31d7c925fbe05c919491d3057d66cd2567f1fdc387e5350c852b4e8f8ba9d6d947e1c5dd7ccc61a5938245dd6bcab3a9960bebaf919514f9535c22eaaf0b5812857970e26662267b1f3eb1011130a11", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #162: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdca0234ebb5fdcb13ca0234ecffffffffcb0dadbbc7f549f8a26b4408d0dc86003499f974ff4ca6bbb2f51682fd5f51762f9dd6dd2855262660b36d46d3e4bec2f498fae2487807e220119152f0122476c64d4fa46ddce85c4546630f0d5c5e81", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #163: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdbfffffff3ea3677e082b9310572620ae19933a9e65b285598711c77298815ad32c5c01662cf00c1929596257db13b26ecf30d0f3ec4b9f0351b0f27094473426e986a086060d086eee822ddd2fc744247a0154b57f7a69c51d9fdafa484e4ac7", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #164: edge case for u2", + "NoBenchmark": false + }, + { + "Input": 
"532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd266666663bbbbbbbe6666666666666665b37902e023fab7c8f055d86e5cc41f491d4cba813a04d86dbae94c23be6f52c15774183be7ba5b2d9f3cf010b160501900b8adfea6491019a9ac080d516025a541bf4b952b0ad7be4b1874b02fd544a", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #165: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdbfffffff36db6db7a492492492492492146c573f4c6dfc8d08a443e258970b09ef7fd0a3a36386638330ecad41e1a3b302af36960831d0210c614b948e8aa124ef0d6d800e4047d6d3c1be0fdeaf11fcd8cab5ab59c730eb34116e35a8c7d098", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #166: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdbfffffff2aaaaaab7fffffffffffffffc815d0e60b3e596ecb1ad3a27cfd49c4a521dab13cc9152d8ca77035a607fea06c55cc3ca5dbeb868cea92eafe93df2a7bfb9b28531996635e6a5ccaa2826a406ce1111bdb9c2e0ca36500418a2f43de", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #167: edge case for u2", + "NoBenchmark": false + }, + { + "Input": 
"532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd7fffffff55555555ffffffffffffffffd344a71e6f651458a27bdc81fd976e37474d58a4eec16e0d565f2187fe11d4e8e7a2683a12f38b4fc01d1237a81a10976e55f73bb7cdda46bdb67ef77f6fd2969df2b67920fb5945fde3a517a6ded4cd", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #168: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd3fffffff800000007fffffffffffffffde737d56d38bcf4279dce5617e3192aa692da5cd4309d9a6e5cb525c37da8fa0879f7b57208cdabbf47d223a5b23a62140e0daa78cfdd207a7389aaed61738b17fc5fc3e6a5ed3397d2902e9125e6ab4", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #169: edge case for u2", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd5d8ecd64a4eeba466815ddf3a4de9a8e6abd9c5db0a01eb80343553da648428f85689b3e0775c7718a90279f14a8082cfcd4d1f1679274f4e9b8805c570a0670167fcc5ca734552e09afa3640f4a034e15b9b7ca661ec7ff70d3f240ebe705b1", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #170: edge case for u2", + "NoBenchmark": false + }, + { + "Input": 
"532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e256f2347cab7dd76858fe0555ac3bc99048c4aacafdfb6bcbe05ea6c42c4934569f21d907e3890916dc4fa1f4703c1e50d3f54ddf7383e44023a41de562aa18ed80158137755b901f797a90d4ca8887e023cb2ef63b2ba2c0d455edaef42cf237e2a964fc00d377a8592b8b61aafa7a4aaa7c7b9fd2b41d6e0e17bd1ba5677edcd", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #171: point duplication during verification", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e256f2347cab7dd76858fe0555ac3bc99048c4aacafdfb6bcbe05ea6c42c4934569f21d907e3890916dc4fa1f4703c1e50d3f54ddf7383e44023a41de562aa18ed80158137755b901f797a90d4ca8887e023cb2ef63b2ba2c0d455edaef42cf237ed569b03ef2c8857b6d4749e550585b5558384603d4be291f1e842e45a9881232", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #172: duplication bug", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e250000000000000000000000000000000000000000000000000000000000000001555555550000000055555555555555553ef7a8e48d07df81a693439654210c7038a084ffccc4ae2f8204be2abca9fb8ad4ab283b2aa50f13b6bb2347adabc69ca699799b77b1cc6dad271e88b899c12931986e958e1f5cf5653dddf7389365e2", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #173: point with x-coordinate 0", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25555555550000000055555555555555553ef7a8e48d07df81a693439654210c703333333300000000333333333333333325c7cbbc549e52e763f1f55a327a3aa9664ce273320d918d8bdb2e61201b4549b36b7cdc54e33b84adb6f2c10aac831e49e68831f18bda2973ac3d76bfbc8c5ee1cceed2dd862e2dc7c915c736cef1f4", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json 
EcdsaP1363Verify SHA-256 #175: comparison with point at infinity ", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257cf27b188d034f7e8a52380304b51ac3c08969e277f21b35a60b48fc47669978555555550000000055555555555555553ef7a8e48d07df81a693439654210c70961691a5e960d07a301dbbad4d86247ec27d7089faeb3ddd1add395efff1e0fe7254622cc371866cdf990d2c5377790e37d1f1519817f09a231bd260a9e78aeb", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #176: extreme value for k and edgecase s", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257cf27b188d034f7e8a52380304b51ac3c08969e277f21b35a60b48fc47669978b6db6db6249249254924924924924924625bd7a09bec4ca81bcdd9f8fd6b63cc5d283e13ce8ca60da868e3b0fb33e6b4f1074793274e2928250e71e2aca63e9c214dc74fa25371fb4d9e506d418ed9a1bfd6d0c8bb6591d3e0f44505a84886ce", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #177: extreme value for k and s^-1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257cf27b188d034f7e8a52380304b51ac3c08969e277f21b35a60b48fc47669978cccccccc00000000cccccccccccccccc971f2ef152794b9d8fc7d568c9e8eaa70fc351da038ae0803bd1d86514ae0462f9f8216551d9315aa9d297f792eef6a341c74eed786f2d33da35360ca7aa925e753f00d6077a1e9e5fc339d634019c73", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #178: extreme value for k and s^-1", + "NoBenchmark": false + }, + { + "Input": 
"532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257cf27b188d034f7e8a52380304b51ac3c08969e277f21b35a60b48fc476699783333333300000000333333333333333325c7cbbc549e52e763f1f55a327a3aaaa1e34c8f16d138673fee55c080547c2bfd4de7550065f638322bba9430ce4b60662be9bb512663aa4d7df8ab3f3b4181c5d44a7bdf42436620b7d8a6b81ac936", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #179: extreme value for k and s^-1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257cf27b188d034f7e8a52380304b51ac3c08969e277f21b35a60b48fc4766997849249248db6db6dbb6db6db6db6db6db5a8b230d0b2b51dcd7ebf0c9fef7c1857e1a8a8338d7fd8cf41d322a302d2078a87a23c7186150ed7cda6e52817c1bdfd0a9135a89d21ce821e29014b2898349254d748272b2d4eb8d59ee34c615377f", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #180: extreme value for k and s^-1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e257cf27b188d034f7e8a52380304b51ac3c08969e277f21b35a60b48fc4766997816a4502e2781e11ac82cbc9d1edd8c981584d13e18411e2f6e0478c34416e3bb5c19fe227a61abc65c61ee7a018cc9571b2c6f663ea33583f76a686f64be078b7b4a0d734940f613d52bc48673b457c2cf78492490a5cc5606c0541d17b24ddb", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #181: extreme value for k", + "NoBenchmark": false + }, + { + "Input": 
"532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e256b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296555555550000000055555555555555553ef7a8e48d07df81a693439654210c70db02d1f3421d600e9d9ef9e47419dba3208eed08c2d4189a5db63abeb2739666e0ed26967b9ada9ed7ffe480827f90a0d210d5fd8ec628e31715e6b24125512a", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #182: extreme value for k and edgecase s", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e256b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296b6db6db6249249254924924924924924625bd7a09bec4ca81bcdd9f8fd6b63cc6222d1962655501893c29e441395b6c05711bd3ed5a0ef72cfab338b88229c4baaae079cb44a1af070362aaa520ee24cac2626423b0bf81af1c54311d8e2fd23", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #183: extreme value for k and s^-1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e256b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296cccccccc00000000cccccccccccccccc971f2ef152794b9d8fc7d568c9e8eaa74ccfa24c67f3def7fa81bc99c70bb0419c0952ba599f4c03361da184b04cdca5db76b797f7f41d9c729a2219478a7e629728df870800be8cf6ca7a0a82153bfa", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #184: extreme value for k and s^-1", + "NoBenchmark": false + }, + { + "Input": 
"532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e256b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c2963333333300000000333333333333333325c7cbbc549e52e763f1f55a327a3aaaea1c72c91034036bac71402b6e9ecc4af3dbde7a99dc574061e99fefff9d84dab7dd057e75b78ac6f56e34eb048f0a9d29d5d055408c90d02bc2ea918c18cb63", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #185: extreme value for k and s^-1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e256b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c29649249248db6db6dbb6db6db6db6db6db5a8b230d0b2b51dcd7ebf0c9fef7c185c2879a66d86cb20b820b7795da2da62b38924f7817d1cd350d936988e90e79bc5431a7268ff6931c7a759de024eff90bcb0177216db6fd1f3aaaa11fa3b6a083", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #186: extreme value for k and s^-1", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e256b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c29616a4502e2781e11ac82cbc9d1edd8c981584d13e18411e2f6e0478c34416e3bbab1c0f273f74abc2b848c75006f2ef3c54c26df27711b06558f455079aee0ba3df510f2ecef6d9a05997c776f14ad6456c179f0a13af1771e4d6c37fa48b47f2", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #187: extreme value for k", + "NoBenchmark": false + }, + { + "Input": 
"532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25249249246db6db6ddb6db6db6db6db6dad4591868595a8ee6bf5f864ff7be0c26b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c2964fe342e2fe1a7f9b8ee7eb4a7c0f9e162bce33576b315ececbb6406837bf51f5", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #188: testing point duplication", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25acd155416a8b77f34089464733ff7cd39c400e9c69af7beb9eac5054ed2ec72c249249246db6db6ddb6db6db6db6db6dad4591868595a8ee6bf5f864ff7be0c26b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c2964fe342e2fe1a7f9b8ee7eb4a7c0f9e162bce33576b315ececbb6406837bf51f5", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #189: testing point duplication", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25249249246db6db6ddb6db6db6db6db6dad4591868595a8ee6bf5f864ff7be0c26b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296b01cbd1c01e58065711814b583f061e9d431cca994cea1313449bf97c840ae0a", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #190: testing point duplication", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e25acd155416a8b77f34089464733ff7cd39c400e9c69af7beb9eac5054ed2ec72c249249246db6db6ddb6db6db6db6db6dad4591868595a8ee6bf5f864ff7be0c26b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296b01cbd1c01e58065711814b583f061e9d431cca994cea1313449bf97c840ae0a", + "Expected": "", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #191: testing point duplication", + 
"NoBenchmark": false + }, + { + "Input": "bb5a52f42f9c9261ed4361f59422a1e30036e7c32b270c8807a419feca605023a8ea150cb80125d7381c4c1f1da8e9de2711f9917060406a73d7904519e51388f3ab9fa68bd47973a73b2d40480c2ba50c22c9d76ec217257288293285449b8604aaec73635726f213fb8a9e64da3b8632e41495a944d0045b522eba7240fad587d9315798aaa3a5ba01775787ced05eaaf7b4e09fc81d6d1aa546e8365d525d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #269: pseudorandom signature", + "NoBenchmark": false + }, + { + "Input": "532eaabd9574880dbf76b9b8cc00832c20a6ec113d682299550d7a6e0f345e2530e782f964b2e2ff065a051bc7adc20615d8c43a1365713c88268822c253bcce5b16df652aa1ecb2dc8b46c515f9604e2e84cacfa7c6eec30428d2d3f4e08ed504aaec73635726f213fb8a9e64da3b8632e41495a944d0045b522eba7240fad587d9315798aaa3a5ba01775787ced05eaaf7b4e09fc81d6d1aa546e8365d525d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #270: pseudorandom signature", + "NoBenchmark": false + }, + { + "Input": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855b292a619339f6e567a305c951c0dcbcc42d16e47f219f9e98e76e09d8770b34a0177e60492c5a8242f76f07bfe3661bde59ec2a17ce5bd2dab2abebdf89a62e204aaec73635726f213fb8a9e64da3b8632e41495a944d0045b522eba7240fad587d9315798aaa3a5ba01775787ced05eaaf7b4e09fc81d6d1aa546e8365d525d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #271: pseudorandom signature", + "NoBenchmark": false + }, + { + "Input": 
"de47c9b27eb8d300dbb5f2c353e632c393262cf06340c4fa7f1b40c4cbd36f90986e65933ef2ed4ee5aada139f52b70539aaf63f00a91f29c69178490d57fb713dafedfb8da6189d372308cbf1489bbbdabf0c0217d1c0ff0f701aaa7a694b9c04aaec73635726f213fb8a9e64da3b8632e41495a944d0045b522eba7240fad587d9315798aaa3a5ba01775787ced05eaaf7b4e09fc81d6d1aa546e8365d525d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #272: pseudorandom signature", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91d434e262a49eab7781e353a3565e482550dd0fd5defa013c7f29745eff3569f19b0c0a93f267fb6052fd8077be769c2b98953195d7bc10de844218305c6ba17a4f337ccfd67726a805e4f1600ae2849df3807eca117380239fbd816900000000ed9dea124cc8c396416411e988c30f427eb504af43a3146cd5df7ea60666d685", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #288: x-coordinate of the public key has many trailing 0's", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f910fe774355c04d060f76d79fd7a772e421463489221bf0a33add0be9b1979110b500dcba1c69a8fbd43fa4f57f743ce124ca8b91a1f325f3fac6181175df557374f337ccfd67726a805e4f1600ae2849df3807eca117380239fbd816900000000ed9dea124cc8c396416411e988c30f427eb504af43a3146cd5df7ea60666d685", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #289: x-coordinate of the public key has many trailing 0's", + "NoBenchmark": false + }, + { + "Input": 
"2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91bb40bf217bed3fb3950c7d39f03d36dc8e3b2cd79693f125bfd06595ee1135e3541bf3532351ebb032710bdb6a1bf1bfc89a1e291ac692b3fa4780745bb556774f337ccfd67726a805e4f1600ae2849df3807eca117380239fbd816900000000ed9dea124cc8c396416411e988c30f427eb504af43a3146cd5df7ea60666d685", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #290: x-coordinate of the public key has many trailing 0's", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91664eb7ee6db84a34df3c86ea31389a5405badd5ca99231ff556d3e75a233e73a59f3c752e52eca46137642490a51560ce0badc678754b8f72e51a2901426a1bd3cf03d614d8939cfd499a07873fac281618f06b8ff87e8015c3f49726500493584fa174d791c72bf2ce3880a8960dd2a7c7a1338a82f85a9e59cdbde80000000", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #291: y-coordinate of the public key has many trailing 0's", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f914cd0429bbabd2827009d6fcd843d4ce39c3e42e2d1631fd001985a79d1fd8b439638bf12dd682f60be7ef1d0e0d98f08b7bca77a1a2b869ae466189d2acdabe33cf03d614d8939cfd499a07873fac281618f06b8ff87e8015c3f49726500493584fa174d791c72bf2ce3880a8960dd2a7c7a1338a82f85a9e59cdbde80000000", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #292: y-coordinate of the public key has many trailing 0's", + "NoBenchmark": false + }, + { + "Input": 
"2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91e56c6ea2d1b017091c44d8b6cb62b9f460e3ce9aed5e5fd41e8added97c56c04a308ec31f281e955be20b457e463440b4fcf2b80258078207fc1378180f89b553cf03d614d8939cfd499a07873fac281618f06b8ff87e8015c3f49726500493584fa174d791c72bf2ce3880a8960dd2a7c7a1338a82f85a9e59cdbde80000000", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #293: y-coordinate of the public key has many trailing 0's", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f911158a08d291500b4cabed3346d891eee57c176356a2624fb011f8fbbf3466830228a8c486a736006e082325b85290c5bc91f378b75d487dda46798c18f2855193cf03d614d8939cfd499a07873fac281618f06b8ff87e8015c3f4972650049357b05e8b186e38d41d31c77f5769f22d58385ecc857d07a561a6324217fffffff", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #294: y-coordinate of the public key has many trailing 1's", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91b1db9289649f59410ea36b0c0fc8d6aa2687b29176939dd23e0dde56d309fa9d3e1535e4280559015b0dbd987366dcf43a6d1af5c23c7d584e1c3f48a12513363cf03d614d8939cfd499a07873fac281618f06b8ff87e8015c3f4972650049357b05e8b186e38d41d31c77f5769f22d58385ecc857d07a561a6324217fffffff", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #295: y-coordinate of the public key has many trailing 1's", + "NoBenchmark": false + }, + { + "Input": 
"2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91b7b16e762286cb96446aa8d4e6e7578b0a341a79f2dd1a220ac6f0ca4e24ed86ddc60a700a139b04661c547d07bbb0721780146df799ccf55e55234ecb8f12bc3cf03d614d8939cfd499a07873fac281618f06b8ff87e8015c3f4972650049357b05e8b186e38d41d31c77f5769f22d58385ecc857d07a561a6324217fffffff", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #296: y-coordinate of the public key has many trailing 1's", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91d82a7c2717261187c8e00d8df963ff35d796edad36bc6e6bd1c91c670d9105b43dcabddaf8fcaa61f4603e7cbac0f3c0351ecd5988efb23f680d07debd1399292829c31faa2e400e344ed94bca3fcd0545956ebcfe8ad0f6dfa5ff8effffffffa01aafaf000e52585855afa7676ade284113099052df57e7eb3bd37ebeb9222e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #297: x-coordinate of the public key has many trailing 1's", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f915eb9c8845de68eb13d5befe719f462d77787802baff30ce96a5cba063254af782c026ae9be2e2a5e7ca0ff9bbd92fb6e44972186228ee9a62b87ddbe2ef66fb52829c31faa2e400e344ed94bca3fcd0545956ebcfe8ad0f6dfa5ff8effffffffa01aafaf000e52585855afa7676ade284113099052df57e7eb3bd37ebeb9222e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #298: x-coordinate of the public key has many trailing 1's", + "NoBenchmark": false + }, + { + "Input": 
"2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f9196843dd03c22abd2f3b782b170239f90f277921becc117d0404a8e4e36230c28f2be378f526f74a543f67165976de9ed9a31214eb4d7e6db19e1ede123dd991d2829c31faa2e400e344ed94bca3fcd0545956ebcfe8ad0f6dfa5ff8effffffffa01aafaf000e52585855afa7676ade284113099052df57e7eb3bd37ebeb9222e", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #299: x-coordinate of the public key has many trailing 1's", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91766456dce1857c906f9996af729339464d27e9d98edc2d0e3b760297067421f6402385ecadae0d8081dccaf5d19037ec4e55376eced699e93646bfbbf19d0b41fffffff948081e6a0458dd8f9e738f2665ff9059ad6aac0708318c4ca9a7a4f55a8abcba2dda8474311ee54149b973cae0c0fb89557ad0bf78e6529a1663bd73", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #300: x-coordinate of the public key is large", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91c605c4b2edeab20419e6518a11b2dbc2b97ed8b07cced0b19c34f777de7b9fd9edf0f612c5f46e03c719647bc8af1b29b2cde2eda700fb1cff5e159d47326dbafffffff948081e6a0458dd8f9e738f2665ff9059ad6aac0708318c4ca9a7a4f55a8abcba2dda8474311ee54149b973cae0c0fb89557ad0bf78e6529a1663bd73", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #301: x-coordinate of the public key is large", + "NoBenchmark": false + }, + { + "Input": 
"2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91d48b68e6cabfe03cf6141c9ac54141f210e64485d9929ad7b732bfe3b7eb8a84feedae50c61bd00e19dc26f9b7e2265e4508c389109ad2f208f0772315b6c941fffffff948081e6a0458dd8f9e738f2665ff9059ad6aac0708318c4ca9a7a4f55a8abcba2dda8474311ee54149b973cae0c0fb89557ad0bf78e6529a1663bd73", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #302: x-coordinate of the public key is large", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91b7c81457d4aeb6aa65957098569f0479710ad7f6595d5874c35a93d12a5dd4c7b7961a0b652878c2d568069a432ca18a1a9199f2ca574dad4b9e3a05c0a1cdb300000003fa15f963949d5f03a6f5c7f86f9e0015eeb23aebbff1173937ba748e1099872070e8e87c555fa13659cca5d7fadcfcb0023ea889548ca48af2ba7e71", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #303: x-coordinate of the public key is small", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f916b01332ddb6edfa9a30a1321d5858e1ee3cf97e263e669f8de5e9652e76ff3f75939545fced457309a6a04ace2bd0f70139c8f7d86b02cb1cc58f9e69e96cd5a00000003fa15f963949d5f03a6f5c7f86f9e0015eeb23aebbff1173937ba748e1099872070e8e87c555fa13659cca5d7fadcfcb0023ea889548ca48af2ba7e71", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #304: x-coordinate of the public key is small", + "NoBenchmark": false + }, + { + "Input": 
"2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91efdb884720eaeadc349f9fc356b6c0344101cd2fd8436b7d0e6a4fb93f106361f24bee6ad5dc05f7613975473aadf3aacba9e77de7d69b6ce48cb60d8113385d00000003fa15f963949d5f03a6f5c7f86f9e0015eeb23aebbff1173937ba748e1099872070e8e87c555fa13659cca5d7fadcfcb0023ea889548ca48af2ba7e71", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #305: x-coordinate of the public key is small", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f9131230428405560dcb88fb5a646836aea9b23a23dd973dcbe8014c87b8b20eb070f9344d6e812ce166646747694a41b0aaf97374e19f3c5fb8bd7ae3d9bd0beffbcbb2914c79f045eaa6ecbbc612816b3be5d2d6796707d8125e9f851c18af015000000001352bb4a0fa2ea4cceb9ab63dd684ade5a1127bcf300a698a7193bc2", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #306: y-coordinate of the public key is small", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91caa797da65b320ab0d5c470cda0b36b294359c7db9841d679174db34c4855743cf543a62f23e212745391aaf7505f345123d2685ee3b941d3de6d9b36242e5a0bcbb2914c79f045eaa6ecbbc612816b3be5d2d6796707d8125e9f851c18af015000000001352bb4a0fa2ea4cceb9ab63dd684ade5a1127bcf300a698a7193bc2", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #307: y-coordinate of the public key is small", + "NoBenchmark": false + }, + { + "Input": 
"2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f917e5f0ab5d900d3d3d7867657e5d6d36519bc54084536e7d21c336ed8001859459450c07f201faec94b82dfb322e5ac676688294aad35aa72e727ff0b19b646aabcbb2914c79f045eaa6ecbbc612816b3be5d2d6796707d8125e9f851c18af015000000001352bb4a0fa2ea4cceb9ab63dd684ade5a1127bcf300a698a7193bc2", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #308: y-coordinate of the public key is small", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91d7d70c581ae9e3f66dc6a480bf037ae23f8a1e4a2136fe4b03aa69f0ca25b35689c460f8a5a5c2bbba962c8a3ee833a413e85658e62a59e2af41d9127cc47224bcbb2914c79f045eaa6ecbbc612816b3be5d2d6796707d8125e9f851c18af015fffffffeecad44b6f05d15b33146549c2297b522a5eed8430cff596758e6c43d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #309: y-coordinate of the public key is large", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f91341c1b9ff3c83dd5e0dfa0bf68bcdf4bb7aa20c625975e5eeee34bb396266b3472b69f061b750fd5121b22b11366fad549c634e77765a017902a67099e0a4469bcbb2914c79f045eaa6ecbbc612816b3be5d2d6796707d8125e9f851c18af015fffffffeecad44b6f05d15b33146549c2297b522a5eed8430cff596758e6c43d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #310: y-coordinate of the public key is large", + "NoBenchmark": false + }, + { + "Input": 
"2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f9170bebe684cdcb5ca72a42f0d873879359bd1781a591809947628d313a3814f67aec03aca8f5587a4d535fa31027bbe9cc0e464b1c3577f4c2dcde6b2094798a9bcbb2914c79f045eaa6ecbbc612816b3be5d2d6796707d8125e9f851c18af015fffffffeecad44b6f05d15b33146549c2297b522a5eed8430cff596758e6c43d", + "Expected": "0000000000000000000000000000000000000000000000000000000000000001", + "Gas": 3450, + "Name": "wycheproof/ecdsa_webcrypto_test.json EcdsaP1363Verify SHA-256 #311: y-coordinate of the public key is large", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f9170bebe684cdcb5ca72a42f0d873879359bd1781a591809947628d313a3814f67aec03aca8f5587a4d535fa31027bbe9cc0e464b1c3577f4c2dcde6b2094798a90000000000000000000000000000000000000000000000000000000000000000fffffffeecad44b6f05d15b33146549c2297b522a5eed8430cff596758e6c43d", + "Expected": "", + "Gas": 3450, + "Name": "invalid public key x param errors", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f9170bebe684cdcb5ca72a42f0d873879359bd1781a591809947628d313a3814f67aec03aca8f5587a4d535fa31027bbe9cc0e464b1c3577f4c2dcde6b2094798a9bcbb2914c79f045eaa6ecbbc612816b3be5d2d6796707d8125e9f851c18af0150000000000000000000000000000000000000000000000000000000000000000", + "Expected": "", + "Gas": 3450, + "Name": "invalid public key y param errors", + "NoBenchmark": false + }, + { + "Input": "2f77668a9dfbf8d5848b9eeb4a7145ca94c6ed9236e4a773f6dcafa5132b2f9170bebe684cdcb5ca72a42f0d873879359bd1781a591809947628d313a3814f67aec03aca8f5587a4d535fa31027bbe9cc0e464b1c3577f4c2dcde6b2094798a900000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "Expected": "", + "Gas": 3450, + "Name": "reference point errors", + "NoBenchmark": false + } + ] +} diff --git a/tests/test_precompiles.nim b/tests/test_precompiles.nim index 
20289b3a3b..a03efb792c 100644 --- a/tests/test_precompiles.nim +++ b/tests/test_precompiles.nim @@ -25,7 +25,7 @@ import proc initAddress(i: byte): Address = result.data[19] = i -template doTest(fixture: JsonNode; vmState: BaseVMState; address: PrecompileAddresses): untyped = +template doTest(fixture: JsonNode; vmState: BaseVMState; precompile: Precompiles): untyped = for test in fixture: let expectedErr = test.hasKey("ExpectedError") @@ -41,7 +41,7 @@ template doTest(fixture: JsonNode; vmState: BaseVMState; address: PrecompileAddr nonce: 0, gasPrice: 1.GasInt, gasLimit: 1_000_000_000.GasInt, - to: Opt.some initAddress(address.byte), + to: Opt.some precompileAddrs[precompile], value: 0.u256, chainId: 1.u256, payload: if dataStr.len > 0: dataStr.hexToSeqByte else: @[] @@ -96,6 +96,7 @@ proc testFixture(fixtures: JsonNode, testStatusIMPL: var TestStatus) = of "blspairing": data.doTest(vmState, paBlsPairing) of "blsmapg1": data.doTest(vmState, paBlsMapG1) of "blsmapg2": data.doTest(vmState, paBlsMapG2) + of "p256verify": data.doTest(vmState, paP256Verify) else: echo "Unknown test vector '" & $label & "'" testStatusIMPL = SKIPPED From e7c6971e476a253cca33731e1cca2c9f0dfdb617 Mon Sep 17 00:00:00 2001 From: Advaita Saha Date: Tue, 24 Jun 2025 09:42:10 +0530 Subject: [PATCH 102/138] add: CLZ opcode (#3423) --- execution_chain/evm/interpreter/gas_costs.nim | 1 + execution_chain/evm/interpreter/op_codes.nim | 5 +- .../op_handlers/oph_arithmetic.nim | 21 +++++- .../evm/interpreter/op_handlers/oph_defs.nim | 6 ++ tests/test_op_arith.nim | 72 +++++++++++++++++++ 5 files changed, 101 insertions(+), 4 deletions(-) diff --git a/execution_chain/evm/interpreter/gas_costs.nim b/execution_chain/evm/interpreter/gas_costs.nim index c640216a3b..0f646e13be 100644 --- a/execution_chain/evm/interpreter/gas_costs.nim +++ b/execution_chain/evm/interpreter/gas_costs.nim @@ -520,6 +520,7 @@ template gasCosts(fork: EVMFork, prefix, ResultGasCostsName: untyped) = Shl: fixed GasVeryLow, Shr: 
fixed GasVeryLow, Sar: fixed GasVeryLow, + Clz: fixed GasVeryLow, # 20s: SHA3 Sha3: memExpansion `prefix gasSha3`, diff --git a/execution_chain/evm/interpreter/op_codes.nim b/execution_chain/evm/interpreter/op_codes.nim index 7c4b457921..6da4642d68 100644 --- a/execution_chain/evm/interpreter/op_codes.nim +++ b/execution_chain/evm/interpreter/op_codes.nim @@ -1,5 +1,5 @@ # Nimbus -# Copyright (c) 2018-2024 Status Research & Development GmbH +# Copyright (c) 2018-2025 Status Research & Development GmbH # Licensed under either of # * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or # http://www.apache.org/licenses/LICENSE-2.0) @@ -58,8 +58,9 @@ type Shl = 0x1B, ## Shift left Shr = 0x1C, ## Logical shift right Sar = 0x1D, ## Arithmetic shift right + Clz = 0x1E, ## Count leading zeros - Nop0x1E, Nop0x1F, ## .. + Nop0x1F, ## .. # 20s: SHA3 Sha3 = 0x20, ## Compute Keccak-256 hash. diff --git a/execution_chain/evm/interpreter/op_handlers/oph_arithmetic.nim b/execution_chain/evm/interpreter/op_handlers/oph_arithmetic.nim index befdd3ddbe..07ecacd1f4 100644 --- a/execution_chain/evm/interpreter/op_handlers/oph_arithmetic.nim +++ b/execution_chain/evm/interpreter/op_handlers/oph_arithmetic.nim @@ -1,5 +1,5 @@ # Nimbus -# Copyright (c) 2018-2024 Status Research & Development GmbH +# Copyright (c) 2018-2025 Status Research & Development GmbH # Licensed under either of # * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or # http://www.apache.org/licenses/LICENSE-2.0) @@ -277,6 +277,17 @@ proc sarOp(cpt: VmCpt): EvmResultVoid = cpt.stack.binaryWithTop(sar256) +proc clzOp(cpt: VmCpt): EvmResultVoid = + ## 0x1e, Count Leading Zeros + template clz256(top, value, toStackElem) = + if value.isZero: + toStackElem(256.u256, top) + else: + let count = value.leadingZeros() + toStackElem(count.u256, top) + + cpt.stack.unaryWithTop(clz256) + # ------------------------------------------------------------------------------ # Public, op exec table entries # 
------------------------------------------------------------------------------ @@ -460,7 +471,13 @@ const forks: VmOpConstantinopleAndLater, name: "sarOp", info: "Arithmetic shift right", - exec: sarOp)] + exec: sarOp), + + (opCode: Clz, ## CLZ (Count Leading Zeros) + forks: VmOpOsakaAndLater, ## Or a newer fork gate, if appropriate + name: "clzOp", + info: "Count leading zero bits in a 256-bit word", + exec: clzOp)] # ------------------------------------------------------------------------------ # End diff --git a/execution_chain/evm/interpreter/op_handlers/oph_defs.nim b/execution_chain/evm/interpreter/op_handlers/oph_defs.nim index 7319542e0f..860796fefc 100644 --- a/execution_chain/evm/interpreter/op_handlers/oph_defs.nim +++ b/execution_chain/evm/interpreter/op_handlers/oph_defs.nim @@ -82,6 +82,12 @@ const VmOpCancunAndLater* = VmOpShanghaiAndLater - {FkShanghai} + VmOpPragueAndLater* = + VmOpCancunAndLater - {FkCancun} + + VmOpOsakaAndLater* = + VmOpPragueAndLater - {FkPrague} + # ------------------------------------------------------------------------------ # End # ------------------------------------------------------------------------------ diff --git a/tests/test_op_arith.nim b/tests/test_op_arith.nim index 5744d71edf..fee4cbe21e 100644 --- a/tests/test_op_arith.nim +++ b/tests/test_op_arith.nim @@ -463,4 +463,76 @@ proc opArithMain*() = Byte stack: "0x0000000000000000000000000000000000000000000000000000000000000000" + assembler: + title: "Clz_Zero" + code: + Push32 "0x0000000000000000000000000000000000000000000000000000000000000000" + Clz + stack: "0x0100" # 256 in hex + fork: Osaka + + assembler: + title: "Clz_One" + code: + Push32 "0x0000000000000000000000000000000000000000000000000000000000000001" + Clz + stack: "0x00FF" # 255 in hex + fork: Osaka + + assembler: + title: "Clz_MsbSet" + code: + Push32 "0x8000000000000000000000000000000000000000000000000000000000000000" + Clz + stack: "0x00" + fork: Osaka + + assembler: + title: "Clz_OneBitBelowMsb" + 
code: + Push32 "0x4000000000000000000000000000000000000000000000000000000000000000" + Clz + stack: "0x01" + fork: Osaka + + assembler: + title: "Clz_7Fs" + code: + Push32 "0x7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" + Clz + stack: "0x01" + fork: Osaka + + assembler: + title: "Clz_Full" + code: + Push32 "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" + Clz + stack: "0x00" + fork: Osaka + + assembler: + title: "Clz_Zero_32Bytes" + fork: Osaka + code: + Push32 "0x0000000000000000000000000000000000000000000000000000000000000000" + Clz + stack: "0x100" + + assembler: + title: "Clz_Zero_Push1" + fork: Osaka + code: + Push1 "0x00" + Clz + stack: "0x100" + + assembler: + title: "Clz_Zero_PushN_FewZeroBytes" + fork: Osaka + code: + Push5 "0x0000000000" + Clz + stack: "0x100" + opArithMain() From 6228fb96026f2e8085d11abe372b1bd3020cd887 Mon Sep 17 00:00:00 2001 From: andri lim Date: Tue, 24 Jun 2025 17:26:00 +0700 Subject: [PATCH 103/138] Reduce declared/import but not used (#3424) --- execution_chain/evm/secp256r1verify.nim | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/execution_chain/evm/secp256r1verify.nim b/execution_chain/evm/secp256r1verify.nim index c1f7568327..b0658e3b0c 100644 --- a/execution_chain/evm/secp256r1verify.nim +++ b/execution_chain/evm/secp256r1verify.nim @@ -1,5 +1,5 @@ # Nimbus -# Copyright (c) 2018-2025 Status Research & Development GmbH +# Copyright (c) 2025 Status Research & Development GmbH # Licensed under either of # * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or # http://www.apache.org/licenses/LICENSE-2.0) @@ -10,7 +10,7 @@ import libp2p/crypto/ecnist, - bearssl/[ec, hash] + bearssl/ec proc isInfinityByte*(data: openArray[byte]): bool = ## Check if all values in ``data`` are zero. @@ -28,9 +28,7 @@ proc verifyRaw*[T: byte | char]( ## Return ``true`` if message verification succeeded, ``false`` if ## verification failed. 
doAssert((not isNil(sig)) and (not isNil(pubkey))) - var hc: HashCompatContext - var hash: array[32, byte] - var impl = ecGetDefault() + let impl = ecGetDefault() if pubkey.key.curve in EcSupportedCurvesCint: let res = ecdsaI31VrfyRaw( impl, From 5775210dfbd71d375acd588681f23edce5d6f50c Mon Sep 17 00:00:00 2001 From: kdeme <7857583+kdeme@users.noreply.github.com> Date: Wed, 25 Jun 2025 18:42:27 +0200 Subject: [PATCH 104/138] Bump nim-eth module (#3426) --- vendor/nim-eth | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vendor/nim-eth b/vendor/nim-eth index fb3fde7832..ac94813ab3 160000 --- a/vendor/nim-eth +++ b/vendor/nim-eth @@ -1 +1 @@ -Subproject commit fb3fde7832f12dea1673da0072a87fac0be4685a +Subproject commit ac94813ab385a490bac40affbb351593edaf5d9f From 25ad87b573b49eeff520dbb4750028f067f6b4e1 Mon Sep 17 00:00:00 2001 From: andri lim Date: Fri, 27 Jun 2025 19:59:48 +0700 Subject: [PATCH 105/138] eth_sendRawTransaction return inner tx hash and not the pooled tx hash (#3430) --- execution_chain/rpc/server_api.nim | 2 +- tests/test_rpc.nim | 49 +++++++++++++++++++++++++++++- 2 files changed, 49 insertions(+), 2 deletions(-) diff --git a/execution_chain/rpc/server_api.nim b/execution_chain/rpc/server_api.nim index cd7a6affd4..5258e69470 100644 --- a/execution_chain/rpc/server_api.nim +++ b/execution_chain/rpc/server_api.nim @@ -317,7 +317,7 @@ proc setupServerAPI*(api: ServerAPIRef, server: RpcServer, ctx: EthContext) = ## Note: Use eth_getTransactionReceipt to get the contract address, after the transaction was mined, when you created a contract. 
let pooledTx = decodePooledTx(txBytes) - txHash = computeRlpHash(pooledTx) + txHash = computeRlpHash(pooledTx.tx) sender = pooledTx.tx.recoverSender().get() api.txPool.addTx(pooledTx).isOkOr: diff --git a/tests/test_rpc.nim b/tests/test_rpc.nim index 3210a98469..b993abbada 100644 --- a/tests/test_rpc.nim +++ b/tests/test_rpc.nim @@ -7,7 +7,7 @@ import chronicles, - std/[json, typetraits], + std/[json, typetraits, math], asynctest, web3/eth_api, stew/byteutils, @@ -19,6 +19,9 @@ import ../execution_chain/db/[ledger, storage_types], ../execution_chain/sync/wire_protocol, ../execution_chain/core/[tx_pool, chain, pow/difficulty], + ../execution_chain/core/pooled_txs_rlp, + ../execution_chain/core/lazy_kzg as kzg, + ../execution_chain/core/eip4844, ../execution_chain/utils/utils, ../execution_chain/[common, rpc], ../execution_chain/rpc/rpc_types, @@ -147,6 +150,43 @@ func makeTx( inc env.nonce signTransaction(tx, signerKey, eip155 = true) +proc makeBlobTx(env: var TestEnv, nonce: int): PooledTransaction = + const + source = address"0x0000000000000000000000000000000000000001" + storageKey= default(Bytes32) + accesses = @[AccessPair(address: source, storageKeys: @[storageKey])] + blob = default(kzg.KzgBlob) + blobs = @[pooled_txs.KzgBlob(blob.bytes)] + + let + ctx = env.ctx + acc = ctx.am.getAccount(signer).tryGet() + commitment = blobToKzgCommitment(blob).expect("good blob") + proof = computeBlobKzgProof(blob, commitment).expect("good commitment") + digest = kzgToVersionedHash(commitment.bytes) + + utx = Transaction( + txType: TxEip4844, + chainId: env.chainId, + nonce: AccountNonce(nonce), + gasPrice: 70000.GasInt, + gasLimit: 123457.GasInt, + to: Opt.some(zeroAddress), + maxPriorityFeePerGas:GasInt(10 ^ 9), + maxFeePerGas: GasInt(10 ^ 9), + maxFeePerBlobGas: 1.u256, + accessList: accesses, + versionedHashes: @[digest]) + + let tx = signTransaction(utx, acc.privateKey, eip155 = true) + + PooledTransaction( + tx: tx, + blobsBundle: BlobsBundle( + blobs: blobs, + 
commitments: @[pooled_txs.KzgCommitment(commitment.bytes)], + proofs: @[pooled_txs.KzgProof(proof.bytes)])) + proc setupEnv(envFork: HardFork = MergeFork): TestEnv = doAssert(envFork >= MergeFork) @@ -416,6 +456,13 @@ proc rpcMain*() = const expHash = hash32"0xeea79669dd904921d203fb720c7228f5c7854e5a768248f494f36fa68c83c191" check txHash == expHash + let + blobTx = env.makeBlobTx(4) + blobTxBytes = rlp.encode(blobTx) + blobTxHash = await client.eth_sendRawTransaction(blobTxBytes) + expected = computeRlpHash(blobTx.tx) # use inner tx hash + check expected == blobTxHash # should return inner tx hash + test "eth_call": let ec = TransactionArgs( `from`: Opt.some(signer), From fae85d9342b620eeaa382612aaf574d9d8f64b43 Mon Sep 17 00:00:00 2001 From: andri lim Date: Sat, 28 Jun 2025 23:05:31 +0700 Subject: [PATCH 106/138] Increase rpc http server chunk size to 1 MBytes (#3417) --- execution_chain/rpc.nim | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/execution_chain/rpc.nim b/execution_chain/rpc.nim index b10d7d56b9..3109da9b67 100644 --- a/execution_chain/rpc.nim +++ b/execution_chain/rpc.nim @@ -31,7 +31,7 @@ export {.push gcsafe, raises: [].} -const DefaultChunkSize = 8192 +const DefaultChunkSize = 1024*1024 func serverEnabled(conf: NimbusConf): bool = conf.httpServerEnabled or From 0491e0e644767ad8a2e60f67a86f88fc6daca4bd Mon Sep 17 00:00:00 2001 From: andri lim Date: Mon, 30 Jun 2025 16:24:35 +0700 Subject: [PATCH 107/138] Transaction hashes broadcast handler: use inner tx hash + PooledTransaction size (#3432) * Transaction hashes broadcast handler: use inner tx hash + PooledTransaction size * Improves logging * Fix --- .../beacon/api_handler/api_forkchoice.nim | 10 ++++--- .../chain/forked_chain/chain_serialize.nim | 14 ++++++++++ .../core/executor/process_block.nim | 9 +++++-- execution_chain/core/tx_pool/tx_desc.nim | 27 +++++++++++++------ .../sync/wire_protocol/broadcast.nim | 14 +++++----- 5 files changed, 54 insertions(+), 20 deletions(-) 
diff --git a/execution_chain/beacon/api_handler/api_forkchoice.nim b/execution_chain/beacon/api_handler/api_forkchoice.nim index aa183db0ea..ead419e28b 100644 --- a/execution_chain/beacon/api_handler/api_forkchoice.nim +++ b/execution_chain/beacon/api_handler/api_forkchoice.nim @@ -112,8 +112,8 @@ proc forkchoiceUpdated*(ben: BeaconEngineRef, number = header.number, hash = headHash.short, base = chain.baseNumber, - finHash= update.finalizedBlockHash, - safe = update.safeBlockHash, + finHash= update.finalizedBlockHash.short, + safe = update.safeBlockHash.short, pendingFCU = chain.finHash.short, resolvedFin= chain.resolvedFinNumber @@ -219,9 +219,11 @@ proc forkchoiceUpdated*(ben: BeaconEngineRef, info "Fork choice updated", requested = header.number, - hash = headHash.short, head = chain.latestNumber, + hashHash = headHash.short, base = chain.baseNumber, - baseHash = chain.baseHash.short + baseHash = chain.baseHash.short, + finalizedHash = finalizedBlockHash.short, + resolvedFin = chain.resolvedFinNumber return validFCU(Opt.none(Bytes8), headHash) diff --git a/execution_chain/core/chain/forked_chain/chain_serialize.nim b/execution_chain/core/chain/forked_chain/chain_serialize.nim index 05b06b018d..d291114b78 100644 --- a/execution_chain/core/chain/forked_chain/chain_serialize.nim +++ b/execution_chain/core/chain/forked_chain/chain_serialize.nim @@ -202,8 +202,22 @@ proc serialize*(fc: ForkedChainRef, txFrame: CoreDbTxRef): Result[void, CoreDbEr for i, brc in fc.branches: brc.index = uint i ?txFrame.put(FcStateKey.toOpenArray, rlp.encode(fc)) + var numBlocks = 0 for i, brc in fc.branches: + numBlocks += brc.len ?txFrame.put(branchIndexKey(i), rlp.encode(brc)) + + info "Blocks DAG written to database", + base=fc.baseBranch.tailNumber, + baseHash=fc.baseBranch.tailHash.short, + latest=fc.activeBranch.headNumber, + latestHash=fc.activeBranch.headHash.short, + head=fc.fcuHead.number, + headHash=fc.fcuHead.hash.short, + finalized=fc.latestFinalizedBlockNumber, + 
finalizedHash=fc.pendingFCU.short, + blocksInMemory=numBlocks + ok() proc deserialize*(fc: ForkedChainRef): Result[void, string] = diff --git a/execution_chain/core/executor/process_block.nim b/execution_chain/core/executor/process_block.nim index fe4f4cc99f..a46ccd69da 100644 --- a/execution_chain/core/executor/process_block.nim +++ b/execution_chain/core/executor/process_block.nim @@ -184,6 +184,7 @@ proc procBlkPreamble( proc procBlkEpilogue( vmState: BaseVMState, blk: Block, + prevStateRoot: Hash32, skipValidation: bool, skipReceipts: bool, skipStateRootCheck: bool, @@ -220,7 +221,8 @@ proc procBlkEpilogue( parentHash = header.parentHash, expected = header.stateRoot, actual = stateRoot, - arrivedFrom = vmState.parent.stateRoot + arrivedFrom = prevStateRoot, + parentStateRoot = vmState.parent.stateRoot return err("stateRoot mismatch, expect: " & $header.stateRoot & ", got: " & $stateRoot) @@ -286,13 +288,16 @@ proc processBlock*( # cases - since each block is bounded in the amount of memory needed, we can # run collection once per block instead. deferGc: + # When there is state root mismatch, we want to show where it arrived from + # using actual value stored in DB/txFrame beside showing parent.stateRoot. 
+ let prevStateRoot = vmState.ledger.getStateRoot() ?vmState.procBlkPreamble(blk, skipValidation, skipReceipts, skipUncles, taskpool) # EIP-3675: no reward for miner in POA/POS if not vmState.com.proofOfStake(blk.header, vmState.ledger.txFrame): vmState.calculateReward(blk.header, blk.uncles) - ?vmState.procBlkEpilogue(blk, skipValidation, skipReceipts, skipStateRootCheck) + ?vmState.procBlkEpilogue(blk, prevStateRoot, skipValidation, skipReceipts, skipStateRootCheck) ok() diff --git a/execution_chain/core/tx_pool/tx_desc.nim b/execution_chain/core/tx_pool/tx_desc.nim index cc841b2484..529c4d23f6 100644 --- a/execution_chain/core/tx_pool/tx_desc.nim +++ b/execution_chain/core/tx_pool/tx_desc.nim @@ -171,7 +171,7 @@ proc getBalance(xp: TxPoolRef; account: Address): UInt256 = proc getNonce(xp: TxPoolRef; account: Address): AccountNonce = xp.vmState.ledger.getNonce(account) -proc classifyValid(xp: TxPoolRef; tx: Transaction, sender: Address): bool = +proc classifyValid(xp: TxPoolRef; tx: Transaction, sender: Address, blobsBundle: BlobsBundle): bool = if tx.gasLimit > TX_GAS_LIMIT: debug "Invalid transaction: Gas limit too high", @@ -232,12 +232,23 @@ proc classifyValid(xp: TxPoolRef; tx: Transaction, sender: Address): bool = debug "Invalid transaction: EIP-1559 transaction with maxFeePerGas lower than 1" return false - debug "Valid transaction", - txType = tx.txType, - sender = sender, - gasLimit = tx.gasLimit, - gasPrice = tx.gasPrice, - value = tx.value + if blobsBundle.isNil: + debug "Valid transaction", + txType = tx.txType, + sender = sender, + gasLimit = tx.gasLimit, + gasPrice = tx.gasPrice, + value = tx.value + else: + debug "Valid transaction", + txType = tx.txType, + sender = sender, + gasLimit = tx.gasLimit, + gasPrice = tx.gasPrice, + value = tx.value, + numBlobs = blobsBundle.blobs.len, + wrapperVersion = blobsBundle.wrapperVersion + true proc validateBlobTransactionWrapper(tx: PooledTransaction, fork: EVMFork): @@ -397,7 +408,7 @@ proc addTx*(xp: 
TxPoolRef, ptx: PooledTransaction): Result[void, TxError] = sender = sender return err(txErrorNonceTooSmall) - if not xp.classifyValid(ptx.tx, sender): + if not xp.classifyValid(ptx.tx, sender, ptx.blobsBundle): return err(txErrorTxInvalid) if xp.idTab.len >= MAX_POOL_SIZE: diff --git a/execution_chain/sync/wire_protocol/broadcast.nim b/execution_chain/sync/wire_protocol/broadcast.nim index 31410ad7c2..a91b7683e2 100644 --- a/execution_chain/sync/wire_protocol/broadcast.nim +++ b/execution_chain/sync/wire_protocol/broadcast.nim @@ -109,7 +109,7 @@ proc handleTransactionsBroadcast*(wire: EthWireRef, if packet.transactions.len == 0: return - debug "received new transactions", + debug "Received new transactions", number = packet.transactions.len wire.reqisterAction("TxPool consume incoming transactions"): @@ -135,7 +135,7 @@ proc handleTxHashesBroadcast*(wire: EthWireRef, if packet.txHashes.len == 0: return - debug "received new pooled tx hashes", + debug "Received new pooled tx hashes", hashes = packet.txHashes.len if packet.txHashes.len != packet.txSizes.len or @@ -186,11 +186,11 @@ proc handleTxHashesBroadcast*(wire: EthWireRef, try: res = await peer.getPooledTransactions(msg) except EthP2PError as exc: - debug "request pooled transactions failed", + debug "Request pooled transactions failed", msg=exc.msg if res.isNone: - debug "request pooled transactions get nothing" + debug "Request pooled transactions get nothing" return let @@ -200,7 +200,9 @@ proc handleTxHashesBroadcast*(wire: EthWireRef, # If we receive any blob transactions missing sidecars, or with # sidecars that don't correspond to the versioned hashes reported # in the header, disconnect from the sending peer. 
- let (size, hash) = getEncodedLengthAndHash(tx) + let + size = getEncodedLength(tx) # PooledTransacion: Transaction + blobsBundle size + hash = computeRlpHash(tx.tx) # Only inner tx hash map.withValue(hash, val) do: if tx.tx.txType.byte != val.txType: debug "Protocol Breach: Received transaction with type differ from announced", @@ -282,7 +284,7 @@ proc tickerLoop*(wire: EthWireRef) {.async: (raises: [CancelledError]).} = try: await peer.blockRangeUpdate(packet) except EthP2PError as exc: - debug "broadcast block range update failed", + debug "Broadcast block range update failed", msg=exc.msg awaitQuota(wire, blockRangeUpdateCost, "broadcast blockRangeUpdate") From 40233e5873ee6417099449eafdb81871eadafbb7 Mon Sep 17 00:00:00 2001 From: andri lim Date: Mon, 30 Jun 2025 16:25:01 +0700 Subject: [PATCH 108/138] Optimize getPayloadBodiesByHash/ByRange (#3433) * Optimize getPayloadBodiesByHash/ByRange Optimization area: - Optimize withdrawals sequence read and write from database. - Avoid conversion from rlp to tx to rlp again for TypedTransaction. - Decode web3 types direcly from rlp instead via nim-eth types for withdrawals. - Avoid conversion from nim-eth types by using web3 types in FC module if necessary. - Fix bug found in getPayloadBodiesByRange pulling excessive bodies from FC module. 
* Add fallback to old withdrawals format * Cosmetics --- .../beacon/api_handler/api_getbodies.nim | 49 ++++------ execution_chain/core/chain/forked_chain.nim | 66 ++++++++++++-- execution_chain/db/core_db/core_apps.nim | 49 +++++++--- execution_chain/db/payload_body_db.nim | 89 +++++++++++++++++++ execution_chain/db/storage_types.nim | 6 ++ 5 files changed, 209 insertions(+), 50 deletions(-) create mode 100644 execution_chain/db/payload_body_db.nim diff --git a/execution_chain/beacon/api_handler/api_getbodies.nim b/execution_chain/beacon/api_handler/api_getbodies.nim index c041fca754..985e2f2502 100644 --- a/execution_chain/beacon/api_handler/api_getbodies.nim +++ b/execution_chain/beacon/api_handler/api_getbodies.nim @@ -1,5 +1,5 @@ # Nimbus -# Copyright (c) 2023-2024 Status Research & Development GmbH +# Copyright (c) 2023-2025 Status Research & Development GmbH # Licensed under either of # * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE)) # * MIT license ([LICENSE-MIT](LICENSE-MIT)) @@ -8,7 +8,6 @@ # those terms. 
import - std/[options, typetraits], eth/common/blocks, ../web3_eth_conv, ../beacon_engine, @@ -20,33 +19,21 @@ import const maxBodyRequest = 32 -func toPayloadBody(blk: Block): ExecutionPayloadBodyV1 {.raises:[].} = - var wds: seq[WithdrawalV1] - if blk.withdrawals.isSome: - for w in blk.withdrawals.get: - wds.add w3Withdrawal(w) - - ExecutionPayloadBodyV1( - transactions: w3Txs(blk.transactions), - # pre Shanghai block return null withdrawals - # post Shanghai block return at least empty slice - withdrawals: if blk.withdrawals.isSome: - Opt.some(wds) - else: - Opt.none(seq[WithdrawalV1]) - ) - proc getPayloadBodiesByHash*(ben: BeaconEngineRef, hashes: seq[Hash32]): seq[Opt[ExecutionPayloadBodyV1]] = if hashes.len > maxBodyRequest: raise tooLargeRequest("request exceeds max allowed " & $maxBodyRequest) + var list = newSeqOfCap[Opt[ExecutionPayloadBodyV1]](hashes.len) + for h in hashes: - let blk = ben.chain.blockByHash(h).valueOr: - result.add Opt.none(ExecutionPayloadBodyV1) + var body = ben.chain.payloadBodyV1ByHash(h).valueOr: + list.add Opt.none(ExecutionPayloadBodyV1) continue - result.add Opt.some(toPayloadBody(blk)) + list.add Opt.some(move(body)) + + move(list) proc getPayloadBodiesByRange*(ben: BeaconEngineRef, start: uint64, count: uint64): @@ -64,21 +51,23 @@ proc getPayloadBodiesByRange*(ben: BeaconEngineRef, last = start+count-1 if start > ben.chain.latestNumber: - # requested range beyond the latest known block + # requested range beyond the latest known block. return if last > ben.chain.latestNumber: last = ben.chain.latestNumber - # get bodies from database + var list = newSeqOfCap[Opt[ExecutionPayloadBodyV1]](last-start) + + # get bodies from database. 
for bn in start..min(last, ben.chain.baseNumber): - let blk = ben.chain.blockByNumber(bn).valueOr: - result.add Opt.none(ExecutionPayloadBodyV1) + var body = ben.chain.payloadBodyV1ByNumber(bn).valueOr: + list.add Opt.none(ExecutionPayloadBodyV1) continue - result.add Opt.some(blk.toPayloadBody) + list.add Opt.some(move(body)) + # get bodies from cache in FC module. if last > ben.chain.baseNumber: - let blocks = ben.chain.blockFromBaseTo(last) - for i in countdown(blocks.len-1, 0): - if blocks[i].header.number >= start: - result.add Opt.some(toPayloadBody(blocks[i])) + ben.chain.payloadBodyV1FromBaseTo(last, list) + + move(list) diff --git a/execution_chain/core/chain/forked_chain.nim b/execution_chain/core/chain/forked_chain.nim index 0d1653dd2d..ac5b20010b 100644 --- a/execution_chain/core/chain/forked_chain.nim +++ b/execution_chain/core/chain/forked_chain.nim @@ -11,12 +11,12 @@ {.push raises: [].} import + std/[tables, algorithm], chronicles, results, chronos, - std/[tables, algorithm], ../../common, - ../../db/[core_db, fcu_db], + ../../db/[core_db, fcu_db, payload_body_db], ../../evm/types, ../../evm/state, ../validate, @@ -28,6 +28,7 @@ import block_quarantine] from std/sequtils import mapIt +from web3/engine_api_types import ExecutionPayloadBodyV1 logScope: topics = "forked chain" @@ -892,6 +893,47 @@ proc blockByHash*(c: ForkedChainRef, blockHash: Hash32): Result[Block, string] = return c.portal.getBlockByHash(blockHash) blk +proc payloadBodyV1ByHash*(c: ForkedChainRef, blockHash: Hash32): Result[ExecutionPayloadBodyV1, string] = + c.hashToBlock.withValue(blockHash, loc): + return ok(toPayloadBody(loc[].blk)) + + let header = ?c.baseTxFrame.getBlockHeader(blockHash) + var blk = c.baseTxFrame.getExecutionPayloadBodyV1(header) + + # Serves portal data if block not found in db + if blk.isErr or (blk.get.transactions.len == 0 and header.transactionsRoot != zeroHash32): + if c.isPortalActive: + let blk = ?c.portal.getBlockByHash(blockHash) + return 
ok(toPayloadBody(blk)) + + move(blk) + +proc payloadBodyV1ByNumber*(c: ForkedChainRef, number: BlockNumber): Result[ExecutionPayloadBodyV1, string] = + if number > c.activeBranch.headNumber: + return err("Requested block number not exists: " & $number) + + if number <= c.baseBranch.tailNumber: + let + header = ?c.baseTxFrame.getBlockHeader(number) + blk = c.baseTxFrame.getExecutionPayloadBodyV1(header) + + # Txs not there in db - Happens during era1/era import, when we don't store txs and receipts + if blk.isErr or (blk.get.transactions.len == 0 and header.transactionsRoot != emptyRoot): + # Serves portal data if block not found in database + if c.isPortalActive: + let blk = ?c.portal.getBlockByNumber(number) + return ok(toPayloadBody(blk)) + + return blk + + var branch = c.activeBranch + while not branch.isNil: + if number >= branch.tailNumber: + return ok(toPayloadBody(branch.blocks[number - branch.tailNumber].blk)) + branch = branch.parent + + err("Block not found, number = " & $number) + proc blockByNumber*(c: ForkedChainRef, number: BlockNumber): Result[Block, string] = if number > c.activeBranch.headNumber: return err("Requested block number not exists: " & $number) @@ -929,14 +971,26 @@ proc receiptsByBlockHash*(c: ForkedChainRef, blockHash: Hash32): Result[seq[Stor c.baseTxFrame.getReceipts(header.receiptsRoot) -func blockFromBaseTo*(c: ForkedChainRef, number: BlockNumber): seq[Block] = +func payloadBodyV1FromBaseTo*(c: ForkedChainRef, + last: BlockNumber, + list: var seq[Opt[ExecutionPayloadBodyV1]]) = # return block in reverse order - var branch = c.activeBranch + var + branch = c.activeBranch + branches = newSeqOfCap[BranchRef](c.branches.len) + while not branch.isNil: - for i in countdown(branch.len-1, 0): - result.add(branch.blocks[i].blk) + branches.add(branch) branch = branch.parent + for i in countdown(branches.len-1, 0): + branch = branches[i] + for y in 0.. 
last: + return + list.add Opt.some(toPayloadBody(bd.blk)) + func equalOrAncestorOf*(c: ForkedChainRef, blockHash: Hash32, childHash: Hash32): bool = if blockHash == childHash: return true diff --git a/execution_chain/db/core_db/core_apps.nim b/execution_chain/db/core_db/core_apps.nim index 29c7e1ceb5..966d13f804 100644 --- a/execution_chain/db/core_db/core_apps.nim +++ b/execution_chain/db/core_db/core_apps.nim @@ -111,8 +111,9 @@ iterator getBlockTransactionHashes*( iterator getWithdrawals*( db: CoreDbTxRef; + T: type; withdrawalsRoot: Hash32; - ): Withdrawal {.raises: [RlpError].} = + ): T {.raises: [RlpError].} = block body: if withdrawalsRoot == EMPTY_ROOT_HASH: break body @@ -124,7 +125,7 @@ iterator getWithdrawals*( break body if data.len == 0: break body - yield rlp.decode(data, Withdrawal) + yield rlp.decode(data, T) iterator getReceipts*( db: CoreDbTxRef; @@ -355,21 +356,42 @@ proc persistWithdrawals*( const info = "persistWithdrawals()" if withdrawals.len == 0: return - for idx, wd in withdrawals: - let key = hashIndexKey(withdrawalsRoot, idx.uint16) - db.put(key, rlp.encode(wd)).isOkOr: - warn info, idx, error=($$error) + + db.put(withdrawalsKey(withdrawalsRoot).toOpenArray, + rlp.encode(withdrawals)).isOkOr: + warn info, error=($$error) return + when false: + # Ol withdrawals format + # Obsolete. 
Keep it for reference + for idx, wd in withdrawals: + let key = hashIndexKey(withdrawalsRoot, idx.uint16) + db.put(key, rlp.encode(wd)).isOkOr: + warn info, idx, error=($$error) + return + proc getWithdrawals*( db: CoreDbTxRef; withdrawalsRoot: Hash32 ): Result[seq[Withdrawal], string] = + const info = "getWithdrawals()" + wrapRlpException "getWithdrawals": - var res: seq[Withdrawal] - for wd in db.getWithdrawals(withdrawalsRoot): - res.add(wd) - return ok(res) + var list: seq[Withdrawal] + let res = db.get(withdrawalsKey(withdrawalsRoot).toOpenArray) + + if res.isErr: + if res.error.error != KvtNotFound: + warn info, withdrawalsRoot, error=($$res.error) + else: + # Fallback to old withdrawals format + for wd in db.getWithdrawals(Withdrawal, withdrawalsRoot): + list.add(wd) + else: + list = rlp.decode(res.value, seq[Withdrawal]) + + return ok(move(list)) proc getTransactions*( db: CoreDbTxRef; @@ -379,7 +401,7 @@ proc getTransactions*( var res: seq[Transaction] for encodedTx in db.getBlockTransactionData(txRoot): res.add(rlp.decode(encodedTx, Transaction)) - return ok(res) + return ok(move(res)) proc getBlockBody*( db: CoreDbTxRef; @@ -393,7 +415,7 @@ proc getBlockBody*( if header.withdrawalsRoot.isSome: let wds = ?db.getWithdrawals(header.withdrawalsRoot.get) body.withdrawals = Opt.some(wds) - return ok(body) + return ok(move(body)) proc getBlockBody*( db: CoreDbTxRef; @@ -417,8 +439,7 @@ proc getEthBlock*( ): Result[EthBlock, string] = var header = ?db.getBlockHeader(blockNumber) - headerHash = header.computeBlockHash - blockBody = ?db.getBlockBody(headerHash) + blockBody = ?db.getBlockBody(header) ok(EthBlock.init(move(header), move(blockBody))) diff --git a/execution_chain/db/payload_body_db.nim b/execution_chain/db/payload_body_db.nim new file mode 100644 index 0000000000..92352fc307 --- /dev/null +++ b/execution_chain/db/payload_body_db.nim @@ -0,0 +1,89 @@ +# nimbus-execution-client +# Copyright (c) 2025 Status Research & Development GmbH +# Licensed under 
either of +# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or +# http://www.apache.org/licenses/LICENSE-2.0) +# * MIT license ([LICENSE-MIT](LICENSE-MIT) or +# http://opensource.org/licenses/MIT) +# at your option. This file may not be copied, modified, or distributed except +# according to those terms. + +{.push gcsafe, raises: [].} + +import + chronicles, + web3/engine_api_types, + eth/common/blocks_rlp, + eth/common/hashes, + ./core_db/base, + ./core_db/core_apps, + ./storage_types, + ../constants, + ../beacon/web3_eth_conv + +# ------------------------------------------------------------------------------ +# Private helpers +# ------------------------------------------------------------------------------ + +template wrapRlpException(info: static[string]; code: untyped) = + try: + code + except RlpError as e: + return err(info & ": " & e.msg) + +proc read(rlp: var Rlp, T: type Quantity): T {.raises: [RlpError].} = + rlp.read(uint64).Quantity + +# ------------------------------------------------------------------------------ +# Public functions +# ------------------------------------------------------------------------------ + +proc getExecutionPayloadBodyV1*( + db: CoreDbTxRef; + header: Header; + ): Result[ExecutionPayloadBodyV1, string] = + const info = "getExecutionPayloadBodyV1()" + var body: ExecutionPayloadBodyV1 + + for encodedTx in db.getBlockTransactionData(header.txRoot): + body.transactions.add TypedTransaction(encodedTx) + + if header.withdrawalsRoot.isSome: + let withdrawalsRoot = header.withdrawalsRoot.value + if withdrawalsRoot == emptyRoot: + var wds: seq[WithdrawalV1] + body.withdrawals = Opt.some(wds) + return ok(move(body)) + + wrapRlpException info: + let bytes = db.get(withdrawalsKey(withdrawalsRoot).toOpenArray).valueOr: + if error.error != KvtNotFound: + warn info, withdrawalsRoot, error=($$error) + else: + # Fallback to old withdrawals format + var wds: seq[WithdrawalV1] + for wd in db.getWithdrawals(WithdrawalV1, 
withdrawalsRoot): + wds.add(wd) + body.withdrawals = Opt.some(wds) + return ok(move(body)) + + var list = rlp.decode(bytes, seq[WithdrawalV1]) + body.withdrawals = Opt.some(move(list)) + + ok(move(body)) + +func toPayloadBody*(blk: Block): ExecutionPayloadBodyV1 {.raises:[].} = + var wds: seq[WithdrawalV1] + if blk.withdrawals.isSome: + for w in blk.withdrawals.get: + wds.add w3Withdrawal(w) + + ExecutionPayloadBodyV1( + transactions: w3Txs(blk.transactions), + # pre Shanghai block return null withdrawals + # post Shanghai block return at least empty slice + withdrawals: if blk.withdrawals.isSome: + Opt.some(wds) + else: + Opt.none(seq[WithdrawalV1]) + ) diff --git a/execution_chain/db/storage_types.nim b/execution_chain/db/storage_types.nim index 7ef00d4b92..96631b763f 100644 --- a/execution_chain/db/storage_types.nim +++ b/execution_chain/db/storage_types.nim @@ -27,6 +27,7 @@ type fcuNumAndHash = 8 fcState = 9 beaconHeader = 10 + wdKey = 11 DbKey* = object # The first byte stores the key type. The rest are key-specific values @@ -98,6 +99,11 @@ func beaconHeaderKey*(u: BlockNumber): DbKey = func fcStateKey*(u: uint64): DbKey {.inline.} = uint64KeyImpl(fcState) +func withdrawalsKey*(h: Hash32): DbKey {.inline.} = + result.data[0] = byte ord(wdKey) + result.data[1 .. 
32] = h.data + result.dataEndPos = uint8 32 + template toOpenArray*(k: DbKey): openArray[byte] = k.data.toOpenArray(0, int(k.dataEndPos)) From fcc219987c688ff5f5391f44ae1aa75c97e91314 Mon Sep 17 00:00:00 2001 From: andri lim Date: Mon, 30 Jun 2025 19:43:18 +0700 Subject: [PATCH 109/138] Revert: don't call extra stateroot computation in procesBlock (#3435) --- execution_chain/core/executor/process_block.nim | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/execution_chain/core/executor/process_block.nim b/execution_chain/core/executor/process_block.nim index a46ccd69da..a4cfa9a390 100644 --- a/execution_chain/core/executor/process_block.nim +++ b/execution_chain/core/executor/process_block.nim @@ -184,7 +184,6 @@ proc procBlkPreamble( proc procBlkEpilogue( vmState: BaseVMState, blk: Block, - prevStateRoot: Hash32, skipValidation: bool, skipReceipts: bool, skipStateRootCheck: bool, @@ -221,7 +220,6 @@ proc procBlkEpilogue( parentHash = header.parentHash, expected = header.stateRoot, actual = stateRoot, - arrivedFrom = prevStateRoot, parentStateRoot = vmState.parent.stateRoot return err("stateRoot mismatch, expect: " & $header.stateRoot & ", got: " & $stateRoot) @@ -288,16 +286,13 @@ proc processBlock*( # cases - since each block is bounded in the amount of memory needed, we can # run collection once per block instead. deferGc: - # When there is state root mismatch, we want to show where it arrived from - # using actual value stored in DB/txFrame beside showing parent.stateRoot. 
- let prevStateRoot = vmState.ledger.getStateRoot() ?vmState.procBlkPreamble(blk, skipValidation, skipReceipts, skipUncles, taskpool) # EIP-3675: no reward for miner in POA/POS if not vmState.com.proofOfStake(blk.header, vmState.ledger.txFrame): vmState.calculateReward(blk.header, blk.uncles) - ?vmState.procBlkEpilogue(blk, prevStateRoot, skipValidation, skipReceipts, skipStateRootCheck) + ?vmState.procBlkEpilogue(blk, skipValidation, skipReceipts, skipStateRootCheck) ok() From f268688da8d296f91d518bcc6ec7c66c85723fd1 Mon Sep 17 00:00:00 2001 From: Jacek Sieka Date: Tue, 1 Jul 2025 14:49:28 +0200 Subject: [PATCH 110/138] aristo: fold Adm column family into Vtx (#3383) Each column family in rocksdb requires its own set of SST files that must be kept open, cached etc. Further, wal files are [deleted]() only once all column families referencing them have been flushed meaning that low-volume families like Adm can keep them around far longer than makes sense. Adm contains only two useful metadata entries and therefore it doesn't really make sense to allocate an entire CF for it. Consolidating Adm into Vtx also makes it easier to reason about the internal consistency of the Vtx table - even though rocksdb ensures atomic cross-cf writes via the wal, it requires using a special batch write API that introduces its own overhead. With this change, don't need to rely on this particular rocksdb feature to maintain atomic consistency within Vtx. 
Databases using the old schema are supported but rollback is not (ie the old metadata format/CF is read but not written) --- execution_chain/db/aristo/aristo_blobify.nim | 24 +++++-- execution_chain/db/aristo/aristo_desc.nim | 2 - .../db/aristo/aristo_desc/desc_backend.nim | 6 -- .../db/aristo/aristo_desc/desc_structural.nim | 8 ++- execution_chain/db/aristo/aristo_get.nim | 8 +-- .../db/aristo/aristo_init/init_common.nim | 2 +- .../db/aristo/aristo_init/memory_db.nim | 22 +----- .../db/aristo/aristo_init/rocks_db.nim | 68 +++++++++---------- .../aristo/aristo_init/rocks_db/rdb_desc.nim | 6 +- .../aristo/aristo_init/rocks_db/rdb_get.nim | 20 ++++++ .../aristo/aristo_init/rocks_db/rdb_init.nim | 4 +- .../aristo/aristo_init/rocks_db/rdb_put.nim | 12 ++-- execution_chain/db/aristo/aristo_tx_frame.nim | 3 +- .../db/core_db/backend/aristo_rocksdb.nim | 5 +- 14 files changed, 100 insertions(+), 90 deletions(-) diff --git a/execution_chain/db/aristo/aristo_blobify.nim b/execution_chain/db/aristo/aristo_blobify.nim index 4fa8cdc04a..26745a503f 100644 --- a/execution_chain/db/aristo/aristo_blobify.nim +++ b/execution_chain/db/aristo/aristo_blobify.nim @@ -224,7 +224,7 @@ proc blobify*(vtx: VertexRef, key: HashKey): seq[byte] = proc blobifyTo*(lSst: SavedState; data: var seq[byte]) = ## Serialise a last saved state record - data.add lSst.key.data + data.add lSst.vTop.uint64.toBytesBE data.add lSst.serial.toBytesBE data.add @[0x7fu8] @@ -340,8 +340,8 @@ proc deblobify*(record: openArray[byte], T: type HashKey): Opt[HashKey] = proc deblobify*( data: openArray[byte]; - T: type SavedState; - ): Result[SavedState,AristoError] = + T: type SavedStateV0; + ): Result[SavedStateV0,AristoError] = ## De-serialise the last saved state data record previously encoded with ## `blobify()`. 
if data.len != 41: @@ -349,10 +349,26 @@ proc deblobify*( if data[^1] != 0x7f: return err(DeblobWrongType) - ok(SavedState( + ok(SavedStateV0( key: Hash32(array[32, byte].initCopyFrom(data.toOpenArray(0, 31))), serial: uint64.fromBytesBE data.toOpenArray(32, 39))) +proc deblobify*( + data: openArray[byte]; + T: type SavedState; + ): Result[SavedState,AristoError] = + ## De-serialise the last saved state data record previously encoded with + ## `blobify()`. + if data.len != 17: + debugEcho "data ", data.len + return err(DeblobWrongSize) + if data[^1] != 0x7f: + return err(DeblobWrongType) + + ok(SavedState( + vTop: VertexID(uint64.fromBytesBE data.toOpenArray(0, 7)), + serial: uint64.fromBytesBE data.toOpenArray(8, 15))) + # ------------------------------------------------------------------------------ # End # ------------------------------------------------------------------------------ diff --git a/execution_chain/db/aristo/aristo_desc.nim b/execution_chain/db/aristo/aristo_desc.nim index 12a7508426..3ad8182c47 100644 --- a/execution_chain/db/aristo/aristo_desc.nim +++ b/execution_chain/db/aristo/aristo_desc.nim @@ -97,12 +97,10 @@ type ## Backend interface. 
getVtxFn*: GetVtxFn ## Read vertex record getKeyFn*: GetKeyFn ## Read Merkle hash/key - getTuvFn*: GetTuvFn ## Read top used vertex ID getLstFn*: GetLstFn ## Read saved state putBegFn*: PutBegFn ## Start bulk store session putVtxFn*: PutVtxFn ## Bulk store vertex records - putTuvFn*: PutTuvFn ## Store top used vertex ID putLstFn*: PutLstFn ## Store saved state putEndFn*: PutEndFn ## Commit bulk store session diff --git a/execution_chain/db/aristo/aristo_desc/desc_backend.nim b/execution_chain/db/aristo/aristo_desc/desc_backend.nim index cb3cf8245b..fe294fe41d 100644 --- a/execution_chain/db/aristo/aristo_desc/desc_backend.nim +++ b/execution_chain/db/aristo/aristo_desc/desc_backend.nim @@ -57,12 +57,6 @@ type ## Generic backend database bulk storage function, `VertexRef(nil)` ## values indicate that records should be deleted. - PutTuvFn* = - proc(hdl: PutHdlRef; vs: VertexID) - {.gcsafe, raises: [].} - ## Generic backend database ID generator storage function for the - ## top used vertex ID. 
- PutLstFn* = proc(hdl: PutHdlRef; lst: SavedState) {.gcsafe, raises: [].} diff --git a/execution_chain/db/aristo/aristo_desc/desc_structural.nim b/execution_chain/db/aristo/aristo_desc/desc_structural.nim index 4cd75bef35..85fa93c3b4 100644 --- a/execution_chain/db/aristo/aristo_desc/desc_structural.nim +++ b/execution_chain/db/aristo/aristo_desc/desc_structural.nim @@ -79,9 +79,15 @@ type vid*: VertexID ## Table lookup vertex ID (if any) vtx*: VertexRef ## Reference to vertex + SavedStateV0* = object + ## Legacy saved state from when state lived in separate column family + ## TODO remove before beta + key*: Hash32 ## Some state hash (if any) + serial*: uint64 ## Generic identifier from application + SavedState* = object ## Last saved state - key*: Hash32 ## Some state hash (if any) + vTop*: VertexID ## Top used VertexID serial*: uint64 ## Generic identifier from application GetVtxFlag* = enum diff --git a/execution_chain/db/aristo/aristo_get.nim b/execution_chain/db/aristo/aristo_get.nim index 7deb7af626..184c89b79b 100644 --- a/execution_chain/db/aristo/aristo_get.nim +++ b/execution_chain/db/aristo/aristo_get.nim @@ -21,18 +21,12 @@ import # Public functions # ------------------------------------------------------------------------------ -proc getTuvBe*( - db: AristoDbRef; - ): Result[VertexID,AristoError] = - ## Get the ID generator state from the backened if available. 
- db.getTuvFn() - proc getLstBe*( db: AristoDbRef; ): Result[SavedState,AristoError] = ## Get the last saved state db.getLstFn() - + proc getVtxBe*( db: AristoDbRef; rvid: RootedVertexID; diff --git a/execution_chain/db/aristo/aristo_init/init_common.nim b/execution_chain/db/aristo/aristo_init/init_common.nim index 83ca547a37..73b0628d90 100644 --- a/execution_chain/db/aristo/aristo_init/init_common.nim +++ b/execution_chain/db/aristo/aristo_init/init_common.nim @@ -82,7 +82,7 @@ proc finishSession*(hdl: TypedPutHdlRef; db: TypedBackendRef) = db.txId = 0 proc initInstance*(db: AristoDbRef): Result[void, AristoError] = - let vTop = ?db.getTuvFn() + let vTop = (?db.getLstFn()).vTop db.txRef = AristoTxRef(db: db, vTop: vTop, snapshot: Snapshot(level: Opt.some(0))) db.accLeaves = LruCache[Hash32, AccLeafRef].init(ACC_LRU_SIZE) db.stoLeaves = LruCache[Hash32, StoLeafRef].init(ACC_LRU_SIZE) diff --git a/execution_chain/db/aristo/aristo_init/memory_db.nim b/execution_chain/db/aristo/aristo_init/memory_db.nim index 1e0379a4ef..b0d5c35d49 100644 --- a/execution_chain/db/aristo/aristo_init/memory_db.nim +++ b/execution_chain/db/aristo/aristo_init/memory_db.nim @@ -41,12 +41,10 @@ const type MemBackendRef* = ref object of TypedBackendRef sTab*: Table[RootedVertexID,seq[byte]] ## Structural vertex table making up a trie - tUvi*: Opt[VertexID] ## Top used vertex ID lSst*: Opt[SavedState] ## Last saved state MemPutHdlRef = ref object of TypedPutHdlRef sTab: Table[RootedVertexID,seq[byte]] - tUvi: Opt[VertexID] lSst: Opt[SavedState] when extraTraceMessages: @@ -100,15 +98,10 @@ func getKeyFn(db: MemBackendRef): GetKeyFn = return ok((key, nil)) err(GetKeyNotFound) -func getTuvFn(db: MemBackendRef): GetTuvFn = - result = - proc(): Result[VertexID,AristoError]= - db.tUvi or ok(VertexID(0)) - func getLstFn(db: MemBackendRef): GetLstFn = result = proc(): Result[SavedState,AristoError]= - db.lSst or err(GetLstNotFound) + db.lSst or ok(default(SavedState)) # ------------- @@ -128,13 
+121,6 @@ func putVtxFn(db: MemBackendRef): PutVtxFn = else: hdl.sTab[rvid] = EmptyBlob -func putTuvFn(db: MemBackendRef): PutTuvFn = - result = - proc(hdl: PutHdlRef; vs: VertexID) = - let hdl = hdl.getSession db - if hdl.error.isNil: - hdl.tUvi = Opt.some(vs) - func putLstFn(db: MemBackendRef): PutLstFn = result = proc(hdl: PutHdlRef; lst: SavedState) = @@ -163,10 +149,6 @@ func putEndFn(db: MemBackendRef): PutEndFn = else: db.sTab.del vid - let tuv = hdl.tUvi.get(otherwise = VertexID(0)) - if tuv.isValid: - db.tUvi = Opt.some(tuv) - if hdl.lSst.isSome: db.lSst = hdl.lSst @@ -190,12 +172,10 @@ func memoryBackend*(): AristoDbRef = db.getVtxFn = getVtxFn be db.getKeyFn = getKeyFn be - db.getTuvFn = getTuvFn be db.getLstFn = getLstFn be db.putBegFn = putBegFn be db.putVtxFn = putVtxFn be - db.putTuvFn = putTuvFn be db.putLstFn = putLstFn be db.putEndFn = putEndFn be diff --git a/execution_chain/db/aristo/aristo_init/rocks_db.nim b/execution_chain/db/aristo/aristo_init/rocks_db.nim index 240c6aa4e2..d5b1a98461 100644 --- a/execution_chain/db/aristo/aristo_init/rocks_db.nim +++ b/execution_chain/db/aristo/aristo_init/rocks_db.nim @@ -104,33 +104,50 @@ proc getKeyFn(db: RdbBackendRef): GetKeyFn = err(GetKeyNotFound) -proc getTuvFn(db: RdbBackendRef): GetTuvFn = - result = - proc(): Result[VertexID,AristoError]= +proc getTuv(db: RdbBackendRef): Result[VertexID,AristoError]= + # Fetch serialised data record. + let data = db.rdb.getAdm(AdmTabIdTuv).valueOr: + when extraTraceMessages: + trace logTxt "getTuvFn: failed", error=error[0], info=error[1] + return err(error[0]) - # Fetch serialised data record. 
- let data = db.rdb.getAdm(AdmTabIdTuv).valueOr: - when extraTraceMessages: - trace logTxt "getTuvFn: failed", error=error[0], info=error[1] - return err(error[0]) + # Decode data record + if data.len == 0: + return ok VertexID(0) - # Decode data record - if data.len == 0: - return ok VertexID(0) + # Decode data record + data.deblobify VertexID - # Decode data record - result = data.deblobify VertexID +proc getLstV0(db: RdbBackendRef): Result[SavedStateV0,AristoError] = + let data = db.rdb.getAdm(AdmTabIdLst).valueOr: + when extraTraceMessages: + trace logTxt "getTuvFn: failed", error=error[0], info=error[1] + return err(error[0]) + + if data.len == 0: + return ok default(SavedStateV0) + + # Decode data record + data.deblobify SavedStateV0 proc getLstFn(db: RdbBackendRef): GetLstFn = result = proc(): Result[SavedState,AristoError]= # Fetch serialised data record. - let data = db.rdb.getAdm(AdmTabIdLst).valueOr: + let data = db.rdb.getAdm().valueOr: when extraTraceMessages: trace logTxt "getLstFn: failed", error=error[0], info=error[1] return err(error[0]) + if data.len == 0: + # TODO legacy database support, remove before beta + let + lst = ?db.getLstV0() + vTop = ?db.getTuv() + + return ok(SavedState(vTop: vTop, serial: lst.serial)) + # Decode data record data.deblobify SavedState @@ -153,33 +170,18 @@ proc putVtxFn(db: RdbBackendRef): PutVtxFn = code: error[1], info: error[2]) -proc putTuvFn(db: RdbBackendRef): PutTuvFn = - result = - proc(hdl: PutHdlRef; vs: VertexID) = - let hdl = hdl.getSession db - if hdl.error.isNil: - if vs.isValid: - db.rdb.putAdm(hdl.session, AdmTabIdTuv, vs.blobify.data()).isOkOr: - hdl.error = TypedPutHdlErrRef( - pfx: AdmPfx, - aid: AdmTabIdTuv, - code: error[1], - info: error[2]) - return - - proc putLstFn(db: RdbBackendRef): PutLstFn = result = proc(hdl: PutHdlRef; lst: SavedState) = let hdl = hdl.getSession db if hdl.error.isNil: let data = lst.blobify - db.rdb.putAdm(hdl.session, AdmTabIdLst, data).isOkOr: + 
db.rdb.putAdm(hdl.session, data).isOkOr: hdl.error = TypedPutHdlErrRef( pfx: AdmPfx, aid: AdmTabIdLst, - code: error[1], - info: error[2]) + code: error[0], + info: error[1]) proc putEndFn(db: RdbBackendRef): PutEndFn = result = @@ -226,12 +228,10 @@ proc rocksDbBackend*( db.getVtxFn = getVtxFn be db.getKeyFn = getKeyFn be - db.getTuvFn = getTuvFn be db.getLstFn = getLstFn be db.putBegFn = putBegFn be db.putVtxFn = putVtxFn be - db.putTuvFn = putTuvFn be db.putLstFn = putLstFn be db.putEndFn = putEndFn be diff --git a/execution_chain/db/aristo/aristo_init/rocks_db/rdb_desc.nim b/execution_chain/db/aristo/aristo_init/rocks_db/rdb_desc.nim index 26b41983a4..fb047789f0 100644 --- a/execution_chain/db/aristo/aristo_init/rocks_db/rdb_desc.nim +++ b/execution_chain/db/aristo/aristo_init/rocks_db/rdb_desc.nim @@ -24,6 +24,8 @@ import export minilru, rocksdb_desc +const AdmKey* = default(seq[byte]) + type RdbWriteEventCb* = proc(session: WriteBatchRef): bool {.gcsafe, raises: [].} @@ -38,7 +40,7 @@ type RdbInst* = object baseDb*: RocksDbInstanceRef - admCol*: ColFamilyReadWrite ## Admin column family handler + admCol*: ColFamilyReadWrite ## Legacy column family for administrative data vtxCol*: ColFamilyReadWrite ## Vertex column family handler # Note that the key type `VertexID` for LRU caches requires that there is @@ -64,7 +66,7 @@ type AristoCFs* = enum ## Column family symbols/handles and names used on the database - AdmCF = "AriAdm" ## Admin column family name + AdmCF = "AriAdm" ## Admin column family name (deprecated) VtxCF = "AriVtx" ## Vertex column family name RdbLruCounter* = array[bool, Atomic[uint64]] diff --git a/execution_chain/db/aristo/aristo_init/rocks_db/rdb_get.nim b/execution_chain/db/aristo/aristo_init/rocks_db/rdb_get.nim index 15ec67e5a2..6f0669c343 100644 --- a/execution_chain/db/aristo/aristo_init/rocks_db/rdb_get.nim +++ b/execution_chain/db/aristo/aristo_init/rocks_db/rdb_get.nim @@ -103,7 +103,11 @@ when defined(metrics): # Public functions # 
------------------------------------------------------------------------------ + proc getAdm*(rdb: RdbInst, xid: AdminTabID): Result[seq[byte], (AristoError, string)] = + if isNil(rdb.admCol.handle()): + return ok(default(seq[byte])) + var res: seq[byte] let onData = proc(data: openArray[byte]) = res = @data @@ -119,6 +123,22 @@ proc getAdm*(rdb: RdbInst, xid: AdminTabID): Result[seq[byte], (AristoError, str res = EmptyBlob ok move(res) +proc getAdm*(rdb: RdbInst): Result[seq[byte], (AristoError, string)] = + var res: seq[byte] + let onData = proc(data: openArray[byte]) = + res = @data + + let gotData = rdb.vtxCol.get(AdmKey, onData).valueOr: + const errSym = RdbBeDriverGetAdmError + when extraTraceMessages: + trace logTxt "getAdm", xid, error = errSym, info = error + return err((errSym, error)) + + # Correct result if needed + if not gotData: + res = EmptyBlob + ok move(res) + proc getKey*( rdb: var RdbInst, rvid: RootedVertexID, flags: set[GetVtxFlag] ): Result[(HashKey, VertexRef), (AristoError, string)] = diff --git a/execution_chain/db/aristo/aristo_init/rocks_db/rdb_init.nim b/execution_chain/db/aristo/aristo_init/rocks_db/rdb_init.nim index b875846d58..6670195d03 100644 --- a/execution_chain/db/aristo/aristo_init/rocks_db/rdb_init.nim +++ b/execution_chain/db/aristo/aristo_init/rocks_db/rdb_init.nim @@ -114,8 +114,8 @@ proc init*(rdb: var RdbInst, opts: DbOptions, baseDb: RocksDbInstanceRef) = ) # Initialise column handlers (this stores implicitely `baseDb`) - rdb.admCol = baseDb.db.getColFamily($AdmCF).valueOr: - raiseAssert "Cannot initialise AdmCF descriptor: " & error + rdb.admCol = baseDb.db.getColFamily($AdmCF).valueOr(default(ColFamilyReadWrite)) + rdb.vtxCol = baseDb.db.getColFamily($VtxCF).valueOr: raiseAssert "Cannot initialise VtxCF descriptor: " & error diff --git a/execution_chain/db/aristo/aristo_init/rocks_db/rdb_put.nim b/execution_chain/db/aristo/aristo_init/rocks_db/rdb_put.nim index 1a5ee71bd0..1187e4c1b5 100644 --- 
a/execution_chain/db/aristo/aristo_init/rocks_db/rdb_put.nim +++ b/execution_chain/db/aristo/aristo_init/rocks_db/rdb_put.nim @@ -17,7 +17,6 @@ import rocksdb, results, ../../[aristo_blobify, aristo_desc], - ../init_common, ./rdb_desc const @@ -56,22 +55,21 @@ proc commit*(rdb: var RdbInst, session: SharedWriteBatchRef): Result[void,(Arist proc putAdm*( rdb: var RdbInst; session: SharedWriteBatchRef, - xid: AdminTabID; data: openArray[byte]; - ): Result[void,(AdminTabID,AristoError,string)] = + ): Result[void,(AristoError,string)] = let dsc = session.batch if data.len == 0: - dsc.delete(xid.toOpenArray, rdb.admCol.handle()).isOkOr: + dsc.delete(AdmKey, rdb.vtxCol.handle()).isOkOr: const errSym = RdbBeDriverDelAdmError when extraTraceMessages: trace logTxt "putAdm()", xid, error=errSym, info=error - return err((xid,errSym,error)) + return err((errSym,error)) else: - dsc.put(xid.toOpenArray, data, rdb.admCol.handle()).isOkOr: + dsc.put(AdmKey, data, rdb.vtxCol.handle()).isOkOr: const errSym = RdbBeDriverPutAdmError when extraTraceMessages: trace logTxt "putAdm()", xid, error=errSym, info=error - return err((xid,errSym,error)) + return err((errSym,error)) ok() proc putVtx*( diff --git a/execution_chain/db/aristo/aristo_tx_frame.nim b/execution_chain/db/aristo/aristo_tx_frame.nim index a9f87fbe30..2b1cfb4882 100644 --- a/execution_chain/db/aristo/aristo_tx_frame.nim +++ b/execution_chain/db/aristo/aristo_tx_frame.nim @@ -118,7 +118,7 @@ proc persist*( return let lSst = SavedState( - key: emptyRoot, # placeholder for more + vTop: txFrame.vTop, serial: txFrame.blockNumber.expect("`checkpoint` before persisting frame"), ) @@ -203,7 +203,6 @@ with --debug-eager-state-root.""" do: db.putVtxFn(batch, rvid, vtx, default(HashKey)) - db.putTuvFn(batch, txFrame.vTop) db.putLstFn(batch, lSst) # TODO above, we only prepare the changes to the database but don't actually diff --git a/execution_chain/db/core_db/backend/aristo_rocksdb.nim 
b/execution_chain/db/core_db/backend/aristo_rocksdb.nim index 190f3b32de..ec8c0f3769 100644 --- a/execution_chain/db/core_db/backend/aristo_rocksdb.nim +++ b/execution_chain/db/core_db/backend/aristo_rocksdb.nim @@ -152,9 +152,12 @@ proc newRocksDbCoreDbRef*(basePath: string, opts: DbOptions): CoreDbRef = # The same column family options are used for all column families meaning that # the options are a compromise between the various write and access patterns # of what's stored in there - there's room for improvement here! + + # Legacy support: adm CF, if it exists + let (dbOpts, cfOpts) = opts.toRocksDb() - cfDescs = (AristoCFs.items().toSeq().mapIt($it) & KvtCFs.items().toSeq().mapIt($it)) + cfDescs = @[$AristoCFs.VtxCF] & KvtCFs.items().toSeq().mapIt($it) baseDb = RocksDbInstanceRef.open(basePath, dbOpts, cfOpts, cfDescs).expect( "Open database from " & basePath ) From 6ec7ab00103a98a8ab75f85202edd9fc33a16374 Mon Sep 17 00:00:00 2001 From: Chirag Parmar Date: Tue, 1 Jul 2025 18:43:29 +0530 Subject: [PATCH 111/138] update nim-eth to `92a02b672f60e6b5e5ea570d684904c289b495fa` (#3428) * update nim-eth * point to master * fix --- nimbus_verified_proxy/tests/test_header_store.nim | 6 +++--- portal/network/beacon/beacon_db.nim | 2 +- portal/tools/eth_data_exporter/cl_data_exporter.nim | 2 -- vendor/nim-eth | 2 +- vendor/nimbus-eth2 | 2 +- 5 files changed, 6 insertions(+), 8 deletions(-) diff --git a/nimbus_verified_proxy/tests/test_header_store.nim b/nimbus_verified_proxy/tests/test_header_store.nim index a497375fc6..fe83847506 100644 --- a/nimbus_verified_proxy/tests/test_header_store.nim +++ b/nimbus_verified_proxy/tests/test_header_store.nim @@ -12,8 +12,7 @@ import stint/endians2, eth/common/headers_rlp, ../header_store, - beacon_chain/spec/forks, - beacon_chain/spec/helpers + beacon_chain/spec/[forks, digest, helpers] func headerGenerator(number: int): ForkedLightClientHeader = ForkedLightClientHeader( @@ -21,7 +20,8 @@ func headerGenerator(number: int): 
ForkedLightClientHeader = capellaData: capella.LightClientHeader( beacon: default(capella.BeaconBlockHeader), execution: capella.ExecutionPayloadHeader( - block_number: uint64(number), block_hash: Hash32(toBytesBE(u256(number))) + block_number: uint64(number), + block_hash: Eth2Digest(data: toBytesBE(u256(number))), ), execution_branch: default(capella.ExecutionBranch), ), diff --git a/portal/network/beacon/beacon_db.nim b/portal/network/beacon/beacon_db.nim index 5bb545ade7..5d304812b1 100644 --- a/portal/network/beacon/beacon_db.nim +++ b/portal/network/beacon/beacon_db.nim @@ -15,7 +15,7 @@ import stint, results, ssz_serialization, - beacon_chain/db_limits, + beacon_chain/db_utils, beacon_chain/spec/forks, beacon_chain/spec/forks_light_client, ./beacon_content, diff --git a/portal/tools/eth_data_exporter/cl_data_exporter.nim b/portal/tools/eth_data_exporter/cl_data_exporter.nim index 4265639a4e..977903d938 100644 --- a/portal/tools/eth_data_exporter/cl_data_exporter.nim +++ b/portal/tools/eth_data_exporter/cl_data_exporter.nim @@ -31,8 +31,6 @@ import ./exporter_common, ./downloader -from beacon_chain/el/el_manager import toBeaconBlockHeader - export beacon_clock const diff --git a/vendor/nim-eth b/vendor/nim-eth index ac94813ab3..92a02b672f 160000 --- a/vendor/nim-eth +++ b/vendor/nim-eth @@ -1 +1 @@ -Subproject commit ac94813ab385a490bac40affbb351593edaf5d9f +Subproject commit 92a02b672f60e6b5e5ea570d684904c289b495fa diff --git a/vendor/nimbus-eth2 b/vendor/nimbus-eth2 index d6d2f006d5..9823087a13 160000 --- a/vendor/nimbus-eth2 +++ b/vendor/nimbus-eth2 @@ -1 +1 @@ -Subproject commit d6d2f006d507ab2e27b8477064feaad45ff99016 +Subproject commit 9823087a131444e4c030de9defc62d019233019d From f88c6fe929e2a94b9265cb3a4744dfd918fac80d Mon Sep 17 00:00:00 2001 From: andri lim Date: Wed, 2 Jul 2025 13:40:07 +0700 Subject: [PATCH 112/138] feat: add admin_peers and admin ns (#3431) (#3437) * feat: add admin_peers and admin ns (#3431) * feat: add admin_peers and admin ns 
* fix redundant boolean checks and import std sections * move caps in the main block * setup admin and quit combined into one call * fix compile issues * Add export marker * Fix tests * Restore invalid request exeception in admin_addPeer * Chicken and egg * oops * fix: string -> int for ports.discovery and listener (#3438) * fix: string -> int for ports.discovery and listener * use int not hex * fix test * Add export marker * Add comments --------- Co-authored-by: Barnabas Busa --- execution_chain/config.nim | 6 +- execution_chain/networking/p2p_types.nim | 1 + execution_chain/networking/rlpx.nim | 4 +- execution_chain/rpc.nim | 10 ++- execution_chain/rpc/common.nim | 95 +++++++++++++++++++++--- tests/test_configuration.nim | 8 ++ tests/test_rpc.nim | 33 +++++++- 7 files changed, 136 insertions(+), 21 deletions(-) diff --git a/execution_chain/config.nim b/execution_chain/config.nim index 6f31de76e8..1ede43ba1c 100644 --- a/execution_chain/config.nim +++ b/execution_chain/config.nim @@ -96,6 +96,7 @@ type ## RPC flags Eth ## enable eth_ set of RPC API Debug ## enable debug_ set of RPC API + Admin ## enable admin_ set of RPC API DiscoveryType* {.pure.} = enum None @@ -433,7 +434,7 @@ type name: "rpc" }: bool rpcApi {. - desc: "Enable specific set of RPC API (available: eth, debug)" + desc: "Enable specific set of RPC API (available: eth, debug, admin)" defaultValue: @[] defaultValueDesc: $RpcFlag.Eth name: "rpc-api" }: seq[string] @@ -444,7 +445,7 @@ type name: "ws" }: bool wsApi {. 
- desc: "Enable specific set of Websocket RPC API (available: eth, debug)" + desc: "Enable specific set of Websocket RPC API (available: eth, debug, admin)" defaultValue: @[] defaultValueDesc: $RpcFlag.Eth name: "ws-api" }: seq[string] @@ -684,6 +685,7 @@ proc getRpcFlags(api: openArray[string]): set[RpcFlag] = case item.toLowerAscii() of "eth": result.incl RpcFlag.Eth of "debug": result.incl RpcFlag.Debug + of "admin": result.incl RpcFlag.Admin else: error "Unknown RPC API: ", name=item quit QuitFailure diff --git a/execution_chain/networking/p2p_types.nim b/execution_chain/networking/p2p_types.nim index 2007cbf222..950bed4ed8 100644 --- a/execution_chain/networking/p2p_types.nim +++ b/execution_chain/networking/p2p_types.nim @@ -54,6 +54,7 @@ type awaitedMessages*: seq[FutureBase] # per `msgId` table snappyEnabled*: bool clientId*: string + inbound*: bool # true if connection was initiated by remote peer SeenNode* = object nodeId*: NodeId diff --git a/execution_chain/networking/rlpx.nim b/execution_chain/networking/rlpx.nim index dc24252ae6..b4b4139533 100644 --- a/execution_chain/networking/rlpx.nim +++ b/execution_chain/networking/rlpx.nim @@ -1223,7 +1223,7 @@ proc rlpxConnect*( trace "Connecting to peer" let - peer = Peer(remote: remote, network: node) + peer = Peer(remote: remote, network: node, inbound: false) deadline = sleepAsync(connectionTimeout) var error = true @@ -1320,7 +1320,7 @@ proc rlpxAccept*( initTracing(devp2pInfo, node.protocols) let - peer = Peer(network: node) + peer = Peer(network: node, inbound: true) deadline = sleepAsync(connectionTimeout) var error = true diff --git a/execution_chain/rpc.nim b/execution_chain/rpc.nim index 3109da9b67..0fb5f6380a 100644 --- a/execution_chain/rpc.nim +++ b/execution_chain/rpc.nim @@ -53,14 +53,16 @@ func installRPC(server: RpcServer, if RpcFlag.Eth in flags: setupServerAPI(serverApi, server, nimbus.ctx) + # TODO: chicken and egg problem. + # Remove comment after this PR below merged. 
+ # https://github.com/ethpandaops/ethereum-package/pull/1092 + #if RpcFlag.Admin in flags: + setupAdminRpc(nimbus, conf, server) + # # Tracer is currently disabled # if RpcFlag.Debug in flags: # setupDebugRpc(com, nimbus.txPool, server) - server.rpc("admin_quit") do() -> string: - {.gcsafe.}: - nimbus.state = NimbusState.Stopping - result = "EXITING" proc newRpcWebsocketHandler(): RpcWebSocketHandler = let rng = HmacDrbgContext.new() diff --git a/execution_chain/rpc/common.nim b/execution_chain/rpc/common.nim index e3214a93b1..320f9ed576 100644 --- a/execution_chain/rpc/common.nim +++ b/execution_chain/rpc/common.nim @@ -8,28 +8,51 @@ # those terms. import - stint, json_rpc/server, json_rpc/errors, - ../networking/[p2p, discoveryv4/enode], + # Standard library imports are prefixed with `std/` + std/[json, sequtils], + stint, json_rpc/errors, + chronos, + ../networking/[p2p, discoveryv4/enode, peer_pool, p2p_types], ../config, ../beacon/web3_eth_conv, + ../nimbus_desc, web3/conversions +from json_rpc/server import RpcServer, rpc + {.push raises: [].} type NodePorts = object - discovery: string - listener : string + discovery*: int # TODO: Serialize `Port` into number + listener* : int # using custom serializer + + NodeInfo* = object + id* : string # UInt256 hex + name* : string + enode* : string # Enode string + ip* : string # address string + ports* : NodePorts + + PeerNetworkInfo* = object + inbound*: bool # Whether connection was initiated by remote peer + localAddress*: string # Local endpoint + remoteAddress*: string # Remote endpoint + `static`*: bool # Whether peer is static + trusted*: bool # Whether peer is trusted - NodeInfo = object - id : string # UInt256 hex - name : string - enode : string # Enode string - ip : string # address string - ports : NodePorts + PeerInfo* = object + caps*: seq[string] # Protocol capabilities + enode*: string # ENode string + id*: string # Node ID hex + name*: string # Client ID + network*: PeerNetworkInfo 
+ protocols*: JsonNode # Protocol-specific data NodePorts.useDefaultSerializationIn JrpcConv NodeInfo.useDefaultSerializationIn JrpcConv +PeerNetworkInfo.useDefaultSerializationIn JrpcConv +PeerInfo.useDefaultSerializationIn JrpcConv proc setupCommonRpc*(node: EthereumNode, conf: NimbusConf, server: RpcServer) = server.rpc("web3_clientVersion") do() -> string: @@ -49,6 +72,9 @@ proc setupCommonRpc*(node: EthereumNode, conf: NimbusConf, server: RpcServer) = let peerCount = uint node.numPeers result = w3Qty(peerCount) +proc setupAdminRpc*(nimbus: NimbusNode, conf: NimbusConf, server: RpcServer) = + let node = nimbus.ethNode + server.rpc("admin_nodeInfo") do() -> NodeInfo: let enode = toENode(node) @@ -59,8 +85,8 @@ proc setupCommonRpc*(node: EthereumNode, conf: NimbusConf, server: RpcServer) = enode: $enode, ip: $enode.address.ip, ports: NodePorts( - discovery: $enode.address.udpPort, - listener: $enode.address.tcpPort + discovery: int(enode.address.udpPort), + listener: int(enode.address.tcpPort) ) ) @@ -71,4 +97,49 @@ proc setupCommonRpc*(node: EthereumNode, conf: NimbusConf, server: RpcServer) = if res.isOk: asyncSpawn node.connectToNode(res.get()) return true + # Weird it is, but when addPeer fails, the calee expect + # invalid params `-32602`(kurtosis test) raise (ref InvalidRequest)(code: -32602, msg: "Invalid ENode") + + server.rpc("admin_peers") do() -> seq[PeerInfo]: + var peers: seq[PeerInfo] + for peer in node.peerPool.peers: + if peer.connectionState == Connected: + let + nodeId = peer.remote.id + clientId = peer.clientId + enode = $peer.remote.node + remoteIp = $peer.remote.node.address.ip + remoteTcpPort = $peer.remote.node.address.tcpPort + localEnode = toENode(node) + localIp = $localEnode.address.ip + localTcpPort = $localEnode.address.tcpPort + caps = node.capabilities.mapIt(it.name & "/" & $it.version) + + # Create protocols object with version info + var protocolsObj = newJObject() + for capability in node.capabilities: + 
protocolsObj[capability.name] = %*{"version": capability.version} + + let peerInfo = PeerInfo( + caps: caps, + enode: enode, + id: nodeId.toHex, + name: clientId, + network: PeerNetworkInfo( + inbound: peer.inbound, + localAddress: localIp & ":" & localTcpPort, + remoteAddress: remoteIp & ":" & remoteTcpPort, + `static`: false, # TODO: implement static peer tracking + trusted: false # TODO: implement trusted peer tracking + ), + protocols: protocolsObj + ) + peers.add(peerInfo) + + return peers + + server.rpc("admin_quit") do() -> string: + {.gcsafe.}: + nimbus.state = NimbusState.Stopping + result = "EXITING" diff --git a/tests/test_configuration.nim b/tests/test_configuration.nim index 73fa654fcd..0847881fd4 100644 --- a/tests/test_configuration.nim +++ b/tests/test_configuration.nim @@ -104,6 +104,14 @@ proc configurationMain*() = let cx = cc.getRpcFlags() check { RpcFlag.Eth, RpcFlag.Debug } == cx + let dd = makeConfig(@["--rpc-api:admin"]) + let dx = dd.getRpcFlags() + check { RpcFlag.Admin } == dx + + let ee = makeConfig(@["--rpc-api:eth,admin"]) + let ex = ee.getRpcFlags() + check { RpcFlag.Eth, RpcFlag.Admin } == ex + test "ws-api": let conf = makeTestConfig() let flags = conf.getWsFlags() diff --git a/tests/test_rpc.nim b/tests/test_rpc.nim index b993abbada..b7f7b4de95 100644 --- a/tests/test_rpc.nim +++ b/tests/test_rpc.nim @@ -24,9 +24,10 @@ import ../execution_chain/core/eip4844, ../execution_chain/utils/utils, ../execution_chain/[common, rpc], - ../execution_chain/rpc/rpc_types, + ../execution_chain/rpc/[rpc_types, common as rpc_common], ../execution_chain/beacon/web3_eth_conv, ../execution_chain/networking/p2p, + ../execution_chain/nimbus_desc, ./test_helpers, ./macro_assembler, ./test_block_fixture @@ -232,6 +233,9 @@ proc setupEnv(envFork: HardFork = MergeFork): TestEnv = client = setupClient(server.localAddress[0].port) ctx = newEthContext() node = setupEthNode(conf, ctx, eth68, eth69) + nimbus = NimbusNode( + ethNode: node, + ) 
ctx.am.loadKeystores(keyStore).isOkOr: debugEcho error @@ -244,6 +248,7 @@ proc setupEnv(envFork: HardFork = MergeFork): TestEnv = setupServerAPI(serverApi, server, ctx) setupCommonRpc(node, conf, server) + setupAdminRpc(nimbus, conf, server) server.start() TestEnv( @@ -303,6 +308,8 @@ createRpcSigsFromNim(RpcClient): proc net_version(): string proc net_listening(): bool proc net_peerCount(): Quantity + proc admin_nodeInfo(): NodeInfo + proc admin_peers(): seq[PeerInfo] proc rpcMain*() = suite "Remote Procedure Calls": @@ -337,6 +344,30 @@ proc rpcMain*() = let peerCount = node.peerPool.connectedNodes.len check res == w3Qty(peerCount) + test "admin_nodeInfo": + let res = await client.admin_nodeInfo() + check: + res.id.len > 0 + res.name == env.conf.agentString + res.enode.startsWith("enode://") + res.ip.len > 0 + res.ports.discovery > 0 + res.ports.listener > 0 + + test "admin_peers": + let peers = await client.admin_peers() + check peers.len == node.peerPool.connectedNodes.len + + # If there are peers, verify the structure matches Geth specification + for peer in peers: + check: + peer.caps.len > 0 + peer.enode.startsWith("enode://") + peer.id.len > 0 + peer.name.len > 0 + peer.network.localAddress.len > 0 + peer.network.remoteAddress.len > 0 + test "eth_chainId": let res = await client.eth_chainId() check res == com.chainId From c594c6dd27d6ab54a257a643ed1707404883d5f7 Mon Sep 17 00:00:00 2001 From: kdeme <7857583+kdeme@users.noreply.github.com> Date: Wed, 2 Jul 2025 13:26:38 +0200 Subject: [PATCH 113/138] Remove Portal access for block headers in EL (#3439) --- execution_chain/core/chain/forked_chain.nim | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/execution_chain/core/chain/forked_chain.nim b/execution_chain/core/chain/forked_chain.nim index ac5b20010b..8e37227ec1 100644 --- a/execution_chain/core/chain/forked_chain.nim +++ b/execution_chain/core/chain/forked_chain.nim @@ -808,12 +808,7 @@ proc headerByNumber*(c: 
ForkedChainRef, number: BlockNumber): Result[Header, str return err("Requested block number not exists: " & $number) if number < c.baseBranch.tailNumber: - let hdr = c.baseTxFrame.getBlockHeader(number).valueOr: - if c.isPortalActive: - return c.portal.getHeaderByNumber(number) - else: - return err("Portal inactive, block not found, number = " & $number) - return ok(hdr) + return c.baseTxFrame.getBlockHeader(number) var branch = c.activeBranch while not branch.isNil: @@ -850,12 +845,8 @@ func safeBlock*(c: ForkedChainRef): Block = proc headerByHash*(c: ForkedChainRef, blockHash: Hash32): Result[Header, string] = c.hashToBlock.withValue(blockHash, loc): return ok(loc[].header) - let hdr = c.baseTxFrame.getBlockHeader(blockHash).valueOr: - if c.isPortalActive: - return c.portal.getHeaderByHash(blockHash) - else: - return err("Block header not found") - ok(hdr) + + c.baseTxFrame.getBlockHeader(blockHash) proc txDetailsByTxHash*(c: ForkedChainRef, txHash: Hash32): Result[(Hash32, uint64), string] = if c.txRecords.hasKey(txHash): From 1796fe743571765cf89a39f951d62170c974527a Mon Sep 17 00:00:00 2001 From: bhartnett <51288821+bhartnett@users.noreply.github.com> Date: Wed, 2 Jul 2025 20:02:52 +0800 Subject: [PATCH 114/138] Stateless: Create ExecutionWitness type and implement encoding/decoding (#3440) * Add cli param to enable stateless provider. * Create execution witness type and implement encoding/decoding. 
--- execution_chain/config.nim | 9 +++++ execution_chain/stateless/witness.nim | 57 +++++++++++++++++++++++++++ tests/all_tests.nim | 1 + tests/test_stateless_witness.nim | 45 +++++++++++++++++++++ 4 files changed, 112 insertions(+) create mode 100644 execution_chain/stateless/witness.nim create mode 100644 tests/test_stateless_witness.nim diff --git a/execution_chain/config.nim b/execution_chain/config.nim index 1ede43ba1c..98e3594799 100644 --- a/execution_chain/config.nim +++ b/execution_chain/config.nim @@ -501,6 +501,15 @@ type defaultValueDesc: "\"jwt.hex\" in the data directory (see --data-dir)" name: "jwt-secret" .}: Option[InputFile] + statelessProviderEnabled* {. + separator: "\pSTATELESS PROVIDER OPTIONS:" + hidden + desc: "Enable the stateless provider. This turns on the features required" & + " by stateless clients such as generation and stored of block witnesses" & + " and serving these witnesses to peers over the p2p network." + defaultValue: false + name: "stateless-provider" }: bool + of `import`: maxBlocks* {. desc: "Maximum number of blocks to import" diff --git a/execution_chain/stateless/witness.nim b/execution_chain/stateless/witness.nim new file mode 100644 index 0000000000..ccb59292e3 --- /dev/null +++ b/execution_chain/stateless/witness.nim @@ -0,0 +1,57 @@ +# Nimbus +# Copyright (c) 2025 Status Research & Development GmbH +# Licensed under either of +# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE)) +# * MIT license ([LICENSE-MIT](LICENSE-MIT)) +# at your option. +# This file may not be copied, modified, or distributed except according to +# those terms. + +import + eth/common, + eth/rlp, + results + +export + common, + results + +{.push raises: [].} + +type + ExecutionWitness* = object + state*: seq[seq[byte]] # MPT trie nodes accessed while executing the block. + codes*: seq[seq[byte]] # Contract bytecodes read while executing the block. 
+ keys*: seq[seq[byte]] # Ordered list of access keys (address bytes or storage slots bytes). + headers*: seq[Header] # Block headers required for proving correctness of stateless execution. + # Stores the parent block headers needed to verify that the state reads are correct with respect + # to the pre-state root. + +func init*( + T: type ExecutionWitness, + state = newSeq[seq[byte]](), + codes = newSeq[seq[byte]](), + keys = newSeq[seq[byte]](), + headers = newSeq[Header]()): T = + ExecutionWitness(state: state, codes: codes, keys: keys, headers: headers) + +template addState*(witness: var ExecutionWitness, trieNode: seq[byte]) = + witness.state.add(trieNode) + +template addCode*(witness: var ExecutionWitness, code: seq[byte]) = + witness.codes.add(code) + +template addKey*(witness: var ExecutionWitness, key: seq[byte]) = + witness.keys.add(key) + +template addHeader*(witness: var ExecutionWitness, header: Header) = + witness.headers.add(header) + +func encode*(witness: ExecutionWitness): seq[byte] = + rlp.encode(witness) + +func decode*(T: type ExecutionWitness, witnessBytes: openArray[byte]): Result[T, string] = + try: + ok(rlp.decode(witnessBytes, T)) + except RlpError as e: + err(e.msg) diff --git a/tests/all_tests.nim b/tests/all_tests.nim index 059fd7a1db..2cc42f5282 100644 --- a/tests/all_tests.nim +++ b/tests/all_tests.nim @@ -36,6 +36,7 @@ import test_txpool, test_networking, test_pooled_tx, + test_stateless_witness, # These two suites are much slower than all the rest, so run them last test_blockchain_json, test_generalstate_json, diff --git a/tests/test_stateless_witness.nim b/tests/test_stateless_witness.nim new file mode 100644 index 0000000000..1652e6b2aa --- /dev/null +++ b/tests/test_stateless_witness.nim @@ -0,0 +1,45 @@ +# Nimbus +# Copyright (c) 2025 Status Research & Development GmbH +# Licensed under either of +# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or +# http://www.apache.org/licenses/LICENSE-2.0) +# * MIT license 
([LICENSE-MIT](LICENSE-MIT) or +# http://opensource.org/licenses/MIT) +# at your option. This file may not be copied, modified, or +# distributed except according to those terms. + +{.used.} + +import + unittest2, + ../execution_chain/stateless/witness + +suite "Execution Witness Tests": + + test "Encoding/decoding empty witness": + var witness: ExecutionWitness + + let witnessBytes = witness.encode() + check witnessBytes.len() > 0 + echo witnessBytes + + let decodedWitness = ExecutionWitness.decode(witnessBytes) + check: + decodedWitness.isOk() + decodedWitness.get() == witness + + test "Encoding/decoding witness": + var witness = ExecutionWitness.init() + witness.addState(@[0x1.byte, 0x2, 0x3]) + witness.addCode(@[0x4.byte, 0x5, 0x6]) + witness.addKey(@[0x7.byte, 0x8, 0x9]) + witness.addHeader(Header()) + + let witnessBytes = witness.encode() + check witnessBytes.len() > 0 + echo witnessBytes + + let decodedWitness = ExecutionWitness.decode(witnessBytes) + check: + decodedWitness.isOk() + decodedWitness.get() == witness From 8a6877d4b8d5dd1e5e5b3b2b62d7e2ddea5a454f Mon Sep 17 00:00:00 2001 From: andri lim Date: Thu, 3 Jul 2025 12:21:24 +0700 Subject: [PATCH 115/138] Transform FC module internals into DAG (#3441) * Transform FC module internals into DAG * Optimize updateFinalized * no changes to chain_private * More tuning --- execution_chain/core/chain/forked_chain.nim | 735 +++++----- .../core/chain/forked_chain/chain_branch.nim | 215 +-- .../core/chain/forked_chain/chain_desc.nim | 19 +- .../chain/forked_chain/chain_serialize.nim | 225 ++- tests/test_forked_chain.nim | 1216 +++++++++-------- tests/test_forked_chain/chain_debug.nim | 56 +- 6 files changed, 1133 insertions(+), 1333 deletions(-) diff --git a/execution_chain/core/chain/forked_chain.nim b/execution_chain/core/chain/forked_chain.nim index 8e37227ec1..465b88302d 100644 --- a/execution_chain/core/chain/forked_chain.nim +++ b/execution_chain/core/chain/forked_chain.nim @@ -34,7 +34,7 @@ logScope: topics 
= "forked chain" export - BlockDesc, + BlockRef, ForkedChainRef, common, core_db @@ -44,35 +44,36 @@ const PersistBatchSize = 32'u64 MaxQueueSize = 9 -# ------------------------------------------------------------------------------ -# Forward declarations -# ------------------------------------------------------------------------------ - -proc updateBase(c: ForkedChainRef, newBase: BlockPos): - Future[void] {.async: (raises: [CancelledError]), gcsafe.} -func calculateNewBase(c: ForkedChainRef; - finalizedNumber: uint64; head: BlockPos): BlockPos {.gcsafe.} - # ------------------------------------------------------------------------------ # Private functions # ------------------------------------------------------------------------------ -func updateBranch(c: ForkedChainRef, - parent: BlockPos, +func appendBlock(c: ForkedChainRef, + parent: BlockRef, blk: Block, blkHash: Hash32, txFrame: CoreDbTxRef, receipts: sink seq[StoredReceipt]) = - if parent.isHead: - parent.appendBlock(blk, blkHash, txFrame, move(receipts)) - c.hashToBlock[blkHash] = parent.lastBlockPos - c.activeBranch = parent.branch - return - let newBranch = branch(parent.branch, blk, blkHash, txFrame, move(receipts)) - c.hashToBlock[blkHash] = newBranch.lastBlockPos - c.branches.add(newBranch) - c.activeBranch = newBranch + let newBlock = BlockRef( + blk : blk, + txFrame : txFrame, + receipts: move(receipts), + hash : blkHash, + parent : parent, + ) + + c.hashToBlock[blkHash] = newBlock + c.latest = newBlock + + for i, head in c.heads: + if head.hash == parent.hash: + # update existing heads + c.heads[i] = newBlock + return + + # It's a branch + c.heads.add newBlock proc fcuSetHead(c: ForkedChainRef, txFrame: CoreDbTxRef, @@ -84,100 +85,24 @@ proc fcuSetHead(c: ForkedChainRef, c.fcuHead.number = number c.fcuHead.hash = hash -proc validateBlock(c: ForkedChainRef, - parent: BlockPos, - blk: Block, finalized: bool): Future[Result[Hash32, string]] - {.async: (raises: [CancelledError]).} = - let blkHash = 
blk.header.computeBlockHash - - if c.hashToBlock.hasKey(blkHash): - # Block exists, just return - return ok(blkHash) - - if blkHash == c.pendingFCU: - # Resolve the hash into latestFinalizedBlockNumber - c.latestFinalizedBlockNumber = max(blk.header.number, - c.latestFinalizedBlockNumber) - - let - parentFrame = parent.txFrame - txFrame = parentFrame.txFrameBegin - - # TODO shortLog-equivalent for eth types - debug "Validating block", - blkHash, blk = ( - parentHash: blk.header.parentHash, - coinbase: blk.header.coinbase, - stateRoot: blk.header.stateRoot, - transactionsRoot: blk.header.transactionsRoot, - receiptsRoot: blk.header.receiptsRoot, - number: blk.header.number, - gasLimit: blk.header.gasLimit, - gasUsed: blk.header.gasUsed, - nonce: blk.header.nonce, - baseFeePerGas: blk.header.baseFeePerGas, - withdrawalsRoot: blk.header.withdrawalsRoot, - blobGasUsed: blk.header.blobGasUsed, - excessBlobGas: blk.header.excessBlobGas, - parentBeaconBlockRoot: blk.header.parentBeaconBlockRoot, - requestsHash: blk.header.requestsHash, - ) - - var receipts = c.processBlock(parent.header, txFrame, blk, blkHash, finalized).valueOr: - txFrame.dispose() - return err(error) - - c.writeBaggage(blk, blkHash, txFrame, receipts) - - c.updateSnapshot(blk, txFrame) - - c.updateBranch(parent, blk, blkHash, txFrame, move(receipts)) - - for i, tx in blk.transactions: - c.txRecords[computeRlpHash(tx)] = (blkHash, uint64(i)) - - # Entering base auto forward mode while avoiding forkChoice - # handled region(head - baseDistance) - # e.g. live syncing with the tip very far from from our latest head - if c.pendingFCU != zeroHash32 and - c.baseBranch.tailNumber < c.latestFinalizedBlockNumber - c.baseDistance - c.persistBatchSize: - let - head = c.activeBranch.lastBlockPos - newBaseCandidate = c.calculateNewBase(c.latestFinalizedBlockNumber, head) - prevBaseNumber = c.baseBranch.tailNumber - - await c.updateBase(newBaseCandidate) - - # If on disk head behind base, move it to base too. 
- let newBaseNumber = c.baseBranch.tailNumber - if newBaseNumber > prevBaseNumber: - if c.fcuHead.number < newBaseNumber: - let head = c.baseBranch.firstBlockPos - c.fcuSetHead(head.txFrame, - head.branch.tailHeader, - head.branch.tailHash, - head.branch.tailNumber) - - ok(blkHash) - -func findHeadPos(c: ForkedChainRef, hash: Hash32): Result[BlockPos, string] = - ## Find the `BlockPos` that contains the block relative to the +func findHeadPos(c: ForkedChainRef, hash: Hash32): Result[BlockRef, string] = + ## Find the `BlockRef` that contains the block relative to the ## argument `hash`. ## - c.hashToBlock.withValue(hash, val) do: - return ok(val[]) - do: - return err("Block hash is not part of any active chain") + let b = c.hashToBlock.getOrDefault(hash) + if b.isNil: + return err("Cannot find head block: " & hash.short) + ok(b) func findFinalizedPos( c: ForkedChainRef; - itHash: Hash32; - head: BlockPos, - ): Result[BlockPos, string] = + hash: Hash32; + head: BlockRef, + ): Result[BlockRef, string] = ## Find header for argument `itHash` on argument `head` ancestor chain. ## - # OK, new base stays on the argument head branch. + # OK, new finalized stays on the argument head branch. 
# :: # - B3 - B4 - B5 - B6 # / ^ ^ @@ -187,44 +112,40 @@ func findFinalizedPos( # A1, A2, B3, B4, B5: valid # A3, B6: invalid - # Find `itHash` on the ancestor lineage of `head` - c.hashToBlock.withValue(itHash, loc): - if loc[].number > head.number: - return err("Invalid finalizedHash: block is newer than head block") - - var - branch = head.branch - prevBranch = BranchRef(nil) + # Find `hash` on the ancestor lineage of `head` + let fin = c.hashToBlock.getOrDefault(hash) - while not branch.isNil: - if branch == loc[].branch: - if prevBranch.isNil.not and - loc[].number >= prevBranch.tailNumber: - break # invalid - return ok(loc[]) + if fin.isOk: + if fin.number > head.number: + return err("Invalid finalizedHash: block is newer than head block") - prevBranch = branch - branch = branch.parent + # There is no point traversing the DAG if there is only one branch. + # Just return the node. + if c.heads.len > 1: + loopIt(head): + if it == fin: + return ok(fin) + else: + return ok(fin) err("Invalid finalizedHash: block not in argument head ancestor lineage") func calculateNewBase( c: ForkedChainRef; finalizedNumber: uint64; - head: BlockPos; - ): BlockPos = + head: BlockRef; + ): BlockRef = ## It is required that the `finalizedNumber` argument is on the `head` chain, i.e. - ## it ranges beween `c.baseBranch.tailNumber` and - ## `head.branch.headNumber`. + ## it ranges between `c.base.number` and `head.number`. ## - ## The function returns a BlockPos containing a new base position. It is + ## The function returns a BlockRef containing a new base position. It is ## calculated as follows. ## - ## Starting at the argument `head.branch` searching backwards, the new base + ## Starting at the argument `head` searching backwards, the new base ## is the position of the block with `finalizedNumber`. 
## ## Before searching backwards, the `finalizedNumber` argument might be adjusted - ## and made smaller so that a minimum distance to the head on the cursor arc + ## and made smaller so that a minimum distance to the head on the head arc ## applies. ## # It's important to have base at least `baseDistance` behind head @@ -233,31 +154,26 @@ func calculateNewBase( max(head.number, c.baseDistance) - c.baseDistance) # Do not update base. - if target <= c.baseBranch.tailNumber: - return BlockPos(branch: c.baseBranch) + if target <= c.base.number: + return c.base # If there is a new base, make sure it moves # with large enough step to accomodate for bulk # state root verification/bulk persist. - let distance = target - c.baseBranch.tailNumber + let distance = target - c.base.number if distance < c.persistBatchSize: # If the step is not large enough, do nothing. - return BlockPos(branch: c.baseBranch) - - if target >= head.branch.tailNumber: - # OK, new base stays on the argument head branch. - # :: - # - B3 - B4 - B5 - B6 - # / ^ ^ ^ - # base - A1 - A2 - A3 | | | - # | head CCH - # | - # target - # - return BlockPos( - branch: head.branch, - index : int(target - head.branch.tailNumber) - ) + return c.base + + # OK, new base stays on the argument head branch. + # :: + # - B3 - B4 - B5 - B6 + # / ^ ^ ^ + # base - A1 - A2 - A3 | | | + # | head CCH + # | + # target + # # The new base (aka target) falls out of the argument head branch, # ending up somewhere on a parent branch. 
@@ -270,73 +186,40 @@ func calculateNewBase( # target # # base will not move to A3 onward for this iteration - var branch = head.branch.parent - while not branch.isNil: - if target >= branch.tailNumber: - return BlockPos( - branch: branch, - index : int(target - branch.tailNumber) - ) - branch = branch.parent - - doAssert(false, "Unreachable code, finalized block outside canonical chain") - -proc removeBlockFromCache(c: ForkedChainRef, bd: BlockDesc) = - c.hashToBlock.del(bd.hash) - for tx in bd.blk.transactions: + + loopIt(head): + if it.number == target: + return it + + doAssert(false, "Unreachable code, target base should exists") + +proc removeBlockFromCache(c: ForkedChainRef, b: BlockRef) = + c.hashToBlock.del(b.hash) + for tx in b.blk.transactions: c.txRecords.del(computeRlpHash(tx)) for v in c.lastSnapshots.mitems(): - if v == bd.txFrame: + if v == b.txFrame: v = nil - bd.txFrame.dispose() + b.txFrame.dispose() -proc updateHead(c: ForkedChainRef, head: BlockPos) = - ## Update head if the new head is different from current head. - ## All branches with block number greater than head will be removed too. + # Mark it as deleted, don't delete it twice + b.txFrame = nil - c.activeBranch = head.branch +proc updateHead(c: ForkedChainRef, head: BlockRef) = + ## Update head if the new head is different from current head. - # Pruning if necessary - # :: - # - B5 - B6 - B7 - B8 - # / - # A1 - A2 - A3 - [A4] - A5 - A6 - # \ \ - # - C3 - C4 - D6 - D7 - # - # A4 is head - # 'D' and 'A5' onward will be removed - # 'C' and 'B' will stay - - let headNumber = head.number - var i = 0 - while i < c.branches.len: - let branch = c.branches[i] - - # Any branches with block number greater than head+1 should be removed. - if branch.tailNumber > headNumber + 1: - for i in countdown(branch.blocks.len-1, 0): - c.removeBlockFromCache(branch.blocks[i]) - c.branches.del(i) - # no need to increment i when we delete from c.branches. 
- continue - - inc i - - # Maybe the current active chain is longer than canonical chain, - # trim the branch. - for i in countdown(head.branch.len-1, head.index+1): - c.removeBlockFromCache(head.branch.blocks[i]) - - head.branch.blocks.setLen(head.index+1) c.fcuSetHead(head.txFrame, - head.branch.headHeader, - head.branch.headHash, - head.branch.headNumber) + head.header, + head.hash, + head.number) + +func uncolorAll(c: ForkedChainRef) = + for node in values(c.hashToBlock): + node.noColor() -proc updateFinalized(c: ForkedChainRef, finalized: BlockPos) = +proc updateFinalized(c: ForkedChainRef, finalized: BlockRef, fcuHead: BlockRef) = # Pruning # :: # - B5 - B6 - B7 - B8 @@ -349,33 +232,66 @@ proc updateFinalized(c: ForkedChainRef, finalized: BlockPos) = # 'B', 'D', and A5 onward will stay # 'C' will be removed - func sameLineage(brc: BranchRef, line: BranchRef): bool = - var branch = line - while not branch.isNil: - if branch == brc: - return true - branch = branch.parent - - let finalizedNumber = finalized.number - var i = 0 - while i < c.branches.len: - let branch = c.branches[i] + func reachable(head, fin: BlockRef): bool = + loopIt(head): + if it.colored: + return it == fin + false - # Any branches with tail block number less or equal - # than finalized should be removed. - if not branch.sameLineage(finalized.branch) and branch.tailNumber <= finalizedNumber: - for i in countdown(branch.blocks.len-1, 0): - c.removeBlockFromCache(branch.blocks[i]) - c.branches.del(i) - # no need to increment i when we delete from c.branches. - continue + # There is no point running this expensive algorithm + # if the chain have no branches, just move it forward. + if c.heads.len > 1: + c.uncolorAll() + loopIt(finalized): + it.color() - inc i + var + i = 0 + updateLatest = false + + while i < c.heads.len: + let head = c.heads[i] + + # Any branches not reachable from finalized + # should be removed. 
+ if not reachable(head, finalized): + loopIt(head): + if not it.colored and it.txFrame.isNil.not: + c.removeBlockFromCache(it) + else: + break + + if head == c.latest: + updateLatest = true + + c.heads.del(i) + # no need to increment i when we delete from c.heads. + continue + + inc i + + if updateLatest: + # Previous `latest` is pruned, select a new latest + # based on longest chain reachable from fcuHead. + var candidate: BlockRef + for head in c.heads: + loopIt(head): + if it == fcuHead: + if candidate.isNil: + candidate = head + elif head.number > candidate.number: + candidate = head + break + if it.number < fcuHead.number: + break + + doAssert(candidate.isNil.not) + c.latest = candidate let txFrame = finalized.txFrame txFrame.fcuFinalized(finalized.hash, finalized.number).expect("fcuFinalized OK") -proc updateBase(c: ForkedChainRef, newBase: BlockPos): +proc updateBase(c: ForkedChainRef, base: BlockRef): Future[void] {.async: (raises: [CancelledError]), gcsafe.} = ## ## A1 - A2 - A3 D5 - D6 @@ -384,45 +300,19 @@ proc updateBase(c: ForkedChainRef, newBase: BlockPos): ## \ \ ## C2 - C3 E4 - E5 ## - ## where `B1..B5` is the `newBase.branch` arc and `[B5]` is the `newBase.headNumber`. + ## where `B1..B5` is the `base` arc and `[B5]` is the `base.head`. ## ## The `base` will be moved to position `[B3]`. - ## Both chains `A` and `C` have be removed by updateFinalized. + ## Both chains `A` and `C` have been removed by `updateFinalized`. ## `D` and `E`, and `B4` onward will stay. ## B1, B2, B3 will be persisted to DB and removed from FC. - # Cleanup in-memory blocks starting from newBase backward - # e.g. B3 backward. Switch to parent branch if needed. 
- - template disposeBlocks(number, branch) = - let tailNumber = branch.tailNumber - while number >= tailNumber: - c.removeBlockFromCache(branch.blocks[number - tailNumber]) - inc count - - if number == 0: - # Don't go below genesis - break - dec number - - let oldBase = c.baseBranch.tailNumber - if newBase.number == oldBase: + if base.number == c.base.number: # No update, return return - var - branch = newBase.branch - number = newBase.number - 1 - count = 0 - - let - # Cache to prevent crash after we shift - # the blocks - newBaseHash = newBase.hash - nextIndex = int(newBase.number - branch.tailNumber) - # Persist the new base block - this replaces the base tx in coredb! - for x in newBase.everyNthBlock(4): + for x in base.everyNthBlock(4): const # We cap waiting for an idle slot in case there's a lot of network traffic # taking up all CPU - we don't want to _completely_ stop processing blocks @@ -437,39 +327,16 @@ proc updateBase(c: ForkedChainRef, newBase: BlockPos): # and prevent other modules accessing expired baseTxFrame. c.baseTxFrame = x.txFrame - disposeBlocks(number, branch) - - # Update base if it indeed changed - if nextIndex > 0: - # Only remove blocks with number lower than newBase.number - var blocks = newSeqOfCap[BlockDesc](branch.len-nextIndex) - for i in nextIndex.. 
1: notice "Finalized blocks persisted", nBlocks = count, - base = c.baseBranch.tailNumber, - baseHash = c.baseBranch.tailHash.short, + base = c.base.number, + baseHash = c.base.hash.short, pendingFCU = c.pendingFCU.short, resolvedFin= c.latestFinalizedBlockNumber else: debug "Finalized blocks persisted", nBlocks = count, - target = newBaseHash.short, - base = c.baseBranch.tailNumber, - baseHash = c.baseBranch.tailHash.short, + target = base.hash.short, + base = c.base.number, + baseHash = c.base.hash.short, pendingFCU = c.pendingFCU.short, resolvedFin= c.latestFinalizedBlockNumber +proc validateBlock(c: ForkedChainRef, + parent: BlockRef, + blk: Block, finalized: bool): Future[Result[Hash32, string]] + {.async: (raises: [CancelledError]).} = + let blkHash = blk.header.computeBlockHash + + if c.hashToBlock.hasKey(blkHash): + # Block exists, just return + return ok(blkHash) + + if blkHash == c.pendingFCU: + # Resolve the hash into latestFinalizedBlockNumber + c.latestFinalizedBlockNumber = max(blk.header.number, + c.latestFinalizedBlockNumber) + + let + parentFrame = parent.txFrame + txFrame = parentFrame.txFrameBegin + + # TODO shortLog-equivalent for eth types + debug "Validating block", + blkHash, blk = ( + parentHash: blk.header.parentHash, + coinbase: blk.header.coinbase, + stateRoot: blk.header.stateRoot, + transactionsRoot: blk.header.transactionsRoot, + receiptsRoot: blk.header.receiptsRoot, + number: blk.header.number, + gasLimit: blk.header.gasLimit, + gasUsed: blk.header.gasUsed, + nonce: blk.header.nonce, + baseFeePerGas: blk.header.baseFeePerGas, + withdrawalsRoot: blk.header.withdrawalsRoot, + blobGasUsed: blk.header.blobGasUsed, + excessBlobGas: blk.header.excessBlobGas, + parentBeaconBlockRoot: blk.header.parentBeaconBlockRoot, + requestsHash: blk.header.requestsHash, + ) + + var receipts = c.processBlock(parent.header, txFrame, blk, blkHash, finalized).valueOr: + txFrame.dispose() + return err(error) + + c.writeBaggage(blk, blkHash, txFrame, 
receipts) + + c.updateSnapshot(blk, txFrame) + + c.appendBlock(parent, blk, blkHash, txFrame, move(receipts)) + + for i, tx in blk.transactions: + c.txRecords[computeRlpHash(tx)] = (blkHash, uint64(i)) + + # Entering base auto forward mode while avoiding forkChoice + # handled region(head - baseDistance) + # e.g. live syncing with the tip very far from from our latest head + if c.pendingFCU != zeroHash32 and + c.base.number < c.latestFinalizedBlockNumber - c.baseDistance - c.persistBatchSize: + let + base = c.calculateNewBase(c.latestFinalizedBlockNumber, c.latest) + prevBase = c.base.number + + c.updateFinalized(base, base) + await c.updateBase(base) + + # If on disk head behind base, move it to base too. + if c.base.number > prevBase: + if c.fcuHead.number < c.base.number: + c.updateHead(c.base) + + ok(blkHash) + proc processQueue(c: ForkedChainRef) {.async: (raises: [CancelledError]).} = while true: # Cooperative concurrency: one block per loop iteration - because @@ -541,16 +479,22 @@ proc init*( base = baseTxFrame.getSavedStateBlockNumber baseHash = baseTxFrame.getBlockHash(base).expect("baseHash exists") baseHeader = baseTxFrame.getBlockHeader(baseHash).expect("base header exists") - baseBranch = branch(baseHeader, baseHash, baseTxFrame) + baseBlock = BlockRef( + blk : Block(header: baseHeader), + txFrame : baseTxFrame, + hash : baseHash, + parent : BlockRef(nil), + ) fcuHead = baseTxFrame.fcuHead().valueOr: - FcuHashAndNumber(hash: baseHash, number: baseHeader.number) + FcuHashAndNumber(hash: baseHash, number: base) fcuSafe = baseTxFrame.fcuSafe().valueOr: - FcuHashAndNumber(hash: baseHash, number: baseHeader.number) - fc = T(com: com, - baseBranch: baseBranch, - activeBranch: baseBranch, - branches: @[baseBranch], - hashToBlock: {baseHash: baseBranch.lastBlockPos}.toTable, + FcuHashAndNumber(hash: baseHash, number: base) + fc = T( + com: com, + base: baseBlock, + latest: baseBlock, + heads: @[baseBlock], + hashToBlock: {baseHash: baseBlock}.toTable, 
baseTxFrame: baseTxFrame, baseDistance: baseDistance, persistBatchSize:persistBatchSize, @@ -578,14 +522,15 @@ proc importBlock*(c: ForkedChainRef, blk: Block, finalized = false): template header(): Header = blk.header - c.hashToBlock.withValue(header.parentHash, parentPos) do: + let parent = c.hashToBlock.getOrDefault(header.parentHash) + if parent.isOk: # TODO: If engine API keep importing blocks # but not finalized it, e.g. current chain length > StagedBlocksThreshold # We need to persist some of the in-memory stuff # to a "staging area" or disk-backed memory but it must not afect `base`. # `base` is the point of no return, we only update it on finality. - var parentHash = ?(await c.validateBlock(parentPos[], blk, finalized)) + var parentHash = ?(await c.validateBlock(parent, blk, finalized)) while c.quarantine.hasOrphans(): const @@ -600,15 +545,16 @@ proc importBlock*(c: ForkedChainRef, blk: Block, finalized = false): let orphan = c.quarantine.popOrphan(parentHash).valueOr: break - c.hashToBlock.withValue(parentHash, parentCandidatePos) do: - parentHash = (await c.validateBlock(parentCandidatePos[], orphan, finalized)).valueOr: + let parent = c.hashToBlock.getOrDefault(parentHash) + if parent.isOk: + parentHash = (await c.validateBlock(parent, orphan, finalized)).valueOr: # Silent? 
# We don't return error here because the import is still ok() # but the quarantined blocks may not linked break - do: + else: break - do: + else: # If its parent is an invalid block # there is no hope the descendant is valid let blockHash = header.computeBlockHash @@ -633,13 +579,13 @@ proc forkChoice*(c: ForkedChainRef, c.pendingFCU = finalizedHash if safeHash != zeroHash32: - c.hashToBlock.withValue(safeHash, loc): - let number = loc[].number - c.fcuSafe.number = number + let safe = c.hashToBlock.getOrDefault(safeHash) + if safe.isOk: + c.fcuSafe.number = safe.number c.fcuSafe.hash = safeHash - ?loc[].txFrame.fcuSafe(c.fcuSafe) + ?safe.txFrame.fcuSafe(c.fcuSafe) - if headHash == c.activeBranch.headHash: + if headHash == c.latest.hash: if finalizedHash == zeroHash32: # Do nothing if the new head already our current head # and there is no request to new finality. @@ -659,26 +605,21 @@ proc forkChoice*(c: ForkedChainRef, # skip updateBase and updateFinalized if finalizedHash is zero. return ok() - c.updateFinalized(finalized) + c.updateFinalized(finalized, head) let - finalizedNumber = finalized.number - newBase = c.calculateNewBase(finalizedNumber, head) + base = c.calculateNewBase(finalized.number, head) - if newBase.hash == c.baseBranch.tailHash: + if base == c.base: # The base is not updated, return. return ok() - # Cache the base block number, updateBase might - # alter the BlockPos.index - let newBaseNumber = newBase.number - # At this point head.number >= base.number. # At this point finalized.number is <= head.number, # and possibly switched to other chain beside the one with head. 
- doAssert(finalizedNumber <= head.number) - doAssert(newBaseNumber <= finalizedNumber) - await c.updateBase(newBase) + doAssert(finalized.number <= head.number) + doAssert(base.number <= finalized.number) + await c.updateBase(base) ok() @@ -722,7 +663,7 @@ func haveBlockAndState*(c: ForkedChainRef, blockHash: Hash32): bool = c.hashToBlock.hasKey(blockHash) func txFrame*(c: ForkedChainRef, blockHash: Hash32): CoreDbTxRef = - if blockHash == c.baseBranch.tailHash: + if blockHash == c.base.hash: return c.baseTxFrame c.hashToBlock.withValue(blockHash, loc) do: @@ -737,7 +678,7 @@ func txFrame*(c: ForkedChainRef, header: Header): CoreDbTxRef = c.txFrame(header.computeBlockHash()) func latestTxFrame*(c: ForkedChainRef): CoreDbTxRef = - c.activeBranch.headTxFrame + c.latest.txFrame func com*(c: ForkedChainRef): CommonRef = c.com @@ -746,19 +687,19 @@ func db*(c: ForkedChainRef): CoreDbRef = c.com.db func latestHeader*(c: ForkedChainRef): Header = - c.activeBranch.headHeader + c.latest.header func latestNumber*(c: ForkedChainRef): BlockNumber = - c.activeBranch.headNumber + c.latest.number func latestHash*(c: ForkedChainRef): Hash32 = - c.activeBranch.headHash + c.latest.hash func baseNumber*(c: ForkedChainRef): BlockNumber = - c.baseBranch.tailNumber + c.base.number func baseHash*(c: ForkedChainRef): Hash32 = - c.baseBranch.tailHash + c.base.hash func txRecords*(c: ForkedChainRef, txHash: Hash32): (Hash32, uint64) = c.txRecords.getOrDefault(txHash, (Hash32.default, 0'u64)) @@ -772,15 +713,11 @@ func isHistoryExpiryActive*(c: ForkedChainRef): bool = func isPortalActive(c: ForkedChainRef): bool = (not c.portal.isNil) and c.portal.portalEnabled -func memoryBlock*(c: ForkedChainRef, blockHash: Hash32): BlockDesc = - c.hashToBlock.withValue(blockHash, loc): - return loc.branch.blocks[loc.index] - # Return default(BlockDesc) - func memoryTransaction*(c: ForkedChainRef, txHash: Hash32): Opt[(Transaction, BlockNumber)] = let (blockHash, index) = c.txRecords.getOrDefault(txHash, 
(Hash32.default, 0'u64)) - c.hashToBlock.withValue(blockHash, loc) do: - return Opt.some( (loc[].tx(index), loc[].number) ) + let b = c.hashToBlock.getOrDefault(blockHash) + if b.isOk: + return Opt.some( (b.blk.transactions[index], b.number) ) return Opt.none((Transaction, BlockNumber)) func memoryTxHashesForBlock*(c: ForkedChainRef, blockHash: Hash32): Opt[seq[Hash32]] = @@ -798,23 +735,21 @@ func memoryTxHashesForBlock*(c: ForkedChainRef, blockHash: Hash32): Opt[seq[Hash Opt.some(cachedTxHashes.mapIt(it[0])) proc latestBlock*(c: ForkedChainRef): Block = - if c.activeBranch.headNumber == c.baseBranch.tailNumber: + if c.latest.number == c.base.number: # It's a base block - return c.baseTxFrame.getEthBlock(c.activeBranch.headHash).expect("cursorBlock exists") - c.activeBranch.blocks[^1].blk + return c.baseTxFrame.getEthBlock(c.latest.hash).expect("baseBlock exists") + c.latest.blk proc headerByNumber*(c: ForkedChainRef, number: BlockNumber): Result[Header, string] = - if number > c.activeBranch.headNumber: + if number > c.latest.number: return err("Requested block number not exists: " & $number) - if number < c.baseBranch.tailNumber: + if number < c.base.number: return c.baseTxFrame.getBlockHeader(number) - var branch = c.activeBranch - while not branch.isNil: - if number >= branch.tailNumber: - return ok(branch.blocks[number - branch.tailNumber].blk.header) - branch = branch.parent + loopIt(c.latest): + if number == it.number: + return ok(it.header) err("Block not found, number = " & $number) @@ -822,25 +757,25 @@ func finalizedHeader*(c: ForkedChainRef): Header = c.hashToBlock.withValue(c.pendingFCU, loc): return loc[].header - c.baseBranch.tailHeader + c.base.header func safeHeader*(c: ForkedChainRef): Header = c.hashToBlock.withValue(c.fcuSafe.hash, loc): return loc[].header - c.baseBranch.tailHeader + c.base.header func finalizedBlock*(c: ForkedChainRef): Block = c.hashToBlock.withValue(c.pendingFCU, loc): return loc[].blk - c.baseBranch.tailBlock + c.base.blk 
func safeBlock*(c: ForkedChainRef): Block = c.hashToBlock.withValue(c.fcuSafe.hash, loc): return loc[].blk - c.baseBranch.tailBlock + c.base.blk proc headerByHash*(c: ForkedChainRef, blockHash: Hash32): Result[Header, string] = c.hashToBlock.withValue(blockHash, loc): @@ -900,10 +835,10 @@ proc payloadBodyV1ByHash*(c: ForkedChainRef, blockHash: Hash32): Result[Executio move(blk) proc payloadBodyV1ByNumber*(c: ForkedChainRef, number: BlockNumber): Result[ExecutionPayloadBodyV1, string] = - if number > c.activeBranch.headNumber: + if number > c.latest.number: return err("Requested block number not exists: " & $number) - if number <= c.baseBranch.tailNumber: + if number <= c.base.number: let header = ?c.baseTxFrame.getBlockHeader(number) blk = c.baseTxFrame.getExecutionPayloadBodyV1(header) @@ -917,19 +852,17 @@ proc payloadBodyV1ByNumber*(c: ForkedChainRef, number: BlockNumber): Result[Exec return blk - var branch = c.activeBranch - while not branch.isNil: - if number >= branch.tailNumber: - return ok(toPayloadBody(branch.blocks[number - branch.tailNumber].blk)) - branch = branch.parent + loopIt(c.latest): + if number >= it.number: + return ok(toPayloadBody(it.blk)) err("Block not found, number = " & $number) proc blockByNumber*(c: ForkedChainRef, number: BlockNumber): Result[Block, string] = - if number > c.activeBranch.headNumber: + if number > c.latest.number: return err("Requested block number not exists: " & $number) - if number <= c.baseBranch.tailNumber: + if number <= c.base.number: let blk = c.baseTxFrame.getEthBlock(number) # Txs not there in db - Happens during era1/era import, when we don't store txs and receipts if blk.isErr or (blk.get.transactions.len == 0 and blk.get.header.transactionsRoot != emptyRoot): @@ -939,11 +872,9 @@ proc blockByNumber*(c: ForkedChainRef, number: BlockNumber): Result[Block, strin else: return blk - var branch = c.activeBranch - while not branch.isNil: - if number >= branch.tailNumber: - return ok(branch.blocks[number - 
branch.tailNumber].blk) - branch = branch.parent + loopIt(c.latest): + if number >= it.number: + return ok(it.blk) err("Block not found, number = " & $number) @@ -953,7 +884,7 @@ proc blockHeader*(c: ForkedChainRef, blk: BlockHashOrNumber): Result[Header, str c.headerByNumber(blk.number) proc receiptsByBlockHash*(c: ForkedChainRef, blockHash: Hash32): Result[seq[StoredReceipt], string] = - if blockHash != c.baseBranch.tailHash: + if blockHash != c.base.hash: c.hashToBlock.withValue(blockHash, loc): return ok(loc[].receipts) @@ -965,54 +896,43 @@ proc receiptsByBlockHash*(c: ForkedChainRef, blockHash: Hash32): Result[seq[Stor func payloadBodyV1FromBaseTo*(c: ForkedChainRef, last: BlockNumber, list: var seq[Opt[ExecutionPayloadBodyV1]]) = - # return block in reverse order var - branch = c.activeBranch - branches = newSeqOfCap[BranchRef](c.branches.len) - - while not branch.isNil: - branches.add(branch) - branch = branch.parent - - for i in countdown(branches.len-1, 0): - branch = branches[i] - for y in 0.. 
last: - return - list.add Opt.some(toPayloadBody(bd.blk)) - -func equalOrAncestorOf*(c: ForkedChainRef, blockHash: Hash32, childHash: Hash32): bool = - if blockHash == childHash: + blocks = newSeqOfCap[BlockRef](last-c.base.number+1) + + loopIt(c.latest): + if it.number <= last: + blocks.add(it) + + for i in countdown(blocks.len-1, 0): + let y = blocks[i] + list.add Opt.some(toPayloadBody(y.blk)) + +func equalOrAncestorOf*(c: ForkedChainRef, blockHash: Hash32, headHash: Hash32): bool = + if blockHash == headHash: return true - c.hashToBlock.withValue(childHash, childLoc): - c.hashToBlock.withValue(blockHash, loc): - var branch = childLoc.branch - while not branch.isNil: - if loc.branch == branch: - return true - branch = branch.parent + let head = c.hashToBlock.getOrDefault(headHash) + loopIt(head): + if it.hash == blockHash: + return true false proc isCanonicalAncestor*(c: ForkedChainRef, blockNumber: BlockNumber, blockHash: Hash32): bool = - if blockNumber >= c.activeBranch.headNumber: + if blockNumber >= c.latest.number: return false - if blockHash == c.activeBranch.headHash: + if blockHash == c.latest.hash: return false - if c.baseBranch.tailNumber < c.activeBranch.headNumber: + if c.base.number < c.latest.number: # The current canonical chain in memory is headed by - # activeBranch.header - var branch = c.activeBranch - while not branch.isNil: - if branch.hasHashAndNumber(blockHash, blockNumber): + # latest.header + loopIt(c.latest): + if it.hash == blockHash and it.number == blockNumber: return true - branch = branch.parent # canonical chain in database should have a marker # and the marker is block number @@ -1024,15 +944,10 @@ iterator txHashInRange*(c: ForkedChainRef, fromHash: Hash32, toHash: Hash32): Ha ## toHash should be ancestor of fromHash ## exclude base from iteration, new block produced by txpool ## should not reach base - let baseHash = c.baseBranch.tailHash - var prevHash = fromHash - while prevHash != baseHash: - 
c.hashToBlock.withValue(prevHash, loc) do: - if toHash == prevHash: - break - for tx in loc[].transactions: - let txHash = computeRlpHash(tx) - yield txHash - prevHash = loc[].parentHash - do: + let head = c.hashToBlock.getOrDefault(fromHash) + loopIt(head): + if toHash == it.hash: break + for tx in it.blk.transactions: + let txHash = computeRlpHash(tx) + yield txHash diff --git a/execution_chain/core/chain/forked_chain/chain_branch.nim b/execution_chain/core/chain/forked_chain/chain_branch.nim index 6eef09b62c..672730e85a 100644 --- a/execution_chain/core/chain/forked_chain/chain_branch.nim +++ b/execution_chain/core/chain/forked_chain/chain_branch.nim @@ -16,180 +16,63 @@ import ../../../db/core_db type - BlockDesc* = object + BlockRef* = ref object blk* : Block txFrame* : CoreDbTxRef receipts*: seq[StoredReceipt] hash* : Hash32 + parent* : BlockRef - BlockPos* = object - branch*: BranchRef - index* : int - - BranchRef* = ref object - blocks*: seq[BlockDesc] - parent*: BranchRef - # If parent.isNil: it is a base branch - - index* : uint - # Used by serializer, a replacement for parent - # 0: nil - # >0: index-1 to branches list - # - # Also used as a flag when replaying state - # after deserialize. 
- - TxFrameAndStateRoot* = object - txFrame* : CoreDbTxRef - stateRoot*: Hash32 - -func tailBlock*(brc: BranchRef): Block = - brc.blocks[0].blk - -func tailNumber*(brc: BranchRef): BlockNumber = - brc.blocks[0].blk.header.number - -func headNumber*(brc: BranchRef): BlockNumber = - brc.blocks[^1].blk.header.number + index* : uint + # Alias to parent when serializing + # Also used for DAG node coloring -func tailHash*(brc: BranchRef): Hash32 = - brc.blocks[0].hash - -func headHash*(brc: BranchRef): Hash32 = - brc.blocks[^1].hash - -func len*(brc: BranchRef): int = - brc.blocks.len - -func headTxFrame*(brc: BranchRef): CoreDbTxRef = - brc.blocks[^1].txFrame - -func tailHeader*(brc: BranchRef): Header = - brc.blocks[0].blk.header - -func headHeader*(brc: BranchRef): Header = - brc.blocks[^1].blk.header - -func append*(brc: BranchRef, blk: BlockDesc) = - brc.blocks.add(blk) - -func lastBlockPos*(brc: BranchRef): BlockPos = - BlockPos( - branch: brc, - index : brc.len - 1, - ) - -func firstBlockPos*(brc: BranchRef): BlockPos = - BlockPos( - branch: brc, - index : 0, - ) - -func `==`*(a, b: BranchRef): bool = - a.headHash == b.headHash - -func hasHashAndNumber*(brc: BranchRef, hash: Hash32, number: BlockNumber): bool = - for i in 0.. 
step and number > tailNumber: - let bd = addr branch.blocks[number-tailNumber] - steps.add TxFrameAndStateRoot( - txFrame : bd.txFrame, - stateRoot: bd.blk.header.stateRoot - ) + number = base.number - min(base.number, step) + steps = newSeqOfCap[BlockRef](128) + + steps.add base + + loopIt(base): + if it.number == number: + steps.add it number -= min(number, step) - branch = branch.parent for i in countdown(steps.len-1, 0): yield steps[i] diff --git a/execution_chain/core/chain/forked_chain/chain_desc.nim b/execution_chain/core/chain/forked_chain/chain_desc.nim index 91beaba5b0..326b3272d8 100644 --- a/execution_chain/core/chain/forked_chain/chain_desc.nim +++ b/execution_chain/core/chain/forked_chain/chain_desc.nim @@ -28,16 +28,19 @@ type ForkedChainRef* = ref object com*: CommonRef - hashToBlock* : Table[Hash32, BlockPos] - # A map of block hash to a block position in a branch. + hashToBlock* : Table[Hash32, BlockRef] + # A map of block hash to a block. - branches* : seq[BranchRef] - baseBranch* : BranchRef - # A branch contain the base block + base* : BlockRef + # The base block, the last block stored in database. + # Any blocks newer than base is kept in memory. - activeBranch*: BranchRef - # Every time a new block added to a branch, - # that branch automatically become the active branch. + latest* : BlockRef + # Every time a new block added, + # that block automatically become the latest block. 
+ + heads* : seq[BlockRef] + # Candidate heads of candidate chains quarantine* : Quarantine diff --git a/execution_chain/core/chain/forked_chain/chain_serialize.nim b/execution_chain/core/chain/forked_chain/chain_serialize.nim index d291114b78..fe5cd88de5 100644 --- a/execution_chain/core/chain/forked_chain/chain_serialize.nim +++ b/execution_chain/core/chain/forked_chain/chain_serialize.nim @@ -31,9 +31,10 @@ type blockNumber: uint64 FcState = object - numBranches: uint - baseBranch: uint - activeBranch: uint + numBlocks: uint + base: uint + latest: uint + heads: seq[uint] pendingFCU: Hash32 latestFinalizedBlockNumber: uint64 txRecords: seq[TxRecord] @@ -44,23 +45,25 @@ type # RLP serializer functions # ------------------------------------------------------------------------------ -proc append(w: var RlpWriter, bd: BlockDesc) = - w.startList(2) - w.append(bd.blk) - w.append(bd.hash) - -proc append(w: var RlpWriter, brc: BranchRef) = - w.startList(2) - let parentIndex = if brc.parent.isNil: 0'u - else: brc.parent.index + 1'u +proc append(w: var RlpWriter, b: BlockRef) = + w.startList(3) + w.append(b.blk) + w.append(b.hash) + let parentIndex = if b.parent.isNil: 0'u + else: b.parent.index + 1'u w.append(parentIndex) - w.append(brc.blocks) proc append(w: var RlpWriter, fc: ForkedChainRef) = - w.startList(8) - w.append(fc.branches.len.uint) - w.append(fc.baseBranch.index) - w.append(fc.activeBranch.index) + w.startList(9) + w.append(fc.hashToBlock.len.uint) + w.append(fc.base.index) + w.append(fc.latest.index) + + var heads = newSeqOfCap[uint](fc.heads.len) + for h in fc.heads: + heads.add h.index + + w.append(heads) w.append(fc.pendingFCU) w.append(fc.latestFinalizedBlockNumber) w.startList(fc.txRecords.len) @@ -73,23 +76,19 @@ proc append(w: var RlpWriter, fc: ForkedChainRef) = w.append(fc.fcuHead) w.append(fc.fcuSafe) -proc read(rlp: var Rlp, T: type BlockDesc): T {.raises: [RlpError].} = +proc read(rlp: var Rlp, T: type BlockRef): T {.raises: [RlpError].} = 
rlp.tryEnterList() result = T() rlp.read(result.blk) rlp.read(result.hash) - -proc read(rlp: var Rlp, T: type BranchRef): T {.raises: [RlpError].} = - rlp.tryEnterList() - result = T() rlp.read(result.index) - rlp.read(result.blocks) proc read(rlp: var Rlp, T: type FcState): T {.raises: [RlpError].} = rlp.tryEnterList() - rlp.read(result.numBranches) - rlp.read(result.baseBranch) - rlp.read(result.activeBranch) + rlp.read(result.numBlocks) + rlp.read(result.base) + rlp.read(result.latest) + rlp.read(result.heads) rlp.read(result.pendingFCU) rlp.read(result.latestFinalizedBlockNumber) rlp.read(result.txRecords) @@ -104,7 +103,7 @@ const # The state always use 0 index FcStateKey = fcStateKey 0 -template branchIndexKey(i: SomeInteger): openArray[byte] = +template blockIndexKey(i: SomeInteger): openArray[byte] = # We reuse the fcStateKey but +1 fcStateKey((i+1).uint).toOpenArray @@ -121,102 +120,105 @@ proc getState(db: CoreDbTxRef): Opt[FcState] = err() proc replayBlock(fc: ForkedChainRef; - parent: BlockPos, - bd: var BlockDesc): Result[void, string] = + parent: BlockRef, + blk: BlockRef): Result[void, string] = let parentFrame = parent.txFrame txFrame = parentFrame.txFrameBegin - var receipts = fc.processBlock(parent.header, txFrame, bd.blk, bd.hash, false).valueOr: + var receipts = fc.processBlock(parent.header, txFrame, blk.blk, blk.hash, false).valueOr: txFrame.dispose() return err(error) - fc.writeBaggage(bd.blk, bd.hash, txFrame, receipts) - fc.updateSnapshot(bd.blk, txFrame) + fc.writeBaggage(blk.blk, blk.hash, txFrame, receipts) + fc.updateSnapshot(blk.blk, txFrame) - bd.txFrame = txFrame - bd.receipts = move(receipts) + blk.txFrame = txFrame + blk.receipts = move(receipts) ok() proc replayBranch(fc: ForkedChainRef; - parent: BlockPos; - branch: BranchRef; - start: int;): Result[void, string] = - - var parent = parent - for i in start.. 
parent.number: + blocks.add it - for brc in fc.branches: - # Skip already replayed branch - if brc.index == 0: - continue - - if brc.parent == branch: - doAssert(brc.tailNumber > branch.tailNumber) - doAssert((brc.tailNumber - branch.tailNumber) > 0) - parent.index = int(brc.tailNumber - branch.tailNumber - 1) - ?fc.replayBranch(parent, brc, 0) + var parent = parent + for i in countdown(blocks.len-1, 0): + ?fc.replayBlock(parent, blocks[i]) + parent = blocks[i] ok() proc replay(fc: ForkedChainRef): Result[void, string] = # Should have no parent - doAssert fc.baseBranch.index == 0 - doAssert fc.baseBranch.parent.isNil + doAssert fc.base.index == 0 + doAssert fc.base.parent.isNil # Receipts for base block are loaded from database # see `receiptsByBlockHash` - fc.baseBranch.blocks[0].txFrame = fc.baseTxFrame - - # Replay, exclude base block, start from 1 - let parent = BlockPos( - branch: fc.baseBranch - ) - fc.replayBranch(parent, fc.baseBranch, 1) - -proc reset(fc: ForkedChainRef, branches: sink seq[BranchRef]) = - let baseBranch = branches[0] - - fc.baseBranch = baseBranch - fc.activeBranch = baseBranch - fc.branches = move(branches) - fc.hashToBlock = {baseBranch.tailHash: baseBranch.lastBlockPos}.toTable - fc.pendingFCU = zeroHash32 + fc.base.txFrame = fc.baseTxFrame + fc.base.color() + + for head in fc.heads: + loopIt(head): + if it.colored: + ?fc.replayBranch(it, head) + break + + ok() + +proc reset(fc: ForkedChainRef, base: BlockRef) = + fc.base = base + fc.latest = base + fc.heads = @[base] + fc.hashToBlock = {base.hash: base}.toTable + fc.pendingFCU = zeroHash32 fc.latestFinalizedBlockNumber = 0'u64 fc.txRecords.clear() fc.fcuHead.reset() fc.fcuSafe.reset() +func toString(list: openArray[BlockRef]): string = + result.add '[' + for i, b in list: + result.add $(b.number) + if i < list.len-1: + result.add ',' + result.add ']' + # ------------------------------------------------------------------------------ # Public functions # 
------------------------------------------------------------------------------ proc serialize*(fc: ForkedChainRef, txFrame: CoreDbTxRef): Result[void, CoreDbError] = - for i, brc in fc.branches: - brc.index = uint i + var i = 0 + for b in fc.hashToBlock.values: + b.index = uint i + inc i + ?txFrame.put(FcStateKey.toOpenArray, rlp.encode(fc)) - var numBlocks = 0 - for i, brc in fc.branches: - numBlocks += brc.len - ?txFrame.put(branchIndexKey(i), rlp.encode(brc)) + + for b in fc.hashToBlock.values: + ?txFrame.put(blockIndexKey(b.index), rlp.encode(b)) info "Blocks DAG written to database", - base=fc.baseBranch.tailNumber, - baseHash=fc.baseBranch.tailHash.short, - latest=fc.activeBranch.headNumber, - latestHash=fc.activeBranch.headHash.short, + base=fc.base.number, + baseHash=fc.base.hash.short, + latest=fc.latest.number, + latestHash=fc.latest.hash.short, head=fc.fcuHead.number, headHash=fc.fcuHead.hash.short, finalized=fc.latestFinalizedBlockNumber, finalizedHash=fc.pendingFCU.short, - blocksInMemory=numBlocks + blocksInMemory=fc.hashToBlock.len, + heads=fc.heads.toString ok() @@ -224,63 +226,58 @@ proc deserialize*(fc: ForkedChainRef): Result[void, string] = let state = fc.baseTxFrame.getState().valueOr: return err("Cannot find previous FC state in database") - let prevBaseHash = fc.baseBranch.tailHash - var - branches = move(fc.branches) - numBlocksStored = 0 + let prevBase = fc.base + var blocks = newSeq[BlockRef](state.numBlocks) - fc.branches.setLen(state.numBranches) try: - for i in 0.. 64: + if state.numBlocks > 64: info "Please wait until DAG finish loading..." - if fc.baseBranch.tailHash != prevBaseHash: - fc.reset(branches) + if fc.base.hash != prevBase.hash: + fc.reset(prevBase) return err("loaded baseHash != baseHash") for tx in state.txRecords: fc.txRecords[tx.txHash] = (tx.blockHash, tx.blockNumber) - for brc in fc.branches: - if brc.index > 0: - brc.parent = fc.branches[brc.index-1] - - for i in 0.. 
0: + b.parent = blocks[b.index-1] + fc.hashToBlock[b.hash] = b + b.noColor() # prepare for replay fc.replay().isOkOr: - fc.reset(branches) + fc.reset(prevBase) return err(error) fc.hashToBlock.withValue(fc.fcuHead.hash, val) do: diff --git a/tests/test_forked_chain.nim b/tests/test_forked_chain.nim index 7c61a94173..85fcf6020c 100644 --- a/tests/test_forked_chain.nim +++ b/tests/test_forked_chain.nim @@ -19,6 +19,7 @@ import ../execution_chain/core/chain/forked_chain, ../execution_chain/core/chain/forked_chain/chain_desc, ../execution_chain/core/chain/forked_chain/chain_serialize, + ../execution_chain/core/chain/forked_chain/chain_branch, ../execution_chain/db/ledger, ../execution_chain/db/era1_db, ../execution_chain/db/fcu_db, @@ -171,612 +172,623 @@ template checkPersisted(chain, blk) = debugEcho "CHECK FINALIZED FAIL: ", res.error debugEcho "Block Number: ", blk.header.number -proc forkedChainMain*() = - suite "ForkedChainRef tests": - var env = setupEnv() - let - cc = env.newCom - genesisHash = cc.genesisHeader.computeBlockHash - genesis = Block.init(cc.genesisHeader, BlockBody()) - baseTxFrame = cc.db.baseTxFrame() - txFrame = baseTxFrame.txFrameBegin - let - blk1 = txFrame.makeBlk(1, genesis) - blk2 = txFrame.makeBlk(2, blk1) - blk3 = txFrame.makeBlk(3, blk2) - dbTx = txFrame.txFrameBegin - blk4 = dbTx.makeBlk(4, blk3) - blk5 = dbTx.makeBlk(5, blk4) - blk6 = dbTx.makeBlk(6, blk5) - blk7 = dbTx.makeBlk(7, blk6) - dbTx.dispose() - let - B4 = txFrame.makeBlk(4, blk3, 1.byte) - dbTx2 = txFrame.txFrameBegin - B5 = dbTx2.makeBlk(5, B4) - B6 = dbTx2.makeBlk(6, B5) - B7 = dbTx2.makeBlk(7, B6) - dbTx2.dispose() +suite "ForkedChainRef tests": + var env = setupEnv() + let + cc = env.newCom + genesisHash = cc.genesisHeader.computeBlockHash + genesis = Block.init(cc.genesisHeader, BlockBody()) + baseTxFrame = cc.db.baseTxFrame() + txFrame = baseTxFrame.txFrameBegin + let + blk1 = txFrame.makeBlk(1, genesis) + blk2 = txFrame.makeBlk(2, blk1) + blk3 = txFrame.makeBlk(3, 
blk2) + dbTx = txFrame.txFrameBegin + blk4 = dbTx.makeBlk(4, blk3) + blk5 = dbTx.makeBlk(5, blk4) + blk6 = dbTx.makeBlk(6, blk5) + blk7 = dbTx.makeBlk(7, blk6) + dbTx.dispose() + let + B4 = txFrame.makeBlk(4, blk3, 1.byte) + dbTx2 = txFrame.txFrameBegin + B5 = dbTx2.makeBlk(5, B4) + B6 = dbTx2.makeBlk(6, B5) + B7 = dbTx2.makeBlk(7, B6) + dbTx2.dispose() + let + C5 = txFrame.makeBlk(5, blk4, 1.byte) + C6 = txFrame.makeBlk(6, C5) + C7 = txFrame.makeBlk(7, C6) + txFrame.dispose() + + test "newBase == oldBase": + const info = "newBase == oldBase" + let com = env.newCom() + var chain = ForkedChainRef.init(com) + # same header twice + checkImportBlock(chain, blk1) + checkImportBlock(chain, blk1) + checkImportBlock(chain, blk2) + checkImportBlock(chain, blk3) + check chain.validate info & " (1)" + # no parent + checkImportBlockErr(chain, blk5) + checkHeadHash chain, genesisHash + check chain.latestHash == blk3.blockHash + check chain.validate info & " (2)" + # finalized > head -> error + checkForkChoiceErr(chain, blk1, blk3) + check chain.validate info & " (3)" + # blk4 is not part of chain + checkForkChoiceErr(chain, blk4, blk2) + # finalized > head -> error + checkForkChoiceErr(chain, blk1, blk2) + # blk4 is not part of chain + checkForkChoiceErr(chain, blk2, blk4) + # finalized < head -> ok + checkForkChoice(chain, blk2, blk1) + checkHeadHash chain, blk2.blockHash + check chain.latestHash == blk3.blockHash + check chain.validate info & " (7)" + # finalized == head -> ok + checkForkChoice(chain, blk2, blk2) + checkHeadHash chain, blk2.blockHash + check chain.latestHash == blk3.blockHash + check chain.baseNumber == 0'u64 + check chain.validate info & " (8)" + # baggage written + check chain.wdWritten(blk1) == 1 + check chain.wdWritten(blk2) == 2 + check chain.validate info & " (9)" + + test "newBase on activeBranch": + const info = "newBase on activeBranch" + let com = env.newCom() + var chain = ForkedChainRef.init(com, baseDistance = 3, persistBatchSize = 0) + 
checkImportBlock(chain, blk1) + checkImportBlock(chain, blk2) + checkImportBlock(chain, blk3) + checkImportBlock(chain, blk4) + checkImportBlock(chain, blk5) + checkImportBlock(chain, blk6) + checkImportBlock(chain, blk7) + checkImportBlock(chain, blk4) + check chain.validate info & " (1)" + # newbase == head + checkForkChoice(chain, blk7, blk6) + check chain.validate info & " (2)" + checkHeadHash chain, blk7.blockHash + check chain.latestHash == blk7.blockHash + check chain.heads.len == 1 + check chain.wdWritten(blk7) == 7 + # head - baseDistance must been persisted + checkPersisted(chain, blk3) + + # It is FC module who is responsible for saving + # finalized hash on a correct txFrame. + let txFrame = chain.txFrame(blk6.blockHash) + let savedFinalized = txFrame.fcuFinalized().expect("OK") + check blk6.blockHash == savedFinalized.hash + + # make sure aristo not wipe out baggage + check chain.wdWritten(blk3) == 3 + check chain.validate info & " (9)" + + test "newBase between oldBase and head": + const info = "newBase between oldBase and head" + let com = env.newCom() + var chain = ForkedChainRef.init(com, baseDistance = 3, persistBatchSize = 0) + checkImportBlock(chain, blk1) + checkImportBlock(chain, blk2) + checkImportBlock(chain, blk3) + checkImportBlock(chain, blk4) + checkImportBlock(chain, blk5) + checkImportBlock(chain, blk6) + checkImportBlock(chain, blk7) + check chain.validate info & " (1)" + checkForkChoice(chain, blk7, blk6) + check chain.validate info & " (2)" + checkHeadHash chain, blk7.blockHash + check chain.latestHash == blk7.blockHash + check chain.heads.len == 1 + check chain.wdWritten(blk6) == 6 + check chain.wdWritten(blk7) == 7 + # head - baseDistance must been persisted + checkPersisted(chain, blk3) + # make sure aristo not wipe out baggage + check chain.wdWritten(blk3) == 3 + check chain.validate info & " (9)" + + test "newBase == oldBase, fork and stay on that fork": + const info = "newBase == oldBase, fork .." 
+ let com = env.newCom() + var chain = ForkedChainRef.init(com) + checkImportBlock(chain, blk1) + checkImportBlock(chain, blk2) + checkImportBlock(chain, blk3) + checkImportBlock(chain, blk4) + checkImportBlock(chain, blk5) + checkImportBlock(chain, blk6) + checkImportBlock(chain, blk7) + checkImportBlock(chain, B4) + checkImportBlock(chain, B5) + checkImportBlock(chain, B6) + checkImportBlock(chain, B7) + check chain.validate info & " (1)" + checkForkChoice(chain, B7, B5) + checkHeadHash chain, B7.blockHash + check chain.latestHash == B7.blockHash + check chain.baseNumber == 0'u64 + check chain.heads.len == 1 # B become canonical + check chain.hashToBlock.len == 8 # 0,1,2,3,B4,B5,B6,B7 + check chain.validate info & " (9)" + + test "newBase move forward, fork and stay on that fork": + const info = "newBase move forward, fork .." + let com = env.newCom() + var chain = ForkedChainRef.init(com, baseDistance = 3, persistBatchSize = 0) + checkImportBlock(chain, blk1) + checkImportBlock(chain, blk2) + checkImportBlock(chain, blk3) + checkImportBlock(chain, blk4) + checkImportBlock(chain, blk5) + checkImportBlock(chain, blk6) + checkImportBlock(chain, blk7) + checkImportBlock(chain, B4) + checkImportBlock(chain, B5) + checkImportBlock(chain, B6) + checkImportBlock(chain, B7) + checkImportBlock(chain, B4) + check chain.validate info & " (1)" + checkForkChoice(chain, B6, B4) + check chain.validate info & " (2)" + checkHeadHash chain, B6.blockHash + check chain.latestHash == B7.blockHash + check chain.baseNumber == 3'u64 + check chain.heads.len == 1 + check chain.validate info & " (9)" + + test "newBase on shorter canonical arc, remove oldBase branches": + const info = "newBase on shorter canonical, remove oldBase branches" + let com = env.newCom() + var chain = ForkedChainRef.init(com, baseDistance = 3, persistBatchSize = 0) + checkImportBlock(chain, blk1) + checkImportBlock(chain, blk2) + checkImportBlock(chain, blk3) + checkImportBlock(chain, blk4) + 
checkImportBlock(chain, blk5) + checkImportBlock(chain, blk6) + checkImportBlock(chain, blk7) + checkImportBlock(chain, B4) + checkImportBlock(chain, B5) + checkImportBlock(chain, B6) + checkImportBlock(chain, B7) + check chain.validate info & " (1)" + checkForkChoice(chain, B7, B6) + check chain.validate info & " (2)" + checkHeadHash chain, B7.blockHash + check chain.latestHash == B7.blockHash + check chain.baseNumber == 4'u64 + check chain.heads.len == 1 + check chain.validate info & " (9)" + + test "newBase on curbed non-canonical arc": + const info = "newBase on curbed non-canonical .." + let com = env.newCom() + var chain = ForkedChainRef.init(com, baseDistance = 5, persistBatchSize = 0) + checkImportBlock(chain, blk1) + checkImportBlock(chain, blk2) + checkImportBlock(chain, blk3) + checkImportBlock(chain, blk4) + checkImportBlock(chain, blk5) + checkImportBlock(chain, blk6) + checkImportBlock(chain, blk7) + checkImportBlock(chain, B4) + checkImportBlock(chain, B5) + checkImportBlock(chain, B6) + checkImportBlock(chain, B7) + check chain.validate info & " (1)" + checkForkChoice(chain, B7, B5) + check chain.validate info & " (2)" + checkHeadHash chain, B7.blockHash + check chain.latestHash == B7.blockHash + check chain.baseNumber > 0 + check chain.baseNumber < B4.header.number + check chain.heads.len == 1 + check chain.validate info & " (9)" + + test "newBase == oldBase, fork and return to old chain": + const info = "newBase == oldBase, fork .." 
+ let com = env.newCom() + var chain = ForkedChainRef.init(com) + checkImportBlock(chain, blk1) + checkImportBlock(chain, blk2) + checkImportBlock(chain, blk3) + checkImportBlock(chain, blk4) + checkImportBlock(chain, blk5) + checkImportBlock(chain, blk6) + checkImportBlock(chain, blk7) + checkImportBlock(chain, B4) + checkImportBlock(chain, B5) + checkImportBlock(chain, B6) + checkImportBlock(chain, B7) + check chain.validate info & " (1)" + checkForkChoice(chain, blk7, blk5) + check chain.validate info & " (2)" + checkHeadHash chain, blk7.blockHash + check chain.latestHash == blk7.blockHash + check chain.baseNumber == 0'u64 + check chain.heads.len == 1 + check chain.validate info & " (9)" + + test "newBase on activeBranch, fork and return to old chain": + const info = "newBase on activeBranch, fork .." + let com = env.newCom() + var chain = ForkedChainRef.init(com, baseDistance = 3) + checkImportBlock(chain, blk1) + checkImportBlock(chain, blk2) + checkImportBlock(chain, blk3) + checkImportBlock(chain, blk4) + checkImportBlock(chain, blk5) + checkImportBlock(chain, blk6) + checkImportBlock(chain, blk7) + checkImportBlock(chain, B4) + checkImportBlock(chain, B5) + checkImportBlock(chain, B6) + checkImportBlock(chain, B7) + checkImportBlock(chain, blk4) + check chain.validate info & " (1)" + checkForkChoice(chain, blk7, blk6) + check chain.validate info & " (2)" + checkHeadHash chain, blk7.blockHash + check chain.latestHash == blk7.blockHash + check chain.heads.len == 1 + check chain.base.number == 0 + check chain.validate info & " (9)" + + test "newBase on shorter canonical arc, discard arc with oldBase" & + " (ign dup block)": + const info = "newBase on shorter canonical .." 
+ let com = env.newCom() + var chain = ForkedChainRef.init(com, baseDistance = 3, persistBatchSize = 0) + checkImportBlock(chain, blk1) + checkImportBlock(chain, blk2) + checkImportBlock(chain, blk3) + checkImportBlock(chain, blk4) + checkImportBlock(chain, blk5) + checkImportBlock(chain, blk6) + checkImportBlock(chain, blk7) + checkImportBlock(chain, B4) + checkImportBlock(chain, B5) + checkImportBlock(chain, B6) + checkImportBlock(chain, B7) + checkImportBlock(chain, blk4) + check chain.validate info & " (1)" + checkForkChoice(chain, B7, B5) + check chain.validate info & " (2)" + checkHeadHash chain, B7.blockHash + check chain.latestHash == B7.blockHash + check chain.baseNumber == 4'u64 + check chain.heads.len == 1 + check chain.validate info & " (9)" + + test "newBase on longer canonical arc, discard new branch": + const info = "newBase on longer canonical .." + let com = env.newCom() + var chain = ForkedChainRef.init(com, baseDistance = 3, persistBatchSize = 0) + checkImportBlock(chain, blk1) + checkImportBlock(chain, blk2) + checkImportBlock(chain, blk3) + checkImportBlock(chain, blk4) + checkImportBlock(chain, blk5) + checkImportBlock(chain, blk6) + checkImportBlock(chain, blk7) + checkImportBlock(chain, B4) + checkImportBlock(chain, B5) + checkImportBlock(chain, B6) + checkImportBlock(chain, B7) + check chain.validate info & " (1)" + checkForkChoice(chain, blk7, blk5) + check chain.validate info & " (2)" + checkHeadHash chain, blk7.blockHash + check chain.latestHash == blk7.blockHash + check chain.baseNumber > 0 + check chain.baseNumber < blk5.header.number + check chain.heads.len == 1 + check chain.validate info & " (9)" + + test "headerByNumber": + const info = "headerByNumber" + let com = env.newCom() + var chain = ForkedChainRef.init(com, baseDistance = 3) + checkImportBlock(chain, blk1) + checkImportBlock(chain, blk2) + checkImportBlock(chain, blk3) + checkImportBlock(chain, blk4) + checkImportBlock(chain, blk5) + checkImportBlock(chain, blk6) + 
checkImportBlock(chain, blk7) + checkImportBlock(chain, B4) + checkImportBlock(chain, B5) + checkImportBlock(chain, B6) + checkImportBlock(chain, B7) + check chain.validate info & " (1)" + checkForkChoice(chain, blk7, blk5) + check chain.validate info & " (2)" + # cursor + check chain.headerByNumber(8).isErr + check chain.headerByNumber(7).expect("OK").number == 7 + check chain.headerByNumber(7).expect("OK").computeBlockHash == blk7.blockHash + # from db + check chain.headerByNumber(3).expect("OK").number == 3 + check chain.headerByNumber(3).expect("OK").computeBlockHash == blk3.blockHash + # base + check chain.headerByNumber(4).expect("OK").number == 4 + check chain.headerByNumber(4).expect("OK").computeBlockHash == blk4.blockHash + # from cache + check chain.headerByNumber(5).expect("OK").number == 5 + check chain.headerByNumber(5).expect("OK").computeBlockHash == blk5.blockHash + check chain.validate info & " (9)" + + test "3 branches, alternating imports": + const info = "3 branches, alternating imports" + let com = env.newCom() + var chain = ForkedChainRef.init(com, baseDistance = 3) + checkImportBlock(chain, blk1) + checkImportBlock(chain, blk2) + checkImportBlock(chain, blk3) + checkImportBlock(chain, B4) + checkImportBlock(chain, blk4) + checkImportBlock(chain, B5) + checkImportBlock(chain, blk5) + checkImportBlock(chain, C5) + checkImportBlock(chain, B6) + checkImportBlock(chain, blk6) + checkImportBlock(chain, C6) + checkImportBlock(chain, B7) + checkImportBlock(chain, blk7) + checkImportBlock(chain, C7) + check chain.validate info & " (1)" + check chain.latestHash == C7.blockHash + check chain.latestNumber == 7'u64 + check chain.heads.len == 3 + checkForkChoice(chain, B7, blk3) + check chain.validate info & " (2)" + check chain.heads.len == 3 + checkForkChoice(chain, B7, B6) + check chain.validate info & " (2)" + check chain.heads.len == 1 + + test "importing blocks with new CommonRef and FC instance, 3 blocks": + const info = "importing blocks with new 
CommonRef and FC instance, 3 blocks" + let com = env.newCom() + let chain = ForkedChainRef.init(com, baseDistance = 0, persistBatchSize = 0) + checkImportBlock(chain, blk1) + checkImportBlock(chain, blk2) + checkImportBlock(chain, blk3) + checkForkChoice(chain, blk3, blk3) + check chain.validate info & " (1)" + let cc = env.newCom(com.db) + let fc = ForkedChainRef.init(cc, baseDistance = 0, persistBatchSize = 0) + checkHeadHash fc, blk3.blockHash + checkImportBlock(fc, blk4) + checkForkChoice(fc, blk4, blk4) + check chain.validate info & " (2)" + + test "importing blocks with new CommonRef and FC instance, 1 block": + const info = "importing blocks with new CommonRef and FC instance, 1 block" + let com = env.newCom() + let chain = ForkedChainRef.init(com, baseDistance = 0, persistBatchSize = 0) + checkImportBlock(chain, blk1) + checkForkChoice(chain, blk1, blk1) + check chain.validate info & " (1)" + let cc = env.newCom(com.db) + let fc = ForkedChainRef.init(cc, baseDistance = 0, persistBatchSize = 0) + checkHeadHash fc, blk1.blockHash + checkImportBlock(fc, blk2) + checkForkChoice(fc, blk2, blk2) + check chain.validate info & " (2)" + + test "newBase move forward, greater than persistBatchSize": + const info = "newBase move forward, greater than persistBatchSize" + let com = env.newCom() + var chain = ForkedChainRef.init(com, baseDistance = 3, persistBatchSize = 2) + checkImportBlock(chain, blk1) + checkImportBlock(chain, blk2) + checkImportBlock(chain, blk3) + checkImportBlock(chain, blk4) + checkImportBlock(chain, blk5) + checkImportBlock(chain, blk6) + checkImportBlock(chain, blk7) + + check chain.validate info & " (1)" + checkForkChoice(chain, blk7, blk4) + check chain.validate info & " (2)" + + checkHeadHash chain, blk7.blockHash + check chain.latestHash == blk7.blockHash + + check chain.baseNumber == 4'u64 + check chain.heads.len == 1 + check chain.validate info & " (9)" + + test "newBase move forward, equal persistBatchSize": + const info = "newBase move 
forward, equal persistBatchSize" + let com = env.newCom() + var chain = ForkedChainRef.init(com, baseDistance = 3, persistBatchSize = 2) + checkImportBlock(chain, blk1) + checkImportBlock(chain, blk2) + checkImportBlock(chain, blk3) + checkImportBlock(chain, blk4) + checkImportBlock(chain, blk5) + checkImportBlock(chain, blk6) + checkImportBlock(chain, blk7) + + check chain.validate info & " (1)" + checkForkChoice(chain, blk7, blk2) + check chain.validate info & " (2)" + + checkHeadHash chain, blk7.blockHash + check chain.latestHash == blk7.blockHash + + check chain.baseNumber == 2'u64 + check chain.heads.len == 1 + check chain.validate info & " (9)" + + test "newBase move forward, lower than persistBatchSize": + const info = "newBase move forward, lower than persistBatchSize" + let com = env.newCom() + var chain = ForkedChainRef.init(com, baseDistance = 3, persistBatchSize = 2) + checkImportBlock(chain, blk1) + checkImportBlock(chain, blk2) + checkImportBlock(chain, blk3) + checkImportBlock(chain, blk4) + checkImportBlock(chain, blk5) + checkImportBlock(chain, blk6) + checkImportBlock(chain, blk7) + + check chain.validate info & " (1)" + checkForkChoice(chain, blk7, blk1) + check chain.validate info & " (2)" + + checkHeadHash chain, blk7.blockHash + check chain.latestHash == blk7.blockHash + + check chain.baseNumber == 0'u64 + check chain.heads.len == 1 + check chain.validate info & " (9)" + + test "newBase move forward, auto mode": + const info = "newBase move forward, auto mode" + let com = env.newCom() + var chain = ForkedChainRef.init(com, baseDistance = 3, persistBatchSize = 2) + check (waitFor chain.forkChoice(blk7.blockHash, blk6.blockHash)).isErr + check chain.tryUpdatePendingFCU(blk6.blockHash, blk6.header.number) + checkImportBlock(chain, blk1) + checkImportBlock(chain, blk2) + checkImportBlock(chain, blk3) + checkImportBlock(chain, blk4) + checkImportBlock(chain, blk5) + checkImportBlock(chain, blk6) + checkImportBlock(chain, blk7) + + check 
chain.validate info & " (1)" + + checkHeadHash chain, blk2.blockHash + check chain.latestHash == blk7.blockHash + + check chain.baseNumber == 2'u64 + check chain.heads.len == 1 + check chain.validate info & " (2)" + + test "newBase move forward, auto mode no forkChoice": + const info = "newBase move forward, auto mode no forkChoice" + let com = env.newCom() + var chain = ForkedChainRef.init(com, baseDistance = 3, persistBatchSize = 2) + + check chain.tryUpdatePendingFCU(blk5.blockHash, blk5.header.number) + checkImportBlock(chain, blk1) + checkImportBlock(chain, blk2) + checkImportBlock(chain, blk3) + checkImportBlock(chain, blk4) + checkImportBlock(chain, blk5) + checkImportBlock(chain, blk6) + checkImportBlock(chain, blk7) + + check chain.validate info & " (1)" + + checkHeadHash chain, genesisHash + check chain.latestHash == blk7.blockHash + + check chain.baseNumber == 0'u64 + check chain.heads.len == 1 + check chain.validate info & " (2)" + + test "serialize roundtrip": + const info = "serialize roundtrip" + let com = env.newCom() + var chain = ForkedChainRef.init(com, baseDistance = 3) + checkImportBlock(chain, blk1) + checkImportBlock(chain, blk2) + checkImportBlock(chain, blk3) + checkImportBlock(chain, blk4) + checkImportBlock(chain, blk5) + checkImportBlock(chain, blk6) + checkImportBlock(chain, blk7) + checkImportBlock(chain, B4) + checkImportBlock(chain, B5) + checkImportBlock(chain, B6) + checkImportBlock(chain, B7) + checkImportBlock(chain, blk4) + check chain.validate info & " (1)" + checkForkChoice(chain, blk7, blk5) + check chain.validate info & " (2)" + checkHeadHash chain, blk7.blockHash + check chain.baseNumber == 0'u64 + check chain.latestHash == blk7.blockHash + check chain.validate info & " (3)" + + let txFrame = chain.baseTxFrame + let src = chain.serialize(txFrame) + if src.isErr: + echo "FAILED TO SERIALIZE: ", src.error + check src.isOk + com.db.persist(txFrame, Opt.none(Hash32)) + + var fc = ForkedChainRef.init(com, baseDistance = 3) + let 
rc = fc.deserialize() + if rc.isErr: + echo "FAILED TO DESERIALIZE: ", rc.error + check rc.isOk + + check fc.heads.len == chain.heads.len + check fc.hashToBlock.len == chain.hashToBlock.len + + checkHeadHash fc, blk7.blockHash + check fc.latestHash == chain.latestHash + check fc.validate info & " (4)" + +suite "ForkedChain mainnet replay": + # A short mainnet replay test to check that the first few hundred blocks can + # be imported using a typical importBlock / fcu sequence - this does not + # test any transactions since these blocks are practically empty, but thanks + # to block rewards the state db keeps changing anyway providing a simple + # smoke test + setup: let - C5 = txFrame.makeBlk(5, blk4, 1.byte) - C6 = txFrame.makeBlk(6, C5) - C7 = txFrame.makeBlk(7, C6) - txFrame.dispose() - test "newBase == oldBase": - const info = "newBase == oldBase" - let com = env.newCom() - var chain = ForkedChainRef.init(com) - # same header twice - checkImportBlock(chain, blk1) - checkImportBlock(chain, blk1) - checkImportBlock(chain, blk2) - checkImportBlock(chain, blk3) - check chain.validate info & " (1)" - # no parent - checkImportBlockErr(chain, blk5) - checkHeadHash chain, genesisHash - check chain.latestHash == blk3.blockHash - check chain.validate info & " (2)" - # finalized > head -> error - checkForkChoiceErr(chain, blk1, blk3) - check chain.validate info & " (3)" - # blk4 is not part of chain - checkForkChoiceErr(chain, blk4, blk2) - # finalized > head -> error - checkForkChoiceErr(chain, blk1, blk2) - # blk4 is not part of chain - checkForkChoiceErr(chain, blk2, blk4) - # finalized < head -> ok - checkForkChoice(chain, blk2, blk1) - checkHeadHash chain, blk2.blockHash - check chain.latestHash == blk2.blockHash - check chain.validate info & " (7)" - # finalized == head -> ok - checkForkChoice(chain, blk2, blk2) - checkHeadHash chain, blk2.blockHash - check chain.latestHash == blk2.blockHash - check chain.baseNumber == 0'u64 - check chain.validate info & " (8)" - # 
baggage written - check chain.wdWritten(blk1) == 1 - check chain.wdWritten(blk2) == 2 - check chain.validate info & " (9)" - test "newBase on activeBranch": - const info = "newBase on activeBranch" - let com = env.newCom() - var chain = ForkedChainRef.init(com, baseDistance = 3, persistBatchSize = 0) - checkImportBlock(chain, blk1) - checkImportBlock(chain, blk2) - checkImportBlock(chain, blk3) - checkImportBlock(chain, blk4) - checkImportBlock(chain, blk5) - checkImportBlock(chain, blk6) - checkImportBlock(chain, blk7) - checkImportBlock(chain, blk4) - check chain.validate info & " (1)" - # newbase == head - checkForkChoice(chain, blk7, blk6) - check chain.validate info & " (2)" - checkHeadHash chain, blk7.blockHash - check chain.latestHash == blk7.blockHash - check chain.baseBranch == chain.activeBranch - check chain.wdWritten(blk7) == 7 - # head - baseDistance must been persisted - checkPersisted(chain, blk3) - - # It is FC module who is responsible for saving - # finalized hash on a correct txFrame. 
- let txFrame = chain.txFrame(blk6.blockHash) - let savedFinalized = txFrame.fcuFinalized().expect("OK") - check blk6.blockHash == savedFinalized.hash - - # make sure aristo not wipe out baggage - check chain.wdWritten(blk3) == 3 - check chain.validate info & " (9)" - test "newBase between oldBase and head": - const info = "newBase between oldBase and head" - let com = env.newCom() - var chain = ForkedChainRef.init(com, baseDistance = 3, persistBatchSize = 0) - checkImportBlock(chain, blk1) - checkImportBlock(chain, blk2) - checkImportBlock(chain, blk3) - checkImportBlock(chain, blk4) - checkImportBlock(chain, blk5) - checkImportBlock(chain, blk6) - checkImportBlock(chain, blk7) - check chain.validate info & " (1)" - checkForkChoice(chain, blk7, blk6) - check chain.validate info & " (2)" - checkHeadHash chain, blk7.blockHash - check chain.latestHash == blk7.blockHash - check chain.baseBranch == chain.activeBranch - check chain.wdWritten(blk6) == 6 - check chain.wdWritten(blk7) == 7 - # head - baseDistance must been persisted - checkPersisted(chain, blk3) - # make sure aristo not wipe out baggage - check chain.wdWritten(blk3) == 3 - check chain.validate info & " (9)" - test "newBase == oldBase, fork and stay on that fork": - const info = "newBase == oldBase, fork .." 
- let com = env.newCom() - var chain = ForkedChainRef.init(com) - checkImportBlock(chain, blk1) - checkImportBlock(chain, blk2) - checkImportBlock(chain, blk3) - checkImportBlock(chain, blk4) - checkImportBlock(chain, blk5) - checkImportBlock(chain, blk6) - checkImportBlock(chain, blk7) - checkImportBlock(chain, B4) - checkImportBlock(chain, B5) - checkImportBlock(chain, B6) - checkImportBlock(chain, B7) - check chain.validate info & " (1)" - checkForkChoice(chain, B7, B5) - checkHeadHash chain, B7.blockHash - check chain.latestHash == B7.blockHash - check chain.baseNumber == 0'u64 - check chain.branches.len == 2 - check chain.validate info & " (9)" - test "newBase move forward, fork and stay on that fork": - const info = "newBase move forward, fork .." - let com = env.newCom() - var chain = ForkedChainRef.init(com, baseDistance = 3, persistBatchSize = 0) - checkImportBlock(chain, blk1) - checkImportBlock(chain, blk2) - checkImportBlock(chain, blk3) - checkImportBlock(chain, blk4) - checkImportBlock(chain, blk5) - checkImportBlock(chain, blk6) - checkImportBlock(chain, blk7) - checkImportBlock(chain, B4) - checkImportBlock(chain, B5) - checkImportBlock(chain, B6) - checkImportBlock(chain, B7) - checkImportBlock(chain, B4) - check chain.validate info & " (1)" - checkForkChoice(chain, B6, B4) - check chain.validate info & " (2)" - checkHeadHash chain, B6.blockHash - check chain.latestHash == B6.blockHash - check chain.baseNumber == 3'u64 - check chain.branches.len == 2 - check chain.validate info & " (9)" - test "newBase on shorter canonical arc, remove oldBase branches": - const info = "newBase on shorter canonical, remove oldBase branches" - let com = env.newCom() - var chain = ForkedChainRef.init(com, baseDistance = 3, persistBatchSize = 0) - checkImportBlock(chain, blk1) - checkImportBlock(chain, blk2) - checkImportBlock(chain, blk3) - checkImportBlock(chain, blk4) - checkImportBlock(chain, blk5) - checkImportBlock(chain, blk6) - checkImportBlock(chain, blk7) - 
checkImportBlock(chain, B4) - checkImportBlock(chain, B5) - checkImportBlock(chain, B6) - checkImportBlock(chain, B7) - check chain.validate info & " (1)" - checkForkChoice(chain, B7, B6) - check chain.validate info & " (2)" - checkHeadHash chain, B7.blockHash - check chain.latestHash == B7.blockHash - check chain.baseNumber == 4'u64 - check chain.branches.len == 1 - check chain.validate info & " (9)" - test "newBase on curbed non-canonical arc": - const info = "newBase on curbed non-canonical .." - let com = env.newCom() - var chain = ForkedChainRef.init(com, baseDistance = 5, persistBatchSize = 0) - checkImportBlock(chain, blk1) - checkImportBlock(chain, blk2) - checkImportBlock(chain, blk3) - checkImportBlock(chain, blk4) - checkImportBlock(chain, blk5) - checkImportBlock(chain, blk6) - checkImportBlock(chain, blk7) - checkImportBlock(chain, B4) - checkImportBlock(chain, B5) - checkImportBlock(chain, B6) - checkImportBlock(chain, B7) - check chain.validate info & " (1)" - checkForkChoice(chain, B7, B5) - check chain.validate info & " (2)" - checkHeadHash chain, B7.blockHash - check chain.latestHash == B7.blockHash - check chain.baseNumber > 0 - check chain.baseNumber < B4.header.number - check chain.branches.len == 2 - check chain.validate info & " (9)" - test "newBase == oldBase, fork and return to old chain": - const info = "newBase == oldBase, fork .." 
- let com = env.newCom() - var chain = ForkedChainRef.init(com) - checkImportBlock(chain, blk1) - checkImportBlock(chain, blk2) - checkImportBlock(chain, blk3) - checkImportBlock(chain, blk4) - checkImportBlock(chain, blk5) - checkImportBlock(chain, blk6) - checkImportBlock(chain, blk7) - checkImportBlock(chain, B4) - checkImportBlock(chain, B5) - checkImportBlock(chain, B6) - checkImportBlock(chain, B7) - check chain.validate info & " (1)" - checkForkChoice(chain, blk7, blk5) - check chain.validate info & " (2)" - checkHeadHash chain, blk7.blockHash - check chain.latestHash == blk7.blockHash - check chain.baseNumber == 0'u64 - check chain.validate info & " (9)" - test "newBase on activeBranch, fork and return to old chain": - const info = "newBase on activeBranch, fork .." - let com = env.newCom() - var chain = ForkedChainRef.init(com, baseDistance = 3) - checkImportBlock(chain, blk1) - checkImportBlock(chain, blk2) - checkImportBlock(chain, blk3) - checkImportBlock(chain, blk4) - checkImportBlock(chain, blk5) - checkImportBlock(chain, blk6) - checkImportBlock(chain, blk7) - checkImportBlock(chain, B4) - checkImportBlock(chain, B5) - checkImportBlock(chain, B6) - checkImportBlock(chain, B7) - checkImportBlock(chain, blk4) - check chain.validate info & " (1)" - checkForkChoice(chain, blk7, blk5) - check chain.validate info & " (2)" - checkHeadHash chain, blk7.blockHash - check chain.latestHash == blk7.blockHash - check chain.baseBranch == chain.activeBranch - check chain.validate info & " (9)" - test "newBase on shorter canonical arc, discard arc with oldBase" & - " (ign dup block)": - const info = "newBase on shorter canonical .." 
- let com = env.newCom() - var chain = ForkedChainRef.init(com, baseDistance = 3, persistBatchSize = 0) - checkImportBlock(chain, blk1) - checkImportBlock(chain, blk2) - checkImportBlock(chain, blk3) - checkImportBlock(chain, blk4) - checkImportBlock(chain, blk5) - checkImportBlock(chain, blk6) - checkImportBlock(chain, blk7) - checkImportBlock(chain, B4) - checkImportBlock(chain, B5) - checkImportBlock(chain, B6) - checkImportBlock(chain, B7) - checkImportBlock(chain, blk4) - check chain.validate info & " (1)" - checkForkChoice(chain, B7, B5) - check chain.validate info & " (2)" - checkHeadHash chain, B7.blockHash - check chain.latestHash == B7.blockHash - check chain.baseNumber == 4'u64 - check chain.branches.len == 1 - check chain.validate info & " (9)" - test "newBase on longer canonical arc, discard new branch": - const info = "newBase on longer canonical .." - let com = env.newCom() - var chain = ForkedChainRef.init(com, baseDistance = 3, persistBatchSize = 0) - checkImportBlock(chain, blk1) - checkImportBlock(chain, blk2) - checkImportBlock(chain, blk3) - checkImportBlock(chain, blk4) - checkImportBlock(chain, blk5) - checkImportBlock(chain, blk6) - checkImportBlock(chain, blk7) - checkImportBlock(chain, B4) - checkImportBlock(chain, B5) - checkImportBlock(chain, B6) - checkImportBlock(chain, B7) - check chain.validate info & " (1)" - checkForkChoice(chain, blk7, blk5) - check chain.validate info & " (2)" - checkHeadHash chain, blk7.blockHash - check chain.latestHash == blk7.blockHash - check chain.baseNumber > 0 - check chain.baseNumber < blk5.header.number - check chain.branches.len == 1 - check chain.validate info & " (9)" - test "headerByNumber": - const info = "headerByNumber" - let com = env.newCom() - var chain = ForkedChainRef.init(com, baseDistance = 3) - checkImportBlock(chain, blk1) - checkImportBlock(chain, blk2) - checkImportBlock(chain, blk3) - checkImportBlock(chain, blk4) - checkImportBlock(chain, blk5) - checkImportBlock(chain, blk6) - 
checkImportBlock(chain, blk7) - checkImportBlock(chain, B4) - checkImportBlock(chain, B5) - checkImportBlock(chain, B6) - checkImportBlock(chain, B7) - check chain.validate info & " (1)" - checkForkChoice(chain, blk7, blk5) - check chain.validate info & " (2)" - # cursor - check chain.headerByNumber(8).isErr - check chain.headerByNumber(7).expect("OK").number == 7 - check chain.headerByNumber(7).expect("OK").computeBlockHash == blk7.blockHash - # from db - check chain.headerByNumber(3).expect("OK").number == 3 - check chain.headerByNumber(3).expect("OK").computeBlockHash == blk3.blockHash - # base - check chain.headerByNumber(4).expect("OK").number == 4 - check chain.headerByNumber(4).expect("OK").computeBlockHash == blk4.blockHash - # from cache - check chain.headerByNumber(5).expect("OK").number == 5 - check chain.headerByNumber(5).expect("OK").computeBlockHash == blk5.blockHash - check chain.validate info & " (9)" - test "3 branches, alternating imports": - const info = "3 branches, alternating imports" - let com = env.newCom() - var chain = ForkedChainRef.init(com, baseDistance = 3) - checkImportBlock(chain, blk1) - checkImportBlock(chain, blk2) - checkImportBlock(chain, blk3) - checkImportBlock(chain, B4) - checkImportBlock(chain, blk4) - checkImportBlock(chain, B5) - checkImportBlock(chain, blk5) - checkImportBlock(chain, C5) - checkImportBlock(chain, B6) - checkImportBlock(chain, blk6) - checkImportBlock(chain, C6) - checkImportBlock(chain, B7) - checkImportBlock(chain, blk7) - checkImportBlock(chain, C7) - check chain.validate info & " (1)" - check chain.latestHash == C7.blockHash - check chain.latestNumber == 7'u64 - check chain.branches.len == 3 - checkForkChoice(chain, B7, blk3) - check chain.validate info & " (2)" - check chain.branches.len == 3 - checkForkChoice(chain, B7, B6) - check chain.validate info & " (2)" - check chain.branches.len == 1 - test "importing blocks with new CommonRef and FC instance, 3 blocks": - const info = "importing blocks with 
new CommonRef and FC instance, 3 blocks" - let com = env.newCom() - let chain = ForkedChainRef.init(com, baseDistance = 0, persistBatchSize = 0) - checkImportBlock(chain, blk1) - checkImportBlock(chain, blk2) - checkImportBlock(chain, blk3) - checkForkChoice(chain, blk3, blk3) - check chain.validate info & " (1)" - let cc = env.newCom(com.db) - let fc = ForkedChainRef.init(cc, baseDistance = 0, persistBatchSize = 0) - checkHeadHash fc, blk3.blockHash - checkImportBlock(fc, blk4) - checkForkChoice(fc, blk4, blk4) - check chain.validate info & " (2)" - test "importing blocks with new CommonRef and FC instance, 1 block": - const info = "importing blocks with new CommonRef and FC instance, 1 block" - let com = env.newCom() - let chain = ForkedChainRef.init(com, baseDistance = 0, persistBatchSize = 0) - checkImportBlock(chain, blk1) - checkForkChoice(chain, blk1, blk1) - check chain.validate info & " (1)" - let cc = env.newCom(com.db) - let fc = ForkedChainRef.init(cc, baseDistance = 0, persistBatchSize = 0) - checkHeadHash fc, blk1.blockHash - checkImportBlock(fc, blk2) - checkForkChoice(fc, blk2, blk2) - check chain.validate info & " (2)" - - test "newBase move forward, greater than persistBatchSize": - const info = "newBase move forward, greater than persistBatchSize" - let com = env.newCom() - var chain = ForkedChainRef.init(com, baseDistance = 3, persistBatchSize = 2) - checkImportBlock(chain, blk1) - checkImportBlock(chain, blk2) - checkImportBlock(chain, blk3) - checkImportBlock(chain, blk4) - checkImportBlock(chain, blk5) - checkImportBlock(chain, blk6) - checkImportBlock(chain, blk7) - - check chain.validate info & " (1)" - checkForkChoice(chain, blk7, blk4) - check chain.validate info & " (2)" - - checkHeadHash chain, blk7.blockHash - check chain.latestHash == blk7.blockHash - - check chain.baseNumber == 4'u64 - check chain.branches.len == 1 - check chain.validate info & " (9)" - - test "newBase move forward, equal persistBatchSize": - const info = "newBase 
move forward, equal persistBatchSize" - let com = env.newCom() - var chain = ForkedChainRef.init(com, baseDistance = 3, persistBatchSize = 2) - checkImportBlock(chain, blk1) - checkImportBlock(chain, blk2) - checkImportBlock(chain, blk3) - checkImportBlock(chain, blk4) - checkImportBlock(chain, blk5) - checkImportBlock(chain, blk6) - checkImportBlock(chain, blk7) - - check chain.validate info & " (1)" - checkForkChoice(chain, blk7, blk2) - check chain.validate info & " (2)" - - checkHeadHash chain, blk7.blockHash - check chain.latestHash == blk7.blockHash - - check chain.baseNumber == 2'u64 - check chain.branches.len == 1 - check chain.validate info & " (9)" - - test "newBase move forward, lower than persistBatchSize": - const info = "newBase move forward, lower than persistBatchSize" - let com = env.newCom() - var chain = ForkedChainRef.init(com, baseDistance = 3, persistBatchSize = 2) - checkImportBlock(chain, blk1) - checkImportBlock(chain, blk2) - checkImportBlock(chain, blk3) - checkImportBlock(chain, blk4) - checkImportBlock(chain, blk5) - checkImportBlock(chain, blk6) - checkImportBlock(chain, blk7) - - check chain.validate info & " (1)" - checkForkChoice(chain, blk7, blk1) - check chain.validate info & " (2)" - - checkHeadHash chain, blk7.blockHash - check chain.latestHash == blk7.blockHash - - check chain.baseNumber == 0'u64 - check chain.branches.len == 1 - check chain.validate info & " (9)" - - test "newBase move forward, auto mode": - const info = "newBase move forward, auto mode" - let com = env.newCom() - var chain = ForkedChainRef.init(com, baseDistance = 3, persistBatchSize = 2) - check (waitFor chain.forkChoice(blk7.blockHash, blk6.blockHash)).isErr - check chain.tryUpdatePendingFCU(blk6.blockHash, blk6.header.number) - checkImportBlock(chain, blk1) - checkImportBlock(chain, blk2) - checkImportBlock(chain, blk3) - checkImportBlock(chain, blk4) - checkImportBlock(chain, blk5) - checkImportBlock(chain, blk6) - checkImportBlock(chain, blk7) - - check 
chain.validate info & " (1)" - - checkHeadHash chain, blk2.blockHash - check chain.latestHash == blk7.blockHash - - check chain.baseNumber == 2'u64 - check chain.branches.len == 1 - check chain.validate info & " (2)" - - test "newBase move forward, auto mode no forkChoice": - const info = "newBase move forward, auto mode no forkChoice" - let com = env.newCom() - var chain = ForkedChainRef.init(com, baseDistance = 3, persistBatchSize = 2) - - check chain.tryUpdatePendingFCU(blk5.blockHash, blk5.header.number) - checkImportBlock(chain, blk1) - checkImportBlock(chain, blk2) - checkImportBlock(chain, blk3) - checkImportBlock(chain, blk4) - checkImportBlock(chain, blk5) - checkImportBlock(chain, blk6) - checkImportBlock(chain, blk7) - - check chain.validate info & " (1)" - - checkHeadHash chain, genesisHash - check chain.latestHash == blk7.blockHash - - check chain.baseNumber == 0'u64 - check chain.branches.len == 1 - check chain.validate info & " (2)" - - test "serialize roundtrip": - const info = "serialize roundtrip" - let com = env.newCom() - var chain = ForkedChainRef.init(com, baseDistance = 3) - checkImportBlock(chain, blk1) - checkImportBlock(chain, blk2) - checkImportBlock(chain, blk3) - checkImportBlock(chain, blk4) - checkImportBlock(chain, blk5) - checkImportBlock(chain, blk6) - checkImportBlock(chain, blk7) - checkImportBlock(chain, B4) - checkImportBlock(chain, B5) - checkImportBlock(chain, B6) - checkImportBlock(chain, B7) - checkImportBlock(chain, blk4) - check chain.validate info & " (1)" - checkForkChoice(chain, blk7, blk5) - check chain.validate info & " (2)" - checkHeadHash chain, blk7.blockHash - check chain.baseNumber == 0'u64 - check chain.latestHash == blk7.blockHash - check chain.baseBranch == chain.activeBranch - check chain.validate info & " (3)" - - let txFrame = chain.baseTxFrame - let src = chain.serialize(txFrame) - if src.isErr: - echo "FAILED TO SERIALIZE: ", src.error - check src.isOk - com.db.persist(txFrame, Opt.none(Hash32)) - - var 
fc = ForkedChainRef.init(com, baseDistance = 3) - let rc = fc.deserialize() - if rc.isErr: - echo "FAILED TO DESERIALIZE: ", rc.error - check rc.isOk - - check fc.branches.len == chain.branches.len - check fc.hashToBlock.len == chain.hashToBlock.len - - checkHeadHash fc, blk7.blockHash - - check fc.latestHash == blk7.blockHash - check fc.baseBranch == fc.activeBranch - check fc.validate info & " (4)" - - suite "ForkedChain mainnet replay": - # A short mainnet replay test to check that the first few hundred blocks can - # be imported using a typical importBlock / fcu sequence - this does not - # test any transactions since these blocks are practically empty, but thanks - # to block rewards the state db keeps changing anyway providing a simple - # smoke test - setup: - let - era0 = Era1DbRef.init(sourcePath / "replay", "mainnet").expect("Era files present") - com = CommonRef.new(AristoDbMemory.newCoreDbRef(), nil) - fc = ForkedChainRef.init(com) - - test "Replay mainnet era, single FCU": - var blk: EthBlock - for i in 1.. Date: Thu, 3 Jul 2025 16:28:55 +0700 Subject: [PATCH 116/138] More tuning to getPayloadBodiesByRange (#3442) --- .../beacon/api_handler/api_getbodies.nim | 24 +++++++++++-------- execution_chain/core/chain/forked_chain.nim | 11 +++++---- 2 files changed, 20 insertions(+), 15 deletions(-) diff --git a/execution_chain/beacon/api_handler/api_getbodies.nim b/execution_chain/beacon/api_handler/api_getbodies.nim index 985e2f2502..efb5c32b47 100644 --- a/execution_chain/beacon/api_handler/api_getbodies.nim +++ b/execution_chain/beacon/api_handler/api_getbodies.nim @@ -57,17 +57,21 @@ proc getPayloadBodiesByRange*(ben: BeaconEngineRef, if last > ben.chain.latestNumber: last = ben.chain.latestNumber + let base = ben.chain.baseNumber var list = newSeqOfCap[Opt[ExecutionPayloadBodyV1]](last-start) - # get bodies from database. 
- for bn in start..min(last, ben.chain.baseNumber): - var body = ben.chain.payloadBodyV1ByNumber(bn).valueOr: - list.add Opt.none(ExecutionPayloadBodyV1) - continue - list.add Opt.some(move(body)) - - # get bodies from cache in FC module. - if last > ben.chain.baseNumber: - ben.chain.payloadBodyV1FromBaseTo(last, list) + if start < base: + # get bodies from database. + for bn in start..min(last, base): + var body = ben.chain.payloadBodyV1ByNumber(bn).valueOr: + list.add Opt.none(ExecutionPayloadBodyV1) + continue + list.add Opt.some(move(body)) + + # get bodies from cache in FC module. + if last > base: + ben.chain.payloadBodyV1InMemory(base, last, list) + else: + ben.chain.payloadBodyV1InMemory(start, last, list) move(list) diff --git a/execution_chain/core/chain/forked_chain.nim b/execution_chain/core/chain/forked_chain.nim index 465b88302d..af3229ab5f 100644 --- a/execution_chain/core/chain/forked_chain.nim +++ b/execution_chain/core/chain/forked_chain.nim @@ -893,14 +893,15 @@ proc receiptsByBlockHash*(c: ForkedChainRef, blockHash: Hash32): Result[seq[Stor c.baseTxFrame.getReceipts(header.receiptsRoot) -func payloadBodyV1FromBaseTo*(c: ForkedChainRef, - last: BlockNumber, - list: var seq[Opt[ExecutionPayloadBodyV1]]) = +func payloadBodyV1InMemory*(c: ForkedChainRef, + first: BlockNumber, + last: BlockNumber, + list: var seq[Opt[ExecutionPayloadBodyV1]]) = var - blocks = newSeqOfCap[BlockRef](last-c.base.number+1) + blocks = newSeqOfCap[BlockRef](last-first+1) loopIt(c.latest): - if it.number <= last: + if it.number >= first and it.number <= last: blocks.add(it) for i in countdown(blocks.len-1, 0): From 9a4fd90e3df9b91a3bc89a090d2672435a924d17 Mon Sep 17 00:00:00 2001 From: jangko Date: Thu, 3 Jul 2025 17:12:49 +0700 Subject: [PATCH 117/138] Fix fCU log too --- execution_chain/beacon/api_handler/api_forkchoice.nim | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/execution_chain/beacon/api_handler/api_forkchoice.nim 
b/execution_chain/beacon/api_handler/api_forkchoice.nim index ead419e28b..c5f29bf10b 100644 --- a/execution_chain/beacon/api_handler/api_forkchoice.nim +++ b/execution_chain/beacon/api_handler/api_forkchoice.nim @@ -220,7 +220,7 @@ proc forkchoiceUpdated*(ben: BeaconEngineRef, info "Fork choice updated", requested = header.number, head = chain.latestNumber, - hashHash = headHash.short, + headHash = headHash.short, base = chain.baseNumber, baseHash = chain.baseHash.short, finalizedHash = finalizedBlockHash.short, From 8923504a6b9d6b9c3399dd7509f2ab9cf5db034e Mon Sep 17 00:00:00 2001 From: Chirag Parmar Date: Thu, 3 Jul 2025 16:37:57 +0530 Subject: [PATCH 118/138] proxy: add blocks support (#3338) * add blocks support * add rpc handlers * reviews * format * catch only error exceptions * remove unused imports * review * add basics tests * fix --- nimbus_verified_proxy/header_store.nim | 6 + .../nimbus_verified_proxy.nim | 4 +- .../nimbus_verified_proxy_conf.nim | 8 + nimbus_verified_proxy/rpc/blocks.nim | 279 +- nimbus_verified_proxy/rpc/rpc_eth_api.nim | 121 +- nimbus_verified_proxy/rpc/transactions.nim | 61 + .../tests/all_proxy_tests.nim | 2 +- nimbus_verified_proxy/tests/block.json | 3828 +++++++++++++++++ .../tests/test_transactions.nim | 34 + nimbus_verified_proxy/types.nim | 6 +- 10 files changed, 4269 insertions(+), 80 deletions(-) create mode 100644 nimbus_verified_proxy/rpc/transactions.nim create mode 100644 nimbus_verified_proxy/tests/block.json create mode 100644 nimbus_verified_proxy/tests/test_transactions.nim diff --git a/nimbus_verified_proxy/header_store.nim b/nimbus_verified_proxy/header_store.nim index 80887b3b43..1fdd63c7d8 100644 --- a/nimbus_verified_proxy/header_store.nim +++ b/nimbus_verified_proxy/header_store.nim @@ -116,6 +116,12 @@ func finalized*(self: HeaderStore): Opt[Header] = func finalizedHash*(self: HeaderStore): Opt[Hash32] = self.finalizedHash +func contains*(self: HeaderStore, hash: Hash32): bool = + self.headers.contains(hash) + 
+func contains*(self: HeaderStore, number: base.BlockNumber): bool = + self.hashes.contains(number) + proc updateFinalized*( self: HeaderStore, header: ForkedLightClientHeader ): Result[bool, string] = diff --git a/nimbus_verified_proxy/nimbus_verified_proxy.nim b/nimbus_verified_proxy/nimbus_verified_proxy.nim index 1038577182..13ead324ad 100644 --- a/nimbus_verified_proxy/nimbus_verified_proxy.nim +++ b/nimbus_verified_proxy/nimbus_verified_proxy.nim @@ -84,7 +84,9 @@ proc run*( # header cache contains headers downloaded from p2p headerStore = HeaderStore.new(config.cacheLen) - let verifiedProxy = VerifiedRpcProxy.init(rpcProxy, headerStore, chainId) + # TODO: add config object to verified proxy for future config options + let verifiedProxy = + VerifiedRpcProxy.init(rpcProxy, headerStore, chainId, config.maxBlockWalk) # add handlers that verify RPC calls /rpc/rpc_eth_api.nim verifiedProxy.installEthApiHandlers() diff --git a/nimbus_verified_proxy/nimbus_verified_proxy_conf.nim b/nimbus_verified_proxy/nimbus_verified_proxy_conf.nim index 01cfe91a92..1e8b3a7817 100644 --- a/nimbus_verified_proxy/nimbus_verified_proxy_conf.nim +++ b/nimbus_verified_proxy/nimbus_verified_proxy_conf.nim @@ -141,6 +141,14 @@ type VerifiedProxyConf* = object # Config name: "max-peers" .}: int + maxBlockWalk* {. + hidden, + desc: "Maximum number of blocks that will be queried to serve a request", + defaultValue: 1000, + defaultValueDesc: "1000", + name: "debug-max-walk" + .}: uint64 + hardMaxPeers* {. desc: "The maximum number of peers to connect to. 
Defaults to maxPeers * 1.5", name: "hard-max-peers" diff --git a/nimbus_verified_proxy/rpc/blocks.nim b/nimbus_verified_proxy/rpc/blocks.nim index 5eccaab9a2..79a92a14dd 100644 --- a/nimbus_verified_proxy/rpc/blocks.nim +++ b/nimbus_verified_proxy/rpc/blocks.nim @@ -9,59 +9,244 @@ import std/strutils, - stint, - chronos, results, + chronicles, + web3/[eth_api_types, eth_api], + json_rpc/[rpcproxy, rpcserver, rpcclient], eth/common/eth_types_rlp, - web3/eth_api_types, + eth/rlp, + eth/trie/[ordered_trie, trie_defs], + ../../execution_chain/beacon/web3_eth_conv, + ../types, ../header_store, - ../types + ./transactions -type - QuantityTagKind = enum - LatestBlock - BlockNumber - - QuantityTag = object - case kind: QuantityTagKind - of LatestBlock: - discard - of BlockNumber: - blockNumber: Quantity - -func parseQuantityTag(blockTag: BlockTag): Result[QuantityTag, string] = +proc resolveBlockTag*( + vp: VerifiedRpcProxy, blockTag: BlockTag +): Result[base.BlockNumber, string] = if blockTag.kind == bidAlias: - let tag = blockTag.alias.toLowerAscii + let tag = blockTag.alias.toLowerAscii() case tag of "latest": - return ok(QuantityTag(kind: LatestBlock)) + let hLatest = vp.headerStore.latest.valueOr: + return err("Couldn't get the latest block number from header store") + ok(hLatest.number) else: - return err("Unsupported blockTag: " & tag) + err("No support for block tag " & $blockTag) else: - let quantity = blockTag.number - return ok(QuantityTag(kind: BlockNumber, blockNumber: quantity)) - -template checkPreconditions(proxy: VerifiedRpcProxy) = - if proxy.headerStore.isEmpty(): - raise newException(ValueError, "Syncing") - -proc getHeaderByTag( - proxy: VerifiedRpcProxy, quantityTag: BlockTag -): results.Opt[Header] {.raises: [ValueError].} = - checkPreconditions(proxy) - - let tag = parseQuantityTag(quantityTag).valueOr: - raise newException(ValueError, error) - - case tag.kind - of LatestBlock: - # this will always return some block, as we always checkPreconditions 
- proxy.headerStore.latest - of BlockNumber: - proxy.headerStore.get(base.BlockNumber(distinctBase(tag.blockNumber))) - -proc getHeaderByTagOrThrow*( - proxy: VerifiedRpcProxy, quantityTag: BlockTag -): Header {.raises: [ValueError].} = - getHeaderByTag(proxy, quantityTag).valueOr: - raise newException(ValueError, "No block stored for given tag " & $quantityTag) + ok(base.BlockNumber(distinctBase(blockTag.number))) + +func convHeader(blk: eth_api_types.BlockObject): Header = + let nonce = blk.nonce.valueOr: + default(Bytes8) + + return Header( + parentHash: blk.parentHash, + ommersHash: blk.sha3Uncles, + coinbase: blk.miner, + stateRoot: blk.stateRoot, + transactionsRoot: blk.transactionsRoot, + receiptsRoot: blk.receiptsRoot, + logsBloom: blk.logsBloom, + difficulty: blk.difficulty, + number: base.BlockNumber(distinctBase(blk.number)), + gasLimit: GasInt(blk.gasLimit.uint64), + gasUsed: GasInt(blk.gasUsed.uint64), + timestamp: ethTime(blk.timestamp), + extraData: seq[byte](blk.extraData), + mixHash: Bytes32(distinctBase(blk.mixHash)), + nonce: nonce, + baseFeePerGas: blk.baseFeePerGas, + withdrawalsRoot: blk.withdrawalsRoot, + blobGasUsed: blk.blobGasUsed.u64, + excessBlobGas: blk.excessBlobGas.u64, + parentBeaconBlockRoot: blk.parentBeaconBlockRoot, + requestsHash: blk.requestsHash, + ) + +proc walkBlocks( + vp: VerifiedRpcProxy, + sourceNum: base.BlockNumber, + targetNum: base.BlockNumber, + sourceHash: Hash32, + targetHash: Hash32, +): Future[Result[void, string]] {.async: (raises: []).} = + var nextHash = sourceHash + info "Starting block walk to verify requested block", blockHash = targetHash + + let numBlocks = sourceNum - targetNum + if numBlocks > vp.maxBlockWalk: + return err( + "Cannot query more than " & $vp.maxBlockWalk & + " to verify the chain for the requested block" + ) + + for i in 0 ..< numBlocks: + let nextHeader = + if vp.headerStore.contains(nextHash): + vp.headerStore.get(nextHash).get() + else: + let blk = + try: + await 
vp.rpcClient.eth_getBlockByHash(nextHash, false) + except CatchableError as e: + return err( + "Couldn't get block " & $nextHash & " during the chain traversal: " & e.msg + ) + + trace "getting next block", + hash = nextHash, + number = blk.number, + remaining = distinctBase(blk.number) - targetNum + + let header = convHeader(blk) + + if header.computeBlockHash != nextHash: + return err("Encountered an invalid block header while walking the chain") + + header + + if nextHeader.parentHash == targetHash: + return ok() + + nextHash = nextHeader.parentHash + + err("the requested block is not part of the canonical chain") + +proc verifyHeader( + vp: VerifiedRpcProxy, header: Header, hash: Hash32 +): Future[Result[void, string]] {.async.} = + # verify calculated hash with the requested hash + if header.computeBlockHash != hash: + return err("hashed block header doesn't match with blk.hash(downloaded)") + + if not vp.headerStore.contains(hash): + let latestHeader = vp.headerStore.latest.valueOr: + return err("Couldn't get the latest header, syncing in progress") + + # walk blocks backwards(time) from source to target + ?( + await vp.walkBlocks( + latestHeader.number, header.number, latestHeader.parentHash, hash + ) + ) + + ok() + +proc verifyBlock( + vp: VerifiedRpcProxy, blk: BlockObject, fullTransactions: bool +): Future[Result[void, string]] {.async.} = + let header = convHeader(blk) + + ?(await vp.verifyHeader(header, blk.hash)) + + # verify transactions + if fullTransactions: + ?verifyTransactions(header.transactionsRoot, blk.transactions) + + # verify withdrawals + if blk.withdrawalsRoot.isSome(): + if blk.withdrawalsRoot.get() != orderedTrieRoot(blk.withdrawals.get(@[])): + return err("Withdrawals within the block do not yield the same withdrawals root") + else: + if blk.withdrawals.isSome(): + return err("Block contains withdrawals but no withdrawalsRoot") + + ok() + +proc getBlock*( + vp: VerifiedRpcProxy, blockHash: Hash32, fullTransactions: bool +): 
Future[Result[BlockObject, string]] {.async.} = + # get the target block + let blk = + try: + await vp.rpcClient.eth_getBlockByHash(blockHash, fullTransactions) + except CatchableError as e: + return err(e.msg) + + # verify requested hash with the downloaded hash + if blockHash != blk.hash: + return err("the downloaded block hash doesn't match with the requested hash") + + # verify the block + ?(await vp.verifyBlock(blk, fullTransactions)) + + ok(blk) + +proc getBlock*( + vp: VerifiedRpcProxy, blockTag: BlockTag, fullTransactions: bool +): Future[Result[BlockObject, string]] {.async.} = + let n = vp.resolveBlockTag(blockTag).valueOr: + return err(error) + + # get the target block + let blk = + try: + await vp.rpcClient.eth_getBlockByNumber(blockTag, fullTransactions) + except CatchableError as e: + return err(e.msg) + + if n != distinctBase(blk.number): + return + err("the downloaded block number doesn't match with the requested block number") + + # verify the block + ?(await vp.verifyBlock(blk, fullTransactions)) + + ok(blk) + +proc getHeader*( + vp: VerifiedRpcProxy, blockHash: Hash32 +): Future[Result[Header, string]] {.async.} = + let cachedHeader = vp.headerStore.get(blockHash) + + if cachedHeader.isNone(): + debug "did not find the header in the cache", blockHash = blockHash + else: + return ok(cachedHeader.get()) + + # get the target block + let blk = + try: + await vp.rpcClient.eth_getBlockByHash(blockHash, false) + except CatchableError as e: + return err(e.msg) + + let header = convHeader(blk) + + if blockHash != blk.hash: + return err("the blk.hash(downloaded) doesn't match with the provided hash") + + ?(await vp.verifyHeader(header, blockHash)) + + ok(header) + +proc getHeader*( + vp: VerifiedRpcProxy, blockTag: BlockTag +): Future[Result[Header, string]] {.async.} = + let + n = vp.resolveBlockTag(blockTag).valueOr: + return err(error) + cachedHeader = vp.headerStore.get(n) + + if cachedHeader.isNone(): + debug "did not find the header in the cache", 
blockTag = blockTag + else: + return ok(cachedHeader.get()) + + # get the target block + let blk = + try: + await vp.rpcClient.eth_getBlockByNumber(blockTag, false) + except CatchableError as e: + return err(e.msg) + + let header = convHeader(blk) + + if n != header.number: + return + err("the downloaded block number doesn't match with the requested block number") + + ?(await vp.verifyHeader(header, blk.hash)) + + ok(header) diff --git a/nimbus_verified_proxy/rpc/rpc_eth_api.nim b/nimbus_verified_proxy/rpc/rpc_eth_api.nim index 7b155a6ac1..87d9f2bac5 100644 --- a/nimbus_verified_proxy/rpc/rpc_eth_api.nim +++ b/nimbus_verified_proxy/rpc/rpc_eth_api.nim @@ -21,69 +21,130 @@ import logScope: topics = "verified_proxy" -proc installEthApiHandlers*(lcProxy: VerifiedRpcProxy) = - lcProxy.proxy.rpc("eth_chainId") do() -> UInt256: - lcProxy.chainId +proc installEthApiHandlers*(vp: VerifiedRpcProxy) = + vp.proxy.rpc("eth_chainId") do() -> UInt256: + vp.chainId - lcProxy.proxy.rpc("eth_blockNumber") do() -> uint64: + vp.proxy.rpc("eth_blockNumber") do() -> uint64: ## Returns the number of the most recent block. 
- let latest = lcProxy.headerStore.latest.valueOr: + let latest = vp.headerStore.latest.valueOr: raise newException(ValueError, "Syncing") latest.number.uint64 - lcProxy.proxy.rpc("eth_getBalance") do( - address: Address, quantityTag: BlockTag - ) -> UInt256: + vp.proxy.rpc("eth_getBalance") do(address: Address, quantityTag: BlockTag) -> UInt256: let - header = lcProxy.getHeaderByTagOrThrow(quantityTag) - - account = (await lcProxy.getAccount(address, header.number, header.stateRoot)).valueOr: + header = (await vp.getHeader(quantityTag)).valueOr: + raise newException(ValueError, error) + account = (await vp.getAccount(address, header.number, header.stateRoot)).valueOr: raise newException(ValueError, error) account.balance - lcProxy.proxy.rpc("eth_getStorageAt") do( + vp.proxy.rpc("eth_getStorageAt") do( address: Address, slot: UInt256, quantityTag: BlockTag ) -> UInt256: let - header = lcProxy.getHeaderByTagOrThrow(quantityTag) - storage = ( - await lcProxy.getStorageAt(address, slot, header.number, header.stateRoot) - ).valueOr: + header = (await vp.getHeader(quantityTag)).valueOr: + raise newException(ValueError, error) + storage = (await vp.getStorageAt(address, slot, header.number, header.stateRoot)).valueOr: raise newException(ValueError, error) storage - lcProxy.proxy.rpc("eth_getTransactionCount") do( + vp.proxy.rpc("eth_getTransactionCount") do( address: Address, quantityTag: BlockTag ) -> Quantity: let - header = lcProxy.getHeaderByTagOrThrow(quantityTag) - account = (await lcProxy.getAccount(address, header.number, header.stateRoot)).valueOr: + header = (await vp.getHeader(quantityTag)).valueOr: + raise newException(ValueError, error) + account = (await vp.getAccount(address, header.number, header.stateRoot)).valueOr: raise newException(ValueError, error) Quantity(account.nonce) - lcProxy.proxy.rpc("eth_getCode") do( - address: Address, quantityTag: BlockTag - ) -> seq[byte]: + vp.proxy.rpc("eth_getCode") do(address: Address, quantityTag: BlockTag) -> 
seq[byte]: let - header = lcProxy.getHeaderByTagOrThrow(quantityTag) - code = (await lcProxy.getCode(address, header.number, header.stateRoot)).valueOr: + header = (await vp.getHeader(quantityTag)).valueOr: + raise newException(ValueError, error) + code = (await vp.getCode(address, header.number, header.stateRoot)).valueOr: raise newException(ValueError, error) code + vp.proxy.rpc("eth_getBlockByHash") do( + blockHash: Hash32, fullTransactions: bool + ) -> BlockObject: + (await vp.getBlock(blockHash, fullTransactions)).valueOr: + raise newException(ValueError, error) + + vp.proxy.rpc("eth_getBlockByNumber") do( + blockTag: BlockTag, fullTransactions: bool + ) -> BlockObject: + (await vp.getBlock(blockTag, fullTransactions)).valueOr: + raise newException(ValueError, error) + + vp.proxy.rpc("eth_getUncleCountByBlockNumber") do(blockTag: BlockTag) -> Quantity: + let blk = (await vp.getBlock(blockTag, false)).valueOr: + raise newException(ValueError, error) + + Quantity(blk.uncles.len()) + + vp.proxy.rpc("eth_getUncleCountByBlockHash") do(blockHash: Hash32) -> Quantity: + let blk = (await vp.getBlock(blockHash, false)).valueOr: + raise newException(ValueError, error) + + Quantity(blk.uncles.len()) + + vp.proxy.rpc("eth_getBlockTransactionCountByNumber") do( + blockTag: BlockTag + ) -> Quantity: + let blk = (await vp.getBlock(blockTag, true)).valueOr: + raise newException(ValueError, error) + + Quantity(blk.transactions.len) + + vp.proxy.rpc("eth_getBlockTransactionCountByHash") do(blockHash: Hash32) -> Quantity: + let blk = (await vp.getBlock(blockHash, true)).valueOr: + raise newException(ValueError, error) + + Quantity(blk.transactions.len) + + vp.proxy.rpc("eth_getTransactionByBlockNumberAndIndex") do( + blockTag: BlockTag, index: Quantity + ) -> TransactionObject: + let blk = (await vp.getBlock(blockTag, true)).valueOr: + raise newException(ValueError, error) + + if distinctBase(index) >= uint64(blk.transactions.len): + raise newException(ValueError, "provided 
transaction index is outside bounds") + let x = blk.transactions[distinctBase(index)] + + doAssert x.kind == tohTx + + x.tx + + vp.proxy.rpc("eth_getTransactionByBlockHashAndIndex") do( + blockHash: Hash32, index: Quantity + ) -> TransactionObject: + let blk = (await vp.getBlock(blockHash, true)).valueOr: + raise newException(ValueError, error) + + if distinctBase(index) >= uint64(blk.transactions.len): + raise newException(ValueError, "provided transaction index is outside bounds") + let x = blk.transactions[distinctBase(index)] + + doAssert x.kind == tohTx + + x.tx + # TODO: # Following methods are forwarded directly to the web3 provider and therefore # are not validated in any way. - lcProxy.proxy.registerProxyMethod("net_version") - lcProxy.proxy.registerProxyMethod("eth_call") - lcProxy.proxy.registerProxyMethod("eth_sendRawTransaction") - lcProxy.proxy.registerProxyMethod("eth_getTransactionReceipt") - lcProxy.proxy.registerProxyMethod("eth_getBlockByNumber") - lcProxy.proxy.registerProxyMethod("eth_getBlockByHash") + vp.proxy.registerProxyMethod("net_version") + vp.proxy.registerProxyMethod("eth_call") + vp.proxy.registerProxyMethod("eth_sendRawTransaction") + vp.proxy.registerProxyMethod("eth_getTransactionReceipt") # Used to be in eth1_monitor.nim; not sure why it was deleted, # so I copied it here. --Adam diff --git a/nimbus_verified_proxy/rpc/transactions.nim b/nimbus_verified_proxy/rpc/transactions.nim new file mode 100644 index 0000000000..d0b6307168 --- /dev/null +++ b/nimbus_verified_proxy/rpc/transactions.nim @@ -0,0 +1,61 @@ +# nimbus_verified_proxy +# Copyright (c) 2025 Status Research & Development GmbH +# Licensed and distributed under either of +# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). +# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). +# at your option. 
This file may not be copied, modified, or distributed except according to those terms. + +import + stint, + results, + eth/common/eth_types_rlp, + eth/trie/[ordered_trie, trie_defs], + web3/[eth_api_types, eth_api], + ../../execution_chain/beacon/web3_eth_conv + +proc toTransaction(tx: TransactionObject): Transaction = + Transaction( + txType: tx.`type`.get(0.Web3Quantity).TxType, + chainId: tx.chainId.get(0.u256), + nonce: tx.nonce.AccountNonce, + gasPrice: tx.gasPrice.GasInt, + maxPriorityFeePerGas: tx.maxPriorityFeePerGas.get(0.Web3Quantity).GasInt, + maxFeePerGas: tx.maxFeePerGas.get(0.Web3Quantity).GasInt, + gasLimit: tx.gas.GasInt, + to: tx.to, + value: tx.value, + payload: tx.input, + accessList: tx.accessList.get(@[]), + maxFeePerBlobGas: tx.maxFeePerBlobGas.get(0.u256), + versionedHashes: tx.blobVersionedHashes.get(@[]), + V: tx.v.uint64, + R: tx.r, + S: tx.s, + authorizationList: tx.authorizationList.get(@[]), + ) + +proc toTransactions(txs: openArray[TxOrHash]): Result[seq[Transaction], string] = + var convertedTxs = newSeqOfCap[Transaction](txs.len) + for x in txs: + if x.kind == tohTx: + convertedTxs.add toTransaction(x.tx) + else: + return err("cannot construct a transaction trie using only txhashes") + + return ok(convertedTxs) + +proc checkTxHash*(txObj: TransactionObject, txHash: Hash32): bool = + toTransaction(txObj).rlpHash == txHash + +proc verifyTransactions*( + txRoot: Hash32, transactions: seq[TxOrHash] +): Result[void, string] = + let + txs = toTransactions(transactions).valueOr: + return err(error) + rootHash = orderedTrieRoot(txs) + + if rootHash == txRoot: + return ok() + + err("calculated tx trie root doesn't match the provided tx trie root") diff --git a/nimbus_verified_proxy/tests/all_proxy_tests.nim b/nimbus_verified_proxy/tests/all_proxy_tests.nim index 0bff1acc34..dbe5b3dd0f 100644 --- a/nimbus_verified_proxy/tests/all_proxy_tests.nim +++ b/nimbus_verified_proxy/tests/all_proxy_tests.nim @@ -5,4 +5,4 @@ # * MIT license 
([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) # at your option. This file may not be copied, modified, or distributed except according to those terms. -import ./test_proof_validation, ./test_header_store +import ./test_proof_validation, ./test_header_store, ./test_transactions diff --git a/nimbus_verified_proxy/tests/block.json b/nimbus_verified_proxy/tests/block.json new file mode 100644 index 0000000000..0e516f7c96 --- /dev/null +++ b/nimbus_verified_proxy/tests/block.json @@ -0,0 +1,3828 @@ +{ + "number": "0x1000000", + "hash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "parentHash": "0xf34c3c11b35466e5595e077239e6b25a7c3ec07a214b2492d42ba6d73d503a1b", + "nonce": "0x0000000000000000", + "mixHash": "0x9f5fd11335938ac040c82dc4330a99957a81fa480e548570f71baa1cd245d4bb", + "sha3Uncles": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", + "logsBloom": "0xa821ad60a15607455d2c8b3ca14ae1a608a1efbb8fb1201781096041f4d3ec105cd59bf117887838c4627f2eea0d255e66237252ba96fae7422d9a1272ef68f2a0a841cf6c08852ffb23d0297038a6a480a54c29555c7c49f88bbe56f8613965d24ccb3402e6b1e800499f10280c3d511858b0750056a7b6560933df4d5a7f143a5d937710521518a8fcdd43fd7202fc05e8578bd5494a5da4a83560095f90279a9d7b2b36a7614b9791e6c02ab6a8a996686a4034c7a3877be514c6936b245be3d01452b1a704f6f9288282fa39b1056fed026a9eaed455a6895a36283cf540d3b7f608a10200bbc90c1785018104f01a502c8c556c2341e9096ef8d222d683", + "stateRoot": "0x8e8b72abe2caef6bcbc4919ae6a372aac81d75abc21a472f1b6f4964d72c9ddc", + "miner": "0x1f9090aae28b8a3dceadf281b0f12828e676c326", + "difficulty": "0x0", + "extraData": "0x7273796e632d6275696c6465722e78797a", + "size": "0x1f6a4", + "gasLimit": "0x1c9c380", + "gasUsed": "0xef2f92", + "timestamp": "0x6407537f", + "transactionsRoot": "0x6c171b24bd12308508639790b82bb5318493016ec46e4688427449f6f6b8f354", + "receiptsRoot": "0xec59796c98c8d82b77b4a28aedd21c2e9422871631a447eb6407c9cf6817d2d8", + "baseFeePerGas": "0xe538bec8c", + 
"totalDifficulty": "0xc70d815d562d3cfa955", + "uncles": [], + "transactions": [ + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x0", + "hash": "0xb0b31990a5b94de6563f0ed84004a4eadb89a44a37a3ddc19f693235782b8aed", + "from": "0xd064dd76193e130f5be4b805783d8fe054ab0fd1", + "to": "0xd37bbe5744d730a1d98d8dc97c42f0ca46ad7146", + "input": "0x574da717000000000000000000000000251a25a4835319ff72606a066966cc0f0f21c7fe000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000048d5cc090c1a4000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000444f55543a3245384441353445303637333741393631433934343331463735373333313142413332333536443332334139314146334544324233383639313444343435394500000000000000000000000000000000000000000000000000000000", + "nonce": "0x4f", + "value": "0x48d5cc090c1a4000", + "gas": "0x13880", + "gasPrice": "0x22ecb25c00", + "type": "0x0", + "v": "0x26", + "r": "0x6608219b76b7482e0a0dd0922fddbe9c59218e967c4df705fffc2b27f39441c7", + "s": "0xbe12032bd854460d7e4f6ae0cf9cf96018219ac24c50904ac746c2fd4256632" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x1", + "hash": "0xa3702d5e0574792d5c17959cfb0fb4c8f0d1b89286d573c46bdec81647cf5b55", + "from": "0x18390e83a080ecb9f120dda888aba3c4ca084f36", + "to": "0x280027dd00ee0050d3f9d168efd6b40090009246", + "input": 
"0x6234ff1900000000000000000000000088e6a0c2ddd26feeb64f039a2c41296fcb3f56400000000000000000000000000000000000000000000000000000001fe0b6cca4000000000000000000000000000000000000000000000004d0228fb2d6700000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000020000000000000000000000000a0b86991c6218b36c1d19d4a2e9eb0ce3606eb48", + "nonce": "0x134a", + "value": "0x0", + "gas": "0x248b9", + "gasPrice": "0x1fd22e7f09", + "maxPriorityFeePerGas": "0x1fd22e7f09", + "maxFeePerGas": "0x1fd22e7f09", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x382d8e906483eff47ebac3452e455b0a4e46bb49a740254a3482dc5bf006d486", + "s": "0x4957db6ee5ee13e839d7b12c03b5a7bb32953cca1acaa80f4da0ffbc79b97447", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x2", + "hash": "0xbc0671b6fb715c20cd168f8bd4d2bfe08d1d2f2f8b4d57861ba4d289893fe463", + "from": "0x242510fe96a4fa2d4ac7de68cd41944cd71d4099", + "to": "0x7a250d5630b4cf539739df2c5dacb4c659f2488d", + "input": "0x38ed1739000000000000000000000000000000000000000336249890dd59dcd2edd5ce2c0000000000000000000000000000000000000000000000002ccbaad9e001638700000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000242510fe96a4fa2d4ac7de68cd41944cd71d409900000000000000000000000000000000000000000000000000000000640753f00000000000000000000000000000000000000000000000000000000000000002000000000000000000000000796a4503b444a71b331c9556bef0815237ddeabc000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2", + "nonce": "0x3386", + "value": "0x0", + "gas": "0x2e804", + "gasPrice": "0xee88ee58c", + "maxPriorityFeePerGas": "0x9502f900", + "maxFeePerGas": "0x1d3c1ad218", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": 
"0x17a1c4399b820f632343f596b85919fcfd358b2d2bbd82d4ba695e75d7e1c570", + "s": "0x656cdcf86262773931cf41e3fcf0f8fbb06961671d9da94ae5dbd5775ee2f576", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x3", + "hash": "0x7a98808879469b45aa56ea955c7560242887425a04f276c966eb127b65ca0637", + "from": "0x58070dd4b871149ce4eed3b2c9fde52dbfa8ce1e", + "to": "0x43cc953fb952d10c8f810267092fb12145d56d40", + "input": "0x8b3c99e3000000000000000000000000000000000000000000000000000000006407537f", + "nonce": "0x190c", + "value": "0x1421a6d79ea800", + "gas": "0xea60", + "gasPrice": "0xee88ee58c", + "maxPriorityFeePerGas": "0x9502f900", + "maxFeePerGas": "0x1d3c1ad218", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x642be1742e8c68bc42c223a773bf0f40cf024472e8decff3d207111105b435ef", + "s": "0x7490623f871ea9627eb12e16ea8df98f3898c59e62783e0aa811fb3d9d1cb04d", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x4", + "hash": "0x94d8c13227ab8416011df51016a7eedf67213267de4fde3daf1c05b48572abcc", + "from": "0xccfb4b91ff5d1a2319c96ab6b59be4cdefb8437d", + "to": "0x000000000dfde7deaf24138722987c9a6991e2d4", + "input": "0xe0f6740b0000000000000000000000009c4fe5ffd9a9fc5678cfbd93aa2d4fd684b67c4c000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc200000000000000000000000045804880de22913dafe09f4980848ece6ecbaf78000000000000000000000000000000000000000000000000598d33d514dcdff80000000000000000000000000000000000000000000000004bef95fb41e88b78", + "nonce": "0x5ec9", + "value": "0x1000000", + "gas": "0x61a80", + "gasPrice": "0x14fc87ba03", + "type": "0x0", + "v": "0x26", + "r": "0x8aa01d0b448fff740f028368369456b32b663f7efd4e9a1425d92989625e589f", + "s": "0x2ebdd62e7b1a1196b515ea7e30c24956e0e129d5cb862bec04c7002dbe38faa" + }, + { + 
"blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x5", + "hash": "0x3ed0bf08da872b398f0439ae18c6b375a7ff1636870f32bad83f0ca75263e5f1", + "from": "0x98550391ea2af72265579684e5a3578e7c57ab6a", + "to": "0x280027dd00ee0050d3f9d168efd6b40090009246", + "input": "0x6234ff1900000000000000000000000011b815efb8f581194ae79006d24e0d814b7697f600000000000000000000000000000000000000000000000000000009f0e1cafc0000000000000000000000000000000000000000000000018049240ff3f17000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000020000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec7", + "nonce": "0x13a2", + "value": "0x0", + "gas": "0x23517", + "gasPrice": "0x14f352538f", + "maxPriorityFeePerGas": "0x14f352538f", + "maxFeePerGas": "0x14f352538f", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x2c572eb6de7b1c08ef4c3d4546a3efb6cff234e81dbaebc5d9ba419b4c5be161", + "s": "0x318e597f067adfa1e25db7dab7c7b36cec8f3d1b569d46e1bf7d0d0d41117ad3", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x6", + "hash": "0x9bc80106ab46a57a520aa37560fcf6619668e0350594cb418a60fd7b9537485c", + "from": "0xa70295e6dbf88f2ab9b9e44705a678f81d1d9c52", + "to": "0x280027dd00ee0050d3f9d168efd6b40090009246", + "input": 
"0x6234ff1900000000000000000000000060594a405d53811d3bc4766596efd80fd545a2700000000000000000000000000000000000000000000004ada4755af722000000000000000000000000000000000000000000000000000000c6eddd52affff000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000200000000000000000000000006b175474e89094c44da98b954eedeac495271d0f", + "nonce": "0x129a", + "value": "0x0", + "gas": "0x20024", + "gasPrice": "0x14ef733462", + "maxPriorityFeePerGas": "0x14ef733462", + "maxFeePerGas": "0x14ef733462", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0xf325e56cd19914241c07826a09002ca178ad18737f1ba456fc54fbc665f8fa0d", + "s": "0x24e7b328f648fd052edfac7cf71e65549c817e4df020aa30ee3a0920a86becf9", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x7", + "hash": "0xa00a4b8423b4b2089b3809fc8d3e6e84bdf495b12299e72d1a698f6d3c52b341", + "from": "0xffec0067f5a79cff07527f63d83dd5462ccf8ba4", + "to": "0xdac17f958d2ee523a2206206994597c13d831ec7", + "input": "0xa9059cbb00000000000000000000000035a93230fecfbdb710254b59ff9cab2539e63387000000000000000000000000000000000000000000000000000000003bb09b5c", + "nonce": "0x1407fc", + "value": "0x0", + "gas": "0x30d40", + "gasPrice": "0x1328452a00", + "type": "0x0", + "v": "0x26", + "r": "0x70c399212043ce8fcfa7fd55b609bd48f2f86f52d607a9d13fe9ed748657f5b8", + "s": "0x4aa678395073d71ecebfc6dfe0a5a24cd77b84a1104d809848a0fb71a3a25f66" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x8", + "hash": "0x7c79f4613b08d9944660cf0102769c43820810c881eaa485742ea2a7e2d9d30b", + "from": "0xc35fb86f962ea955751a793a007b5cdd44f798d7", + "to": "0xdac17f958d2ee523a2206206994597c13d831ec7", + "input": 
"0x23b872dd000000000000000000000000c3b6f4aba332352b3082890a52961ff17af10038000000000000000000000000b580553a15bf4e450fdc046eca23cc9f88b83fe60000000000000000000000000000000000000000000000000000000002bb1725", + "nonce": "0x3c059", + "value": "0x0", + "gas": "0x14fe1", + "gasPrice": "0x1321385497", + "type": "0x0", + "v": "0x25", + "r": "0x5657d06761be1e445c475721161f410247946226327accd82f3ee4c7f5dac5f0", + "s": "0x72e035c2865ed1819a3c5b4e5ab83d88700061b4bc1d3d90c4817f44a8c221ad" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x9", + "hash": "0xd07c5ced5815b445c4b77d654621c23e39476f6f84efef8ead0739eb99bcceee", + "from": "0xc35fb86f962ea955751a793a007b5cdd44f798d7", + "to": "0xcf1b9a5216c538c3bad113c679ab399e192738d4", + "input": "0x", + "nonce": "0x3c05a", + "value": "0x72a99638507400", + "gas": "0x5208", + "gasPrice": "0x1321385497", + "type": "0x0", + "v": "0x26", + "r": "0x38603e06a72b09c6f30916cfe331d6d5a2fc561c7a8f79344866f3fed5ec1502", + "s": "0x1818002c57a153393b7fd853fbc612ca1b2f51c36e50899bdd23cffa426323e3" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0xa", + "hash": "0x9b42149dd683e06c6be304d3a33a12201a9a669c261ec1e6bc0d84b8efa78e9e", + "from": "0x848d47d6a68fc0ce61e234257fa7024c59d331dc", + "to": "0x93a44734c2ee15d9f80ba65cb7182e19d445a6a4", + "input": "0xcff293aa000000000000000000000000000000000000000000000000000000000000008000000000000000000000000045804880de22913dafe09f4980848ece6ecbaf78000000000000000000000000dd75cd55c1367b3d2d928c6181eea46999d24a7200000000000000000000000000000000000000000000000000000000640753f000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000006124fee993bc00000000000000000000000000000000000000000000000000005e979f84dbf03ad9", + "nonce": "0x2732c", + "value": "0x0", + "gas": 
"0x5d430", + "gasPrice": "0x12ed55988c", + "maxPriorityFeePerGas": "0x499c9ac00", + "maxFeePerGas": "0x17d5fe9465", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0xfeb3f551d1938fc4960fea5aa51cf8ec95db61020c6d5bf737cedf8b487dc05e", + "s": "0x3ab0f4baf5bca5554b942c8fa91e6eeac80f55f5490b9ec7f5e4d00988c55977", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0xb", + "hash": "0xc85fdb5fb384d370ced5430b2b21a88ae84cfde2a63b690a02075995412955e1", + "from": "0x5e2b6c6b2240d582995537d3fafdb556e4a3822f", + "to": "0x98c3d3183c4b8a650614ad179a1a98be0a8d6b8e", + "input": "0xce2e62ff0000000000000000000000000000000000000000000000734db3ca75f14418fc0000000000000000000000000000000000000000000000000000000048ebff2b0000000000000000000000003155acd9f75915fcc21d34035f440da7040bd3ba0000000000000000000000008642a849d0dcb7a15a974794668adcfbe4794b56000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000640753c8", + "nonce": "0x33770", + "value": "0x0", + "gas": "0x493ee", + "gasPrice": "0xe8f26b68c", + "maxPriorityFeePerGas": "0x3b9aca00", + "maxFeePerGas": "0x126d638855", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0xad2a9b80579f2e5b8b11d10971672685b77001fcacd1991ce0ae4d8b693583d3", + "s": "0x69b20db1794eed484fa43359cccfb09216b92daaf72ce3c8610422f2a867c97b", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0xc", + "hash": "0xc9a916d85267ba8e8f115aebb345f2d12ca4679a1bc20eded29722109cf26165", + "from": "0x877e13f6908ba2d3675fbdd2dc565533911e7c8f", + "to": "0x0eae044f00b0af300500f090ea00027097d03000", + "input": 
"0x000000133155acd9f75915fcc21d34035f440da7040bd3ba1c0000000000000000000043820dc900000000006a257e1a8b7524f3c000351006780816e8ab8b2c68fbb9f889bacd69c389d2980000000000003eb71f16b59f1d0ebb005b174620bee6e5bd679b0242bf0bd6291b0ac45b909b0000000000000000000000451f922f", + "nonce": "0x1f45", + "value": "0x0", + "gas": "0x33aef", + "gasPrice": "0x14b546d339", + "maxPriorityFeePerGas": "0x661bae6ad", + "maxFeePerGas": "0x158d6d02fe", + "accessList": [ + { + "address": "0xba41ddf06b7ffd89d1267b5a93bfef2424eb2003", + "storageKeys": [ + "0x8ad01b4e1e0c91faf5c11740c4d70980b1b5f8e851100687bc50350665b9298f", + "0x8363525686300596d12d1e1b961db90c297f1e2eb7ba5c9c0c02e29ed9cbcf06" + ] + }, + { + "address": "0x4620bee6e5bd679b0242bf0bd6291b0ac45b909b", + "storageKeys": [ + "0x0000000000000000000000000000000000000000000000000000000000000009", + "0x000000000000000000000000000000000000000000000000000000000000000a", + "0x000000000000000000000000000000000000000000000000000000000000000c", + "0x0000000000000000000000000000000000000000000000000000000000000008", + "0x0000000000000000000000000000000000000000000000000000000000000006", + "0x0000000000000000000000000000000000000000000000000000000000000007" + ] + }, + { + "address": "0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48", + "storageKeys": [ + "0x10d6a54a4754c8869d6886b5f5d7fbfa5b4522237ea5c60d11bc4e7a1ff9390b", + "0x5d069f7732d74fb157ea6f4b6d3c251b1dbb2bdcd7c1739824a888af215af129", + "0xdb39a974c59aef8f55ec5ffa2e11db9eba954fcfd1643ebd56d8098e7cf6fcba", + "0x02ef5f214522215a5616e66130b3f6154784e7ac1731b268f1b34f0e6eaec841", + "0x76921f5b0f89887bc2d46ff007f0934dcb41227b243a57b72dfaede194205b53", + "0x7050c9e0f4ca769c69bd3a8ef740bc37934f8e2c036e5a723fd8ee048ed3f8c3", + "0x0000000000000000000000000000000000000000000000000000000000000001", + "0x2e6801b8fc376da76a898facc556c0b535f9a457201e9444cf58395bcfbad72b", + "0xd7bea7c2cf853a7b968eabf0d1883e84d152c214f8854bba7f77cdcddc9c482e" + ] + }, + { + "address": 
"0xa2327a938febf5fec13bacfb16ae10ecbc4cbdcf", + "storageKeys": [] + }, + { + "address": "0x3155acd9f75915fcc21d34035f440da7040bd3ba", + "storageKeys": [ + "0x000000000000000000000000000000000000000000000000000000000000000c", + "0x0000000000000000000000000000000000000000000000000000000000000008", + "0x0000000000000000000000000000000000000000000000000000000000000006", + "0x0000000000000000000000000000000000000000000000000000000000000007" + ] + }, + { + "address": "0x8642a849d0dcb7a15a974794668adcfbe4794b56", + "storageKeys": [ + "0x0dbc3bcbd70ebd4873f6565e0eaef1f9a711a0125c32528ff3d19de5b1cfa7ca", + "0xf33cf1bff3c71852995efcfc5efe69c9bd0b4abc6c55a883e40847773e07e05c" + ] + }, + { + "address": "0x06780816e8ab8b2c68fbb9f889bacd69c389d298", + "storageKeys": [ + "0x000000000000000000000000000000000000000000000000000000000000000c", + "0x0000000000000000000000000000000000000000000000000000000000000008", + "0x0000000000000000000000000000000000000000000000000000000000000006", + "0x0000000000000000000000000000000000000000000000000000000000000007", + "0x0000000000000000000000000000000000000000000000000000000000000009", + "0x000000000000000000000000000000000000000000000000000000000000000a" + ] + } + ], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0xde85de9ccc601d4bea7016fbe59d5ec35c14240f08f6a1d2a6583fd46d6b419c", + "s": "0x1fc55b0c2d9fa20a0a5899091789a18e57cece4d29cfcaf8f42d40af8c9e7715", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0xd", + "hash": "0x0804ddf0e10cdb07caa7abbe2431d948a8fb600e0cf08b90f0f7bd82b10c8913", + "from": "0xb150debdd4c29ed3abfc323e712980a549dc6599", + "to": "0xdac17f958d2ee523a2206206994597c13d831ec7", + "input": "0xa9059cbb000000000000000000000000a5efdf00b565c37eb8a83d887d12885adf2b7193000000000000000000000000000000000000000000000000000000010be96316", + "nonce": "0x16", + "value": "0x0", + "gas": "0x11170", + "gasPrice": 
"0x115d0a2941", + "maxPriorityFeePerGas": "0x115d0a2941", + "maxFeePerGas": "0x115d0a2941", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x1183e3cd46df20dcc9746037000687b0cc2d466fe5741c99ee19c139aae22100", + "s": "0x6f669d9d2ca866b7b243ef6eb20f2a6fd791388a4b81be657caf4bdf19de1d93", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0xe", + "hash": "0x61d359ea122a9640afced35a179afcc9ec5281998d8ca4c35ec3a02f592b8c64", + "from": "0xdb86b9ddb3ea90478da1e2b5b9728d7c078960f7", + "to": "0x2ba9dedf10f542397eacb993dc24bb48e92a349e", + "input": "0x", + "nonce": "0x1", + "value": "0x354bfcb51e1f5a5", + "gas": "0x5208", + "gasPrice": "0x1145a415fe", + "maxPriorityFeePerGas": "0x1145a415fe", + "maxFeePerGas": "0x1145a415fe", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0xf37c0bc3e6466cce8585790439b38af7025a937d6d74a9140c318fb8f2c23ebb", + "s": "0x4ce236afecf6c699e11d4cde4af8e528088b2955c230bc11587afdd51c921a93", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0xf", + "hash": "0x295d156daa5338a7e5d1c5e78bf2d1c29f79c22652420d23c9f4d8d6215210a6", + "from": "0x292f04a44506c2fd49bac032e1ca148c35a478c8", + "to": "0xdac17f958d2ee523a2206206994597c13d831ec7", + "input": "0x23b872dd00000000000000000000000001cedca12459e1ebb9ad543a00f17997dfc23cda000000000000000000000000292f04a44506c2fd49bac032e1ca148c35a478c8000000000000000000000000000000000000000000000000000000003b9aca00", + "nonce": "0x5ab7e", + "value": "0x0", + "gas": "0x186a0", + "gasPrice": "0x104c533c00", + "type": "0x0", + "v": "0x26", + "r": "0xf888f6bf8cbb2fd6ef3b76dca6cacd350d8e67d187682e5a822810bd98239242", + "s": "0x457235dfa14fcb4491b3cd81e5e623d9638fbfb4de1e9beb41c5b17eb8278691" + }, + { + "blockHash": 
"0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x10", + "hash": "0x5ddd08dc4cf07fe2fb6f9f72e5e847a7feb548d8802021e8d118634e6418ddd8", + "from": "0x343deb850f8ad7e17499585503c87ca3211b7904", + "to": "0xc51a7ef83abd70e780d4f20383d06cddc05f9669", + "input": "0x", + "nonce": "0x0", + "value": "0x83f5d5e12484694", + "gas": "0x5208", + "gasPrice": "0xfc8a4b936", + "maxPriorityFeePerGas": "0xfc8a4b936", + "maxFeePerGas": "0xfc8a4b936", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x68881e23822b71aaa69795f6b2f448a1770de095eecb18f75d983a0681877646", + "s": "0x232b05c6bed3a92f17d9bd42b09dd79c5e013a5d5e3f822adac4b96e35b283e6", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x11", + "hash": "0xaeb9e7f459d1ed8d1b011f3573112c79720f61f0d8ae3a51d9eb4a50cea45cb7", + "from": "0xc4e5e85a093f28fadb5c7d3fef100ec2ae7dcd37", + "to": "0xef1c6e67703c7bd7107eed8303fbe6ec2554bf6b", + "input": 
"0x3593564c000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000064075a5700000000000000000000000000000000000000000000000000000000000000020a080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000001c00000000000000000000000000000000000000000000000000000000000000160000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec7000000000000000000000000ffffffffffffffffffffffffffffffffffffffff00000000000000000000000000000000000000000000000000000000642edf5e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ef1c6e67703c7bd7107eed8303fbe6ec2554bf6b000000000000000000000000000000000000000000000000000000006407596600000000000000000000000000000000000000000000000000000000000000e000000000000000000000000000000000000000000000000000000000000000415bb5cfac00ed23cd32c1a18fc37a0147df4847e3c0068d1bfcc9e438dddf792a3d542989b55ee24534a7c76e533d7cedc20cdf44ef693f9bf5da5318f1ee19d61b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001200000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000001de029b000000000000000000000000000000000000000017d18bb0b4ee72939e1f09f1f00000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000003000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec7000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc20000000000000000000000002d626b687fe7b1cacc1da0bedd019c66c68cbfc9", + "nonce": "0x1", + "value": "0x0", + "gas": "0x4b5aa", + 
"gasPrice": "0xfa1ea0775", + "type": "0x0", + "v": "0x25", + "r": "0x331069180561897659f2d83c70742bb8df9fddc10a20c72ed2e0bcdaeb083006", + "s": "0x27124b253c6f1fe873b5ad5749bfa2ce70bc6fa8d96619e7598b33c51fe61b5e" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x12", + "hash": "0xcab839b1a2501e91f70412d197acff334aa4b7118803947aa225861d0831abd9", + "from": "0xd64ffec0465eddebfe008695b41d62ebcb02ed74", + "to": "0xdac17f958d2ee523a2206206994597c13d831ec7", + "input": "0xa9059cbb000000000000000000000000199c8b5bbfb0f27387e02127a2e0ef2f4d12d1680000000000000000000000000000000000000000000000000000000005a995c0", + "nonce": "0xa5f", + "value": "0x0", + "gas": "0xf6e9", + "gasPrice": "0xfa1ea0775", + "type": "0x0", + "v": "0x25", + "r": "0x7a8106228fddfd583d17d5473b5d89932946855fa9c4190eee335e2632e3353f", + "s": "0x72dcc25c03bad11ca1ede2561cf003819bcf23ebead3e5f96b9c94dec4af8d93" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x13", + "hash": "0xdcb4bf7b1537aba4d6e1c32a8ed1bb6c36d9fc0cd02594b8534081cf9ad2b4c7", + "from": "0x166434d678c9a7fc0fc1e5d35df4a5cf4cae0914", + "to": "0x7a250d5630b4cf539739df2c5dacb4c659f2488d", + "input": "0x791ac94700000000000000000000000000000000000000000000bfe9273843808fc1758d000000000000000000000000000000000000000000000000000333398ecf549900000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000166434d678c9a7fc0fc1e5d35df4a5cf4cae091400000000000000000000000000000000000000000000000000000000640753e900000000000000000000000000000000000000000000000000000000000000020000000000000000000000004556114bc592c943bc39d2ffa5b5924af0f1edd7000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2", + "nonce": "0x1be", + "value": "0x0", + "gas": "0x80a7e", + "gasPrice": "0xf7d91de8c", + "maxPriorityFeePerGas": "0x12a05f200", + 
"maxFeePerGas": "0x1446a209f5", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0xe3a5ff80002ac09b219273c2553b754f8174551b8154a59094e13d40bea40b1c", + "s": "0x29bb73cc93102ac7864cab88e10f8c6cbb18156dbcbadb3b2d000dbb9d3912d9", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x14", + "hash": "0xf78df1e4fdcddd68f50a7419994e3ec73b11feec8445b5e999033bf2822d63d8", + "from": "0xa54ac807ea6a11cfc698048cd5a4fa96d97f5dda", + "to": "0x92e929d8b2c8430bcaf4cd87654789578bb2b786", + "input": "0x9ddf93bb000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec700000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000df84758000000000000000000000000000000000000000000000000000000000ded75dfa00000000000000000000000000000000000000000000000000000000000000011555344542854524f4e297c6a62393031680000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000022545568627762367945753159635659716438595a7333357031455468595a68703339000000000000000000000000000000000000000000000000000000000000", + "nonce": "0x2b", + "value": "0x0", + "gas": "0x11051", + "gasPrice": "0xf66d8e580", + "type": "0x0", + "v": "0x26", + "r": "0xce7cb475503cada69a51429f26a433825f08727c7f6966a00b46c2c664109bd8", + "s": "0x50d7ae43c56a46aef830bc0f0555cc9aab3f875aa040b7f3b603b0aef09afe1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x15", + "hash": "0x9a5f9ef55f39387ed652c9dd9739d06721858bc3cf2de753193d1705ccf4c449", + "from": "0xffbae266745478620c57848c4cc12ec016005173", + "to": "0xef1c6e67703c7bd7107eed8303fbe6ec2554bf6b", + "input": 
"0x3593564c000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000064075a6f00000000000000000000000000000000000000000000000000000000000000020b080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000002c68af0bb1400000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000002c68af0bb1400000000000000000000000000000000000000000000e7e9e48b8bd1e1148c1f627000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc20000000000000000000000002d626b687fe7b1cacc1da0bedd019c66c68cbfc9", + "nonce": "0xa8", + "value": "0x2c68af0bb140000", + "gas": "0x3645f", + "gasPrice": "0xf66d8e580", + "type": "0x0", + "v": "0x25", + "r": "0x6508f9dd4b89fca5f27b10bf4d6da5fa467137f5712c41a91af34101221af0e0", + "s": "0x876092d99ffce3687b13c3e6e2a4fb0c4d6d7d5f71006348000885287b7e00d" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x16", + "hash": "0xa3aefa016557979ace85ee305850e69364084e9e74c7a6e6b78f3ef6614a395f", + "from": "0xc320e4aaa6aea16ba668252dad3df53e85743901", + "to": "0x2d626b687fe7b1cacc1da0bedd019c66c68cbfc9", + "input": 
"0x095ea7b30000000000000000000000007a250d5630b4cf539739df2c5dacb4c659f2488dffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + "nonce": "0x446", + "value": "0x0", + "gas": "0xdd62", + "gasPrice": "0xf66d8e580", + "type": "0x0", + "v": "0x26", + "r": "0x4a2bd0ed91cf1939d6e6b812cdfa67d2bde62a225ada162b0bfbf6acb75ac036", + "s": "0x7c4175d4c5a01cf5b6693b9af39ef3e6ff9d10e4f07c973a8c8cb40bcf9a5fa1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x17", + "hash": "0x17746b953d853d6f1cfede2f1bb378b37bd03a18b6ec8f2b4770d00fd8f17fac", + "from": "0x443a55edfa8d0f32bfc89d54cccb6e0dfbac5975", + "to": "0x0a5948fc882ce2376d922f02efc721cc44001e7a", + "input": "0x", + "nonce": "0x3f", + "value": "0x470de4df820000", + "gas": "0x5208", + "gasPrice": "0xf5de81400", + "type": "0x0", + "v": "0x25", + "r": "0x1697526611baba0cee40b1328e4772d6b21a12ded9b596ba26cb28d3c17c8980", + "s": "0xf066715ed01a0bbdff55cb27ad3249870f1eaae3302fe6dc9b4aec2eddd8e4c" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x18", + "hash": "0x2bb8f36504a329b8d8eb4031a895aa952e1007c24843a531d3bf29093872f86a", + "from": "0x23f4569002a5a07f0ecf688142eeb6bcd883eef8", + "to": "0xdac17f958d2ee523a2206206994597c13d831ec7", + "input": "0xa9059cbb00000000000000000000000009b6b873f9a7e0f4dcb2f4aadb6be39912aa1b300000000000000000000000000000000000000000000000000000000005f5e100", + "nonce": "0x5a1d1", + "value": "0x0", + "gas": "0x3d090", + "gasPrice": "0xf5de81400", + "type": "0x0", + "v": "0x25", + "r": "0xcea66330c255d619b4e71ff620e7ddbcf02b9a201f2d9996bcebba17d6efe15f", + "s": "0x2b0241d1aaa0b7a0eb98e3593f38a7f63f4c30256712e6fb31df19e6ce785775" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x19", + "hash": 
"0xa5f37749b48fba5435caa821e7078e33060234774d292ec350c6a96627608c1b", + "from": "0x00b3c079ac6f7f90560983f90f83e1409ac1ebd8", + "to": "0x127350408e658c8a02c588d3487f11939fb20110", + "input": "0xcd423f3ab39a11ff1d9208b7d37df56e902c932b0300000000000000000057470b5d4d2019f4c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2", + "nonce": "0x86", + "value": "0x0", + "gas": "0x195ba", + "gasPrice": "0xf4dc6aa13", + "type": "0x0", + "v": "0x25", + "r": "0xc3eecf789c6b826ede8163a34d89122aa330ea4ff551252d8729382ccc248bf0", + "s": "0x213ff41e1c387ac241e77fda91e231959e242c6b2b1552430a3d850eeaa1a98b" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x1a", + "hash": "0x5c835b4a702a2237155c2d82cffbdfc126e27d844439da159c193b2932881d0d", + "from": "0x1570c66fec9862f1295f07fe74eda2196a93f34f", + "to": "0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48", + "input": "0xa9059cbb000000000000000000000000c3ce0e9c88a4a0d48634cb2928cee1b1c02bfeb2000000000000000000000000000000000000000000000000000000003a699d00", + "nonce": "0xd18", + "value": "0x0", + "gas": "0x10267", + "gasPrice": "0xf4dc6aa13", + "type": "0x0", + "v": "0x25", + "r": "0xfe19bb297c813f940d65f57cba7a4b2d339a8acdd62fd2202e27193522559e4", + "s": "0x486df2ed28ef058c082c51d556167f64473109f2aafd563455b615c4c10cc97a" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x1b", + "hash": "0xe8de77052c003d04d9c4ac6fd98f0887fb0d8af07b90c05cd4314ccbd2fbc0df", + "from": "0x738cf6903e6c4e699d1c2dd9ab8b67fcdb3121ea", + "to": "0x3432b6a60d23ca0dfca7761b7ab56459d9c964d0", + "input": "0xa9059cbb0000000000000000000000009f0c8e1f71cf4cd20310ecb685b4427565d9348c0000000000000000000000000000000000000000000000d8d726b7177a800000", + "nonce": "0xd6ba", + "value": "0x0", + "gas": "0x30d40", + "gasPrice": "0xf4ce30064", + "maxPriorityFeePerGas": "0xf95713d8", + "maxFeePerGas": 
"0x1db498c384", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x560a148b26d5db31d83646d9f93a54ac28f5eea90ebfcd8632fbbf460a2b7833", + "s": "0x3dabe128c635605f0557705e20126f54f868f5797391a72c667f2dd2522b888a", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x1c", + "hash": "0xef8053a8139ce91fcb0171044f4f9116e1b726e315233b30663c73069b27846d", + "from": "0xf16e9b0d03470827a95cdfd0cb8a8a3b46969b91", + "to": "0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48", + "input": "0xa9059cbb000000000000000000000000a03bd291b6bca1549b48ca46f20c443913f6ec3d000000000000000000000000000000000000000000000000000000000d60c387", + "nonce": "0x190dc9", + "value": "0x0", + "gas": "0x186a0", + "gasPrice": "0xf4ce30064", + "maxPriorityFeePerGas": "0xf95713d8", + "maxFeePerGas": "0x1db498c384", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x82d3eba7ef773a1737616c3fa03fdabd9d5ba336a38dc1047ead52a78b5aab7d", + "s": "0x502ce0475561dc294406596bbd8b8598fe28a1d852adcb0b0e7574f8a03f421f", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x1d", + "hash": "0xc775da93e31d3d796b9c247fdcba867752eed4b372f270c3f23a7a91819837ef", + "from": "0xec30d02f10353f8efc9601371f56e808751f396f", + "to": "0xdac17f958d2ee523a2206206994597c13d831ec7", + "input": "0xa9059cbb000000000000000000000000ece8998244d5fb8ddc72f8994c60c665f3ae2c6b000000000000000000000000000000000000000000000000000000002a9e08c0", + "nonce": "0x7d2ed", + "value": "0x0", + "gas": "0x186a0", + "gasPrice": "0xf4ce30064", + "maxPriorityFeePerGas": "0xf95713d8", + "maxFeePerGas": "0x1db498c384", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x5740dd5ef90cfc07ce1310c45d81ce166ebe4c4e3ab9a82e1d058f44d39aebb4", + "s": 
"0x5b178980167592b5bf22fed5ef68ed147aed8a3738eb1d8f4c0452f586817c03", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x1e", + "hash": "0xccbcf72b780986d0633923247690f98d6d47d2140ff5a6c6f2c9c7461b4c1560", + "from": "0xcad621da75a66c7a8f4ff86d30a2bf981bfc8fdd", + "to": "0x94e093488c4a81ac88c2020df73cf5bf5621987c", + "input": "0x", + "nonce": "0x10d65c", + "value": "0x214e8348c4f0000", + "gas": "0x13880", + "gasPrice": "0xf4ce30064", + "maxPriorityFeePerGas": "0xf95713d8", + "maxFeePerGas": "0x1db498c384", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0xd00b2f790c5d4550c6696caf287349a1909a1cb97e8bef7a222e790c7917a95f", + "s": "0x47f5e06b9ff6789d29e39cbb6115cd1fe5b01af223ed540a55acd7eb093700bf", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x1f", + "hash": "0x51d0639a571d8c7b3533429598114a6e4d1e81e7cf13687f6c269219c244a123", + "from": "0xf16e9b0d03470827a95cdfd0cb8a8a3b46969b91", + "to": "0xf4cd3d3fda8d7fd6c5a500203e38640a70bf9577", + "input": "0xa9059cbb00000000000000000000000081153f0889ab398c4acb42cb58b565a5392bba950000000000000000000000000000000000000000000000003ae99ff56002c000", + "nonce": "0x190dca", + "value": "0x0", + "gas": "0x186a0", + "gasPrice": "0xf4ce30064", + "maxPriorityFeePerGas": "0xf95713d8", + "maxFeePerGas": "0x1db498c384", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x7b200dcffdb124a6f19cd709e980f098b7f3005c2b0205b76aeaeef336bc3e40", + "s": "0x22b351e5e8e3fcf262ed06af550727e77fd90c765e5134c7688e6d592b13115d", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x20", + "hash": 
"0x14f49903cfc10f40d43fa0ade394883d710320c2db90034ac34c92cd6d09e2a6", + "from": "0xb1bd29e35d7ca6f499ffc4180357a4d237e1e0de", + "to": "0x81153f0889ab398c4acb42cb58b565a5392bba95", + "input": "0x08bbb82400000000000000000000000000000000000000000000000000000000000000000000000000000000000000008305cda6ae133e5ced575dd6806234f328a1b572f44d93860000000000000000000000000000000000000000000000001189bee976a6daef000000000000000000000000f4cd3d3fda8d7fd6c5a500203e38640a70bf957700000000000000000000000084e34df6f8f85f15d24ec8e347d32f1184089a140000000000000000000000000000000000000000000000000000000000000000", + "nonce": "0x255f", + "value": "0x0", + "gas": "0xaae60", + "gasPrice": "0xf4ce30064", + "maxPriorityFeePerGas": "0xf95713d8", + "maxFeePerGas": "0x1db498c384", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x210197edec493a036ec3c46fcd00a65e7d01d0dd18cf5a12f03648074a9fabfd", + "s": "0x35bd13003dd0cb9b5c674a5beb37a5f077664251abb07ed4fff3367aebb2ab35", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x21", + "hash": "0x824c5b85afd80a139e95f0f9c14619bd36c7fbff2e75e297f99f10df2c1a33ee", + "from": "0x077f6053a90449f43d28c38e00123829b1f4dfac", + "to": "0x280027dd00ee0050d3f9d168efd6b40090009246", + "input": "0x6234ff19000000000000000000000000a3f558aebaecaf0e11ca4b2199cc5ed341edfd740000000000000000000000000000000000000000000001cf2e0cab446f400000000000000000000000000000000000000000000000000000be6bdf9272e24800000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000200000000000000000000000005a98fcbea516cf06857215779fd812ca3bef1b32", + "nonce": "0x10ab", + "value": "0x0", + "gas": "0x3cf5a", + "gasPrice": "0xf31601e0e", + "maxPriorityFeePerGas": "0xf31601e0e", + "maxFeePerGas": "0xf31601e0e", + "accessList": 
[], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x5341690f0a8bf4cc9304284cabaef0584ca7819fddf9766f6c0a830602255ab1", + "s": "0x1aded4ccf9be8047579b4313cd0ca8453a2ee2325c0a51d6200b007734fe0e2f", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x22", + "hash": "0x36b47deefcb93a72ce939760457f1bdd7b6c6daf2e9664abc63be98ddaf02801", + "from": "0x3a3702a2575fc282c0892bc4907817fc19b60052", + "to": "0x3a3702a2575fc282c0892bc4907817fc19b60052", + "input": "0x", + "nonce": "0x81e8", + "value": "0x0", + "gas": "0x7530", + "gasPrice": "0xf186bb44c", + "maxPriorityFeePerGas": "0xc4dfc7c0", + "maxFeePerGas": "0x17d8ff9964", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x4372176ccbc8440614de7aa25e7eff9f74cb21da818d653574a7db104f7ae50a", + "s": "0x4859e48ba4d262a56c928b817edfeed1b681cc516d973f912ce15e56a05c19cd", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x23", + "hash": "0x869339718ff7cd50e75f7810c451b40d9bed3c7aac235b88ebad541ead66938d", + "from": "0x903c26a3d690bf010fd6441328983925852ffe4c", + "to": "0x903c26a3d690bf010fd6441328983925852ffe4c", + "input": "0x", + "nonce": "0x821d", + "value": "0x0", + "gas": "0x7530", + "gasPrice": "0xf186bb44c", + "maxPriorityFeePerGas": "0xc4dfc7c0", + "maxFeePerGas": "0x1ad08c03a0", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0xd94bc4f9c0bc2f22efe53a76e546b9bede78899749b038a206393fbc44670b93", + "s": "0x1b9bb23e8371cf2ee072e3e99cbc86018f3165f80504930fd3683050f04f447d", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x24", + "hash": "0x166c8800dfd57e1aba5e1086979ef9f8b7b642572eb49bf79ede76b92853e8fa", + 
"from": "0xb18ccf69940177f3ec62920ddb2a08ef7cb16e8f", + "to": "0xd249942f6d417cbfdcb792b1229353b66c790726", + "input": "0x75ddbe8b00000000000000000000000099b42f2b49c395d2a77d973f6009abb5d67da34300000000000000000000000025f8087ead173b73d6e8b84329989a8eea16cf73000000000000000000000000000000000000000000000229637dffc69a6ef03b0000000000000000000000000000000000000000000000001842bcdd468993c6", + "nonce": "0x89ce", + "value": "0x0", + "gas": "0x2491b", + "gasPrice": "0xf12800546", + "maxPriorityFeePerGas": "0xbef418ba", + "maxFeePerGas": "0xf169980ba", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x358d91e598009b7d04781cbed4b16a0089dffc329d05714a1b59cdc7941ed835", + "s": "0x1d534e44c64a12c9221f228552bd744288043085e3c8af3dca632582130bca59", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x25", + "hash": "0xf36a583a10a569c775917ac946fb55d554f212f7fbeebe889b950365614c32b2", + "from": "0xb287eac48ab21c5fb1d3723830d60b4c797555b0", + "to": "0x34800d4ba1543891982e38ae705fb039feb66b54", + "input": "0x", + "nonce": "0x4af0f", + "value": "0x980e5f8c6300000", + "gas": "0x5208", + "gasPrice": "0xf07ca808c", + "maxPriorityFeePerGas": "0xb43e9400", + "maxFeePerGas": "0x13eb2779c8", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0xf19470f36585fb265ac3d4d3deaae1f07b4c9ea86ac37c3267978f18b156176a", + "s": "0x3d0fd2333f57f0eb101109fb7dc40745dae275266e396df95d29b179e13aad", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x26", + "hash": "0x65af273087c5a3125af9bde12e9f5ffc0b144cd06ffbe60abe394a0743ab25c8", + "from": "0xd42f958e1c3e2a10e5d66343c4c9a57726e5b4b6", + "to": "0xdac17f958d2ee523a2206206994597c13d831ec7", + "input": 
"0xa9059cbb0000000000000000000000003aca9f83bf926cab0410c4b02fb990af051eb05c000000000000000000000000000000000000000000000000000000003677c960", + "nonce": "0x9143", + "value": "0x0", + "gas": "0x1d4c0", + "gasPrice": "0xf07ca808c", + "maxPriorityFeePerGas": "0xb43e9400", + "maxFeePerGas": "0x13eb2779c8", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0xc6150c09a9f2143ab4f704a58f82c9d3e54e393f417be0d4dc1aeb06f0049139", + "s": "0x99044f995c8e211cdc3c40c05bb4ae0c80c43a38b10cd83c45b2445c217dbf0", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x27", + "hash": "0xf0ce3d192ae5e757892616fdd8ba1f1b078b6e4d2517dd418837799f359e7866", + "from": "0x629835afd9b95f02b2bc76743f94db2be54b1840", + "to": "0x6c5ba91642f10282b576d91922ae6448c9d52f4e", + "input": "0xa9059cbb0000000000000000000000006cc5f688a315f3dc28a7781717a9a798a59fda7b000000000000000000000000000000000000000000000919672781b6699e0000", + "nonce": "0x2112", + "value": "0x0", + "gas": "0x186a0", + "gasPrice": "0xf065c4a8c", + "maxPriorityFeePerGas": "0xb2d05e00", + "maxFeePerGas": "0x1ca35f0e00", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x272808e18dff3e3988e84a505b1a9ec74d9df0017ee216f1784fbc783c4afd8e", + "s": "0x1e7b3e2f31c9b7124658a720534ea788b4c5d17b665976300afb2a52013b18be", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x28", + "hash": "0x06dea25e4a05a79a2378d21a741846603533577e1df120d7558f9a77557f97e4", + "from": "0xdb9842fd1f92e14f18742f4d26afebb6586692e3", + "to": "0x7a250d5630b4cf539739df2c5dacb4c659f2488d", + "input": 
"0x791ac947000000000000000000000000000000000000000273d73ca151aac179951c0000000000000000000000000000000000000000000000000000068fb0b928f6902500000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000db9842fd1f92e14f18742f4d26afebb6586692e300000000000000000000000000000000000000000000000000000000640753ea00000000000000000000000000000000000000000000000000000000000000020000000000000000000000002d626b687fe7b1cacc1da0bedd019c66c68cbfc9000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2", + "nonce": "0x310", + "value": "0x0", + "gas": "0x4f5bf", + "gasPrice": "0xf065c4a8c", + "maxPriorityFeePerGas": "0xb2d05e00", + "maxFeePerGas": "0x13cf6c75f5", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0xffc8e5995eaa66d97fe61453e563da28c27854aa5505b26ebab85fc2ecb963a4", + "s": "0x1d5afbc985ce8840b1c14b7d5996715ea6b935d2dcf3f6ee41de6a920600bcdc", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x29", + "hash": "0x8b9e17075b33a4b2e3f0cce72c4620ecdd63a9329833ef85959ad21d850753b9", + "from": "0x3158de883bbc058734aba1b877ffd3c755938903", + "to": "0x000000000dfde7deaf24138722987c9a6991e2d4", + "input": "0xe343fe120000000000000000000000006d74443bb2d50785989a7212ebfd3a8dbabd1f60000000000000000000000000a0b86991c6218b36c1d19d4a2e9eb0ce3606eb4800000000000000000000000045804880de22913dafe09f4980848ece6ecbaf7800000000000000000000000000000000000000000000000000000001a130d7a00000000000000000000000000000000000000000000000003558a10889893800", + "nonce": "0x5f8f", + "value": "0x0", + "gas": "0x61a80", + "gasPrice": "0xf065c4a8c", + "maxPriorityFeePerGas": "0xb2d05e00", + "maxFeePerGas": "0x15a8f52c38", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x7a43ea147e1e1b348cd798cd6b6bcde38aabb27da602b996189d1966828ed82d", + "s": 
"0x4d3ede04ca50a1fdcbe9995ed5bab4d445cfe264dc283006597237cc973b3e19", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x2a", + "hash": "0x3c60157c468948d9a27d5793998ac1492f16e5448ad1222f2fa70c27f2d762c7", + "from": "0x5eb656432b07fb784c0455e58f865bde2a7fac82", + "to": "0x000000000dfde7deaf24138722987c9a6991e2d4", + "input": "0xe343fe120000000000000000000000009c4fe5ffd9a9fc5678cfbd93aa2d4fd684b67c4c000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc200000000000000000000000045804880de22913dafe09f4980848ece6ecbaf78000000000000000000000000000000000000000000000000598d33d514dcdff80000000000000000000000000000000000000000000000004be8c7a78aa914f0", + "nonce": "0x13e28", + "value": "0x0", + "gas": "0x61a80", + "gasPrice": "0xf065c4a8c", + "maxPriorityFeePerGas": "0xb2d05e00", + "maxFeePerGas": "0x185ad44554", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x50d4c51e61d6d8fbadb6dce5af465611fecf81b0e5d5203646befe54c6dd6471", + "s": "0x78cd7c7b667a94209ae1d7e41566500ed6abfb27b5c275ddab6016f8897caf97", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x2b", + "hash": "0x6769b31a63c7d833625d3be60051b84170bb270b5cc5b4b999016405ce7c0255", + "from": "0xa7b53a214b6ce0fffccce7db622f5b22030115aa", + "to": "0x7a250d5630b4cf539739df2c5dacb4c659f2488d", + "input": 
"0x791ac947000000000000000000000000000000000000000000000000000005075c5710b700000000000000000000000000000000000000000000000000a18397bf62da7f00000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000a7b53a214b6ce0fffccce7db622f5b22030115aa00000000000000000000000000000000000000000000000000000000640753ed00000000000000000000000000000000000000000000000000000000000000020000000000000000000000001c036473a058a83e2eedfc0b46bcf6c5a6838240000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2", + "nonce": "0x1c", + "value": "0x0", + "gas": "0x3b47a", + "gasPrice": "0xf065c4a8c", + "maxPriorityFeePerGas": "0xb2d05e00", + "maxFeePerGas": "0x16302240d2", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0xf2c6ff4bca4a4e342e51db22b827fc99e0e5eb52aa77ce0f4c35898aba8b4f55", + "s": "0x1169bbd587a91ea787a61ddac39ee268e935b55358a733d70171eb5b9e3e8765", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x2c", + "hash": "0x40e95d9439ba3af8e7b906c143e3f18eaef3ae3bd757eb2193a93ab068c6e854", + "from": "0x49aafc5acfa5262e995231e7a39b0616168d96b8", + "to": "0x000000000dfde7deaf24138722987c9a6991e2d4", + "input": "0xe343fe120000000000000000000000006d74443bb2d50785989a7212ebfd3a8dbabd1f60000000000000000000000000a0b86991c6218b36c1d19d4a2e9eb0ce3606eb4800000000000000000000000045804880de22913dafe09f4980848ece6ecbaf7800000000000000000000000000000000000000000000000000000001a130d7a00000000000000000000000000000000000000000000000003558a10889893800", + "nonce": "0x5f0f", + "value": "0x0", + "gas": "0x61a80", + "gasPrice": "0xf065c4a8c", + "maxPriorityFeePerGas": "0xb2d05e00", + "maxFeePerGas": "0x185ad44554", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0xbf864b5d22b126d0a0e15b8c11ab1407a98512226db0ff57d3594cdfe9c4af20", + "s": 
"0x22f82cc3f790af74cee4ca0e8ea3b81366e53182d005068a42ef257d0dedf209", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x2d", + "hash": "0x6032f5cafb5a41bf65eba53b70b2198075a2da5b0997b8e3acd7abaeae2ef0f8", + "from": "0xaecb08eedd0db966bb62b21f34594dbabcc3674b", + "to": "0x000000000dfde7deaf24138722987c9a6991e2d4", + "input": "0xe343fe120000000000000000000000009c4fe5ffd9a9fc5678cfbd93aa2d4fd684b67c4c000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc200000000000000000000000045804880de22913dafe09f4980848ece6ecbaf78000000000000000000000000000000000000000000000000598d33d514dcdff80000000000000000000000000000000000000000000000004be98ec70d5140f8", + "nonce": "0x5ee2", + "value": "0x0", + "gas": "0x61a80", + "gasPrice": "0xf065c4a8c", + "maxPriorityFeePerGas": "0xb2d05e00", + "maxFeePerGas": "0x185ad44554", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x75556a770c36c9b7d88059e8ad999de960dda4417c7652b1e37624730e33c016", + "s": "0x6741e274b672ab0b71b6e5c2690e25b38b5010e0e620f9d0dd956bd136e705e9", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x2e", + "hash": "0xd9f70a3e0d16b61dc3a1388243c7fc612d68212d7d199f9cffe1bb9205e70593", + "from": "0xbf94f0ac752c739f623c463b5210a7fb2cbb420b", + "to": "0x7521668f9421b7347945872e082f52f116137ee0", + "input": "0x", + "nonce": "0xa608", + "value": "0x382d628d82c8000", + "gas": "0x33450", + "gasPrice": "0xf065c4a8c", + "maxPriorityFeePerGas": "0xb2d05e00", + "maxFeePerGas": "0x5d21dba000", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x4a184d1681e3668d017f4713958dc3a5842e6df560d4d6ebd781c60f505ee2e3", + "s": "0x5ae8ba32104f7d265c7efb5e57f59ae1cd8ccce9fbac16fb01fc23a5917ab16a", + "yParity": "0x0" + }, + { + "blockHash": 
"0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x2f", + "hash": "0x3c5cea353fd27856801af1e5e14df3ed5091d41c2e1ae7d8d19ea878605e8853", + "from": "0x4b4e14a3773ee558b6597070797fd51eb48606e5", + "to": "0x647b8d99de5778ea8df1347f4e2a8c0734578b86", + "input": "0x", + "nonce": "0xb6d4", + "value": "0x162b3f3766000000", + "gas": "0x33450", + "gasPrice": "0xf065c4a8c", + "maxPriorityFeePerGas": "0xb2d05e00", + "maxFeePerGas": "0x5d21dba000", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x9aecd60b7da8a1cf67101d4e15493d7e5a7f3edac81463e297899371f5abc2e2", + "s": "0x5e226ac3aa2a34e1c150285ccaf0353a4fb21a33a8650d879e8f94ea78988d15", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x30", + "hash": "0xa67712303911aecc0f7d1a6a4ddfc8de10f733b309a279043e88f4b5e870240d", + "from": "0x4b4e14a3773ee558b6597070797fd51eb48606e5", + "to": "0xa294cca691e4c83b1fc0c8d63d9a3eef0a196de1", + "input": "0x", + "nonce": "0xb6d5", + "value": "0x3833d549210c70000", + "gas": "0x33450", + "gasPrice": "0xf065c4a8c", + "maxPriorityFeePerGas": "0xb2d05e00", + "maxFeePerGas": "0x5d21dba000", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x77ec8f89acc9c10870b70946b730e41e03a8cee433c809d05dda54ab3151ecbe", + "s": "0x33ac0c5fc835fc702429f42c4c9d17797b852ea7bedfe6fa8b73edb158ac10b3", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x31", + "hash": "0x631aacc903e8b7402faea6914c9ce7e3dee6bf25ffa6ce2a9479641c7c45bced", + "from": "0xfdbaa5eee7cdf45eec3a71a672065a67191fda7a", + "to": "0x7a250d5630b4cf539739df2c5dacb4c659f2488d", + "input": 
"0xfb3bdb410000000000000000000000000000000000000000000000102f657080a13efe4000000000000000000000000000000000000000000000000000000000000000800000000000000000000000003aeca8397cb16cd3ed02859fd0427988deb8ec3900000000000000000000000000000000000000000000000000000000640758290000000000000000000000000000000000000000000000000000000000000002000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2000000000000000000000000990f341946a3fdb507ae7e52d17851b87168017c", + "nonce": "0xefe1", + "value": "0x2385bb314ec3b3b8", + "gas": "0x493e0", + "gasPrice": "0xf065c4a8c", + "maxPriorityFeePerGas": "0xb2d05e00", + "maxFeePerGas": "0x161e70f600", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0xa054e318e7e46d717db56fd96442f39ab71250ddd4541558eff91b8a7de6187a", + "s": "0x3dfba7b32ace962b7abaa5ac2f3951c018b353bd6111742e5db99472cde23ee3", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x32", + "hash": "0xa1979b0a229d22852c6c7cfa2d2ea1b1235b4bbfcc24208fd0c5a6aea7dd57af", + "from": "0x3b8e1903ed6d4da2c7f88422b72c4b182570293c", + "to": "0x2b95a1dcc3d405535f9ed33c219ab38e8d7e0884", + "input": "0x47e7ef240000000000000000000000003b8e1903ed6d4da2c7f88422b72c4b182570293c0000000000000000000000000000000000000000000034a9e5fe8e057fc00000", + "nonce": "0xf9", + "value": "0x0", + "gas": "0xabecb", + "gasPrice": "0xee88ee58c", + "maxPriorityFeePerGas": "0x9502f900", + "maxFeePerGas": "0x1cfb1277a8", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x40cc06f0918127743e769cd35df1a27928c43b252cc7888789615d30ca104716", + "s": "0x34ed0033c9ee0a472990120a8c46e4672eef6dbcb0461df9c2b6f056ac526690", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x33", + "hash": 
"0xb392204207177698d3d8f478f04e94cd70aeb674ae66bd2728222cd220e66839", + "from": "0x66c57bf505a85a74609d2c83e94aabb26d691e1f", + "to": "0x6b175474e89094c44da98b954eedeac495271d0f", + "input": "0xa9059cbb0000000000000000000000002cd6749cf31e752a4f192f496ee4340282610afc0000000000000000000000000000000000000000000000003dc1936c427d0000", + "nonce": "0x17906", + "value": "0x0", + "gas": "0x7a120", + "gasPrice": "0xecac1808c", + "maxPriorityFeePerGas": "0x77359400", + "maxFeePerGas": "0x3ad2c94896", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x8c20c014f83c0909c8ff8886ceceaedd9cafd3c478d51ff43c64ddfd2046615b", + "s": "0x58a9cdbc6a896c32f1d2ef0c9e2b8f39141e41f433c682b116c3656bb34820b6", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x34", + "hash": "0xc4ebd2b7062fd4c56042caeda7f11ed2f22658ef570f1c878449caae140c7a5e", + "from": "0x226e72174e5b3cd5bf558c7be67a23639475ceaf", + "to": "0x16d5a408e807db8ef7c578279beeee6b228f1c1c", + "input": "0x55362f4d0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000100f835c73bda80970000000000000000000000000000000000000000000000010390b27153bb17df", + "nonce": "0xa8", + "value": "0x1158e460913d00000", + "gas": "0x57a19", + "gasPrice": "0xecac1808c", + "maxPriorityFeePerGas": "0x77359400", + "maxFeePerGas": "0x13532f7e00", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0xe212f3e9f4625e5dbbc42306c948cb14efc3ca2b07ce896ba0439c5ebf2a695e", + "s": "0x67d718da0609b4528127b913a77727f92e68a04a4f30627e6b19dd95269e0ac2", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x35", + "hash": 
"0xf7bb1b54eb539fe1106e31ff111955e166a9caa3a69557350a243f9b62a77100", + "from": "0x56eddb7aa87536c09ccc2793473599fd21a8b17f", + "to": "0x11409bc96867486384d8a680ddf87dea0a57d310", + "input": "0x", + "nonce": "0x435989", + "value": "0xdcf7af614b33000", + "gas": "0x32918", + "gasPrice": "0xecac1808c", + "maxPriorityFeePerGas": "0x77359400", + "maxFeePerGas": "0x17bfac7c00", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x39e9d3fbe9ecbe76fd6f5ce94a9e20b327eedff15a0a79896b56937d3e8f2152", + "s": "0x1823ae0b07bfc917eb4c1426df395c9586aeea53ae6fe040f249bdd8ba37fc9b", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x36", + "hash": "0x996a68c6d819af761c9e1077b3f5d60806129ace3e924dbf7efb10353cf0f0cd", + "from": "0x21a31ee1afc51d94c2efccaa2092ad1028285549", + "to": "0x95ad61b0a150d79219dcf64e1e6cc01f0b64c4ce", + "input": "0xa9059cbb00000000000000000000000048da2e9e314db3f8d2ad72b443763ca1dc97488300000000000000000000000000000000000000000004709c15d5c8f17d170000", + "nonce": "0x58a256", + "value": "0x0", + "gas": "0x32918", + "gasPrice": "0xecac1808c", + "maxPriorityFeePerGas": "0x77359400", + "maxFeePerGas": "0x17bfac7c00", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0xba5489c75130ddce9a1e4154cae8bbbfc755ff628d0092652958855792c613a7", + "s": "0x59d2cc15ced27a750efcd6886b73d2afc37d654fe0b80efe6a18adcf8534033c", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x37", + "hash": "0xd45c86ebe3f9208d98c24c664c7244c2e96ec37245ed6de1152eddca5d8405df", + "from": "0xdfd5293d8e347dfe59e90efd55b2956a1343963d", + "to": "0xd533a949740bb3306d119cc777fa900ba034cd52", + "input": 
"0xa9059cbb0000000000000000000000005d09a20d45dc594fa49b924a730c1d278928c88a0000000000000000000000000000000000000000000003cbc1331e82476f1c00", + "nonce": "0x549fc6", + "value": "0x0", + "gas": "0x32918", + "gasPrice": "0xecac1808c", + "maxPriorityFeePerGas": "0x77359400", + "maxFeePerGas": "0x17bfac7c00", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x3893039b19e8f183b82d2100a670fb64c2dc0861aaea69361b1e196b111fad15", + "s": "0x4761dc48ecba9879d10fc041389a427e37edd002fd0734cc03bc68862aafe83b", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x38", + "hash": "0x045653392ec35e4b07f860720cc33c10bb20587aa7e240031996cbd8b2a0f7ea", + "from": "0x28c6c06298d514db089934071355e5743bf21d60", + "to": "0x579cea1889991f68acc35ff5c3dd0621ff29b0c9", + "input": "0xa9059cbb000000000000000000000000277834ffb8f94908ee6ea5eeae28bdbf995ef4e000000000000000000000000000000000000000000000d39c7c9f81e5d7e80000", + "nonce": "0x5cd470", + "value": "0x0", + "gas": "0x32918", + "gasPrice": "0xecac1808c", + "maxPriorityFeePerGas": "0x77359400", + "maxFeePerGas": "0x17bfac7c00", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0xd28958622ebf9ffe6f22d2652f2a254caf1628bb588e6b096bad42c7ffb99dc9", + "s": "0x3c8a7aa8e15df08c498ea115556982d27d5f04b9ea6b7ce7e2d1c6d203c20dc9", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x39", + "hash": "0x8665dee17386adefd9970a377b160dcb074831971cdb1930f67ff0e58892406c", + "from": "0x9696f59e4d72e237be84ffd425dcad154bf96976", + "to": "0x2f0f5ec05ef07a163ba1acc02abf288296f9e28f", + "input": "0x", + "nonce": "0x3e0976", + "value": "0x131a6731de8dc800", + "gas": "0x32918", + "gasPrice": "0xecac1808c", + "maxPriorityFeePerGas": "0x77359400", + "maxFeePerGas": "0x17bfac7c00", + 
"accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0xeef44c9c8bcc40c16fc650d29ec8970132c3e55aa25c4b49f7c486fa57de8006", + "s": "0x893f8d4f599637034c63e1a248db614adba44b9fd0b5256226d10dc3c27df00", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x3a", + "hash": "0x85d45d5fb4e93b09033ff4feff5e14da36f6341ee29e56a2738dc3da597224d1", + "from": "0x21a31ee1afc51d94c2efccaa2092ad1028285549", + "to": "0xee46c0f064f2c74bde8881343ca06d599680c54e", + "input": "0x", + "nonce": "0x58a257", + "value": "0x8ce34dcb29e800", + "gas": "0x32918", + "gasPrice": "0xecac1808c", + "maxPriorityFeePerGas": "0x77359400", + "maxFeePerGas": "0x17bfac7c00", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0xd4a845fd35bb957bfa504faf3445da503888e526c27416b7c3ddfb0a568a6335", + "s": "0x7d1a7b9160c13255bedbdf3a089d9cbd2436bcf54726976bd3270837d69107a5", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x3b", + "hash": "0x800921a644c0b7a0ac4e4f3f14065bc54f155ea6b7dff037bf2635f541802ba3", + "from": "0xc1e2a676c2cf7c8120e927396642324b4a3c0e75", + "to": "0x000000000dfde7deaf24138722987c9a6991e2d4", + "input": "0xe343fe120000000000000000000000002e8b9d1fd95cc65ad0927ca6246c34182fae538a0000000000000000000000000258f474786ddfd37abce6df6bbb1dd5dfc4434a000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec700000000000000000000000000000000000000000000000000000030312c0a40000000000000000000000000000000000000000000000000000000007801cafc", + "nonce": "0x5f3f", + "value": "0x0", + "gas": "0x61a80", + "gasPrice": "0xecac1808c", + "maxPriorityFeePerGas": "0x77359400", + "maxFeePerGas": "0x15a8f52c38", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": 
"0x7e48d03ee2528ae11f439c9d4e62db68b14d225a9ba9b4ffbc5f3255c942823f", + "s": "0x26b457021f8359f3b21253753224b1234249ebc648c39888e9abadbd2852458a", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x3c", + "hash": "0x0b279aabfbcf72370c8310ffed64a35aa72a88554c4281aae16f33ce055a6104", + "from": "0xe401a6a38024d8f5ab88f1b08cad476ccaca45e8", + "to": "0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48", + "input": "0xa9059cbb000000000000000000000000f89d7b9c864f589bbf53a82105107622b35eaa400000000000000000000000000000000000000000000000000000003434d9fa40", + "nonce": "0xd43", + "value": "0x0", + "gas": "0x15f90", + "gasPrice": "0xecac1808c", + "maxPriorityFeePerGas": "0x77359400", + "maxFeePerGas": "0x2e90edd000", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0xec93085973f73f1ad47a4b358a85a1e6d58cf06a6f0750cb96e7c80154557716", + "s": "0x2875ae71564b566e24f1752bfaa470efc978a4a94b9cb9af8bd1ef4dbf8ac0d1", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x3d", + "hash": "0x9392d29d5b6608e6518776369e34b551598e8c58cd247aca749175c63ce830c3", + "from": "0x6750815b07bdf6a857610f9a824c21ef7989e0c8", + "to": "0xdac17f958d2ee523a2206206994597c13d831ec7", + "input": "0xa9059cbb000000000000000000000000f89d7b9c864f589bbf53a82105107622b35eaa40000000000000000000000000000000000000000000000000000000001850814f", + "nonce": "0x21", + "value": "0x0", + "gas": "0x15f90", + "gasPrice": "0xecac1808c", + "maxPriorityFeePerGas": "0x77359400", + "maxFeePerGas": "0x2e90edd000", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0xc0bcb4c4d4488d0455cd9110674930ed90b775db70f8f11ed808592a283ba34e", + "s": "0x7dbd77386bc5ae792a8b1142c0b78e43d1c11bf04180b9168dfd21a9eaedc364", + "yParity": "0x0" + }, + { + "blockHash": 
"0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x3e", + "hash": "0xedefd0b413d25c3fb4e8543c6654c4679f67bca12f120c617e8f7f262b6b4b6b", + "from": "0x8e2f5d927957d0034b1bd13bb10098b83fcd9b13", + "to": "0xdac17f958d2ee523a2206206994597c13d831ec7", + "input": "0xa9059cbb000000000000000000000000f89d7b9c864f589bbf53a82105107622b35eaa400000000000000000000000000000000000000000000000000000000017069f84", + "nonce": "0xe", + "value": "0x0", + "gas": "0x15f90", + "gasPrice": "0xecac1808c", + "maxPriorityFeePerGas": "0x77359400", + "maxFeePerGas": "0x2e90edd000", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x432f63b61c596f6223bb1d2bc10d73799863dd17ff57dbed33919d07d07da205", + "s": "0x31cb992e128f69ea86419390ef06de1e52994488be1baeedf605894718850c87", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x3f", + "hash": "0x67f8674dc6e1d1c7a0d35e14fe810936a71b7a228c062e3f6af9bd03546e6d5e", + "from": "0xf89d7b9c864f589bbf53a82105107622b35eaa40", + "to": "0x176f1ddb29a9252a664f8e6cb5e4fb1cac9d7c46", + "input": "0x", + "nonce": "0x81138", + "value": "0x210a4cfc6940000", + "gas": "0x15f90", + "gasPrice": "0xecac1808c", + "maxPriorityFeePerGas": "0x77359400", + "maxFeePerGas": "0x2e90edd000", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0xecd9e1d93a76bee85b03f3f6c13569a4adf6ae0d7e66646e5991607b3bed709d", + "s": "0x3baa95d6fc86978ff0ec90444671e037f8f9e2d328854a568194a033a3d9708b", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x40", + "hash": "0x6841fa17c9ea67e1552ce657f459f2a8a803ccb581739933a837b21fe2061f3d", + "from": "0xa3366e2ad9782a371a29a7c9f3a0055511f24485", + "to": 
"0x2d626b687fe7b1cacc1da0bedd019c66c68cbfc9", + "input": "0x095ea7b30000000000000000000000007a250d5630b4cf539739df2c5dacb4c659f2488dffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + "nonce": "0x1c36", + "value": "0x0", + "gas": "0x186a0", + "gasPrice": "0xecac1808c", + "maxPriorityFeePerGas": "0x77359400", + "maxFeePerGas": "0xf224d4a00", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0xf4acbff28b43b58d4b41945c87e61255bf4bff721c000e605d24c2f7033794f4", + "s": "0x3e1c9c9024eaeccf0cded98540b05728acd39851e2b930932d7c1be1221ef2ab", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x41", + "hash": "0xca5ce99eeded0ed7e8783d5add44a669a4785c95fba63e4d3e1f77fceaabaeb5", + "from": "0xf89d7b9c864f589bbf53a82105107622b35eaa40", + "to": "0xb9635fc9f6a94ea729e4481ecd8372355cc0ea99", + "input": "0x", + "nonce": "0x81139", + "value": "0x31222882f5b8800", + "gas": "0x15f90", + "gasPrice": "0xecac1808c", + "maxPriorityFeePerGas": "0x77359400", + "maxFeePerGas": "0x2e90edd000", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0xe0642709c569a1f051182bcf19dc9dc1092d9d797c16f30a3825ba3dad0f078a", + "s": "0x65749a408b46d4c69011c22ef4ad4900e43c492672c9442c15d719a548a47291", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x42", + "hash": "0x5528a017e2270c5fb39e7d3afe591f733b3e5d75deef46d037219ee6ab4b59bd", + "from": "0xf89d7b9c864f589bbf53a82105107622b35eaa40", + "to": "0xdac17f958d2ee523a2206206994597c13d831ec7", + "input": "0xa9059cbb0000000000000000000000009b9f92492e3816f21a0302c74274b71ac1ed43880000000000000000000000000000000000000000000000000000000016f361a8", + "nonce": "0x8113a", + "value": "0x0", + "gas": "0x15f90", + "gasPrice": "0xecac1808c", + "maxPriorityFeePerGas": 
"0x77359400", + "maxFeePerGas": "0x2e90edd000", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0xa6b37004dcad2a30626f5204f9b6cbab0c841f2d0bede0601c74f066a4f735ba", + "s": "0x5e4fed6f3a092e23d1c64014c5a6a9ea94c1ef2bed1f84ac7f539c52ca9c826a", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x43", + "hash": "0x0bf44da54d0288b745d08ae931d1fe6ce5425fa04c1ebd6ea66ecfddcd45addc", + "from": "0x6f7171a84f12a506a4ff99d7ae3856e0167f632b", + "to": "0xf57e7e7c23978c3caec3c3548e3d615c346e79ff", + "input": "0x095ea7b3000000000000000000000000e66b31678d6c16e9ebf358268a790b763c133750ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + "nonce": "0x18", + "value": "0x0", + "gas": "0xc158", + "gasPrice": "0xecac1808c", + "maxPriorityFeePerGas": "0x77359400", + "maxFeePerGas": "0x113abe6400", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x46b4b7a11fdc96fdc8842becd71b3a0beeae9c7b9864d003ac843e027697310e", + "s": "0x2ba3171fbd76f8ba6427da2e9569fda48a2b6d87d696d0ba518adfa4ab5b2a03", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x44", + "hash": "0x339db21fcf1d5382be4ed8eff77cb9b74df896482277b2c02cf0841ec83145eb", + "from": "0xf60c2ea62edbfe808163751dd0d8693dcb30019c", + "to": "0xedc7001e99a37c3d23b5f7974f837387e09f9c93", + "input": "0x", + "nonce": "0x181cbd", + "value": "0x1935239e4bf4c30000", + "gas": "0x32918", + "gasPrice": "0xecac1808c", + "maxPriorityFeePerGas": "0x77359400", + "maxFeePerGas": "0x1270b01800", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x4a34ed10035d828c33bbac1c49bda191a13a0a4d00f63ff73dfe62ce294bdf9e", + "s": "0x54cdf37e6706de7fd8746d987aa9118be472a4d0f906e9a48f5d180abbf72df2", + "yParity": "0x0" + }, + { + 
"blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x45", + "hash": "0xc56fb574e153215d3b00d0e523e8dcc903fda6ff2898bf832d3111c7210f8b3c", + "from": "0x006961c62280572b646906fcae156d8731cb65ef", + "to": "0x2153eaaf34423fec8e198bfb0eba2167ae105696", + "input": "0x", + "nonce": "0x0", + "value": "0x16b820f9850b9f", + "gas": "0x5208", + "gasPrice": "0xecac1808c", + "maxPriorityFeePerGas": "0x77359400", + "maxFeePerGas": "0x13532f7e00", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x70a5f3db147033ce5e9493ea91651325b9a57f836a9da24f3fa8b2fbf0a44f8b", + "s": "0x521f60bcbf1941193267b8db187513add9359c48031b67925a56fb4fcaa73a99", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x46", + "hash": "0xc29a47013efab86459a631e234ea41d0f2dbd9ba3955299ba9bae00dc5d10c02", + "from": "0x6f7171a84f12a506a4ff99d7ae3856e0167f632b", + "to": "0xe66b31678d6c16e9ebf358268a790b763c133750", + "input": 
"0x5cf5402600000000000000000000000000000000000000000000000000000000000000c0000000000000000000000000f57e7e7c23978c3caec3c3548e3d615c346e79ff000000000000000000000000f57e7e7c23978c3caec3c3548e3d615c346e79ff000000000000000000000000000000000000000000000085721e68651a40e26e000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec70000000000000000000000000000000000000000000000015912687d24779ada00000000000000000000000000000000000000000000000000000000000001e8dac748d4000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec7000000000000000000000000f57e7e7c23978c3caec3c3548e3d615c346e79ff000000000000000000000000000000000000000000000000000000009655d6b9000000000000000000000000000000000000000000000085721e68651a40e26e000000000000000000000000af0b0000f0210d0f421f0009c72406703b50506b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000006f7171a84f12a506a4ff99d7ae3856e0167f632b00000000640753c800000000000000000000000000000000000000006407536e0000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000001b67a519b71c7446b33bd4a7a79f0f7bd0a74d878b1137ee62667f4bb89ce48628387776876fd3fea34186aa19e8e6533abe5d0bb68cad7a4aedae68261587125b000000000000000000000000000000000000000000000085721e68651a40e26e869584cd000000000000000000000000382ffce2287252f930e1c8dc9328dac5bf282ba10000000000000000000000000000000000000000000000ebe3ec9fa16407536f000000000000000000000000000000000000000000000000", + "nonce": "0x19", + "value": "0x0", + "gas": "0x4199c", + "gasPrice": "0xecac1808c", + "maxPriorityFeePerGas": "0x77359400", + "maxFeePerGas": "0x113abe6400", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0xcef099d1a9287bade00273de1e85f3b2ba25d4e4a3c5208cdee13e4b09d470ec", + "s": "0x3396536b0a954c26b21618e0a682d8c43276f613fe380e59b8326e2ab73ac0ff", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + 
"blockNumber": "0x1000000", + "transactionIndex": "0x47", + "hash": "0x12bc61c3a537eda1a669e21b233796249f69dc3c71e694dc3b699866efbb4c4a", + "from": "0x0a9478209a962c324842e85e3a026043e9c840f0", + "to": "0xbb0e17ef65f82ab018d8edd776e8dd940327b28b", + "input": "0xa9059cbb000000000000000000000000931b29812cee179e7a0c1f35d2a89103272668ec00000000000000000000000000000000000000000000000735beeb55f6f40000", + "nonce": "0x875", + "value": "0x0", + "gas": "0x30d40", + "gasPrice": "0xecac1808c", + "maxPriorityFeePerGas": "0x77359400", + "maxFeePerGas": "0x1981a070ca", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x63d717d7ff85013ae17e862a27ef377477c76b29b6936b5605f26c02ead4df5b", + "s": "0x27e2bcc17f8714f9c221f1831e367c800eef4321fc1f43ec37912f03fabb8393", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x48", + "hash": "0xc76bdae136224d08abf880a20ce9b310d5e7f765badd5246ae2a04205133ee59", + "from": "0xdfd5293d8e347dfe59e90efd55b2956a1343963d", + "to": "0x1aab3354b8a4d79317cb89e6c6548688fcb7f243", + "input": "0x", + "nonce": "0x549fc7", + "value": "0x2dc8dbba3e0800", + "gas": "0x32918", + "gasPrice": "0xecac1808c", + "maxPriorityFeePerGas": "0x77359400", + "maxFeePerGas": "0x17bfac7c00", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x5f9c6dadf1ca7740952ed4558c9d5c84ed33f8e5736e966e9f450c81ba420596", + "s": "0x4f163a8eb1cb41a9fa3e2c09349f45885d94d87ef2041853f26339832101697", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x49", + "hash": "0xb9ae178f939257bb5ab77d84baaa99b164191c40fdd8ecf0799bc474545dc14b", + "from": "0x4976a4a02f38326660d17bf34b431dc6e2eb2327", + "to": "0x48ddd23a4de24c5491b31eda5b46a424f19691e5", + "input": "0x", + "nonce": "0x2385df", + "value": 
"0x1bbd2a02890d0000", + "gas": "0x32918", + "gasPrice": "0xecac1808c", + "maxPriorityFeePerGas": "0x77359400", + "maxFeePerGas": "0x17bfac7c00", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0xb8d75c52206273455edea890d9e5659b898acfb77e14bed954ac6fe102139813", + "s": "0x2fa9a212ad519180caea1db09b26af3cad4a19cf8ac3c793ed57a0602b9c2083", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x4a", + "hash": "0xe76f14ee635834682bd9f254b8a331bb8b7de9ef287f1518d5787777c793f992", + "from": "0x9696f59e4d72e237be84ffd425dcad154bf96976", + "to": "0xdac17f958d2ee523a2206206994597c13d831ec7", + "input": "0xa9059cbb000000000000000000000000efed613afbd5e2844d123859dbee10f583cf0b6f000000000000000000000000000000000000000000000000000000059642d9c0", + "nonce": "0x3e0977", + "value": "0x0", + "gas": "0x32918", + "gasPrice": "0xecac1808c", + "maxPriorityFeePerGas": "0x77359400", + "maxFeePerGas": "0x17bfac7c00", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0xc4c74e3666a71c39c1b4b7756b08a893de1029f76d9e222519c130303ee66cea", + "s": "0x7bec058ea89a52fe4ac68399fb1d0769170764050e8cfe48a808fc811d66c141", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x4b", + "hash": "0x2bb798660f36d5b969092028c75a5e47246705408d84b09b59a6b02c34318973", + "from": "0x21a31ee1afc51d94c2efccaa2092ad1028285549", + "to": "0xdac17f958d2ee523a2206206994597c13d831ec7", + "input": "0xa9059cbb000000000000000000000000d3bab3acfcbdc3965236f7f102976869ad9b8cf20000000000000000000000000000000000000000000000000000000014dc9380", + "nonce": "0x58a258", + "value": "0x0", + "gas": "0x32918", + "gasPrice": "0xecac1808c", + "maxPriorityFeePerGas": "0x77359400", + "maxFeePerGas": "0x17bfac7c00", + "accessList": [], + "chainId": "0x1", + 
"type": "0x2", + "v": "0x0", + "r": "0x262b0ba271785c74d6f4e5109d04ddc17f10d59915b0d8b2fd6a304ac6f22f13", + "s": "0x8602e7a3b6adef8dd275a0c91cd43658ad3de6885aa552ffa05e94928d68d60", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x4c", + "hash": "0x5ee46d9813a5f7696848ba14bc4b3b0d853e6e20ebdb1a31d55b0d666c1c9f8d", + "from": "0x37016353f04a8288b922b5b4f5dd153f28a57bed", + "to": "0x0f5d2fb29fb7d3cfee444a200298f468908cc942", + "input": "0xa9059cbb000000000000000000000000931b29812cee179e7a0c1f35d2a89103272668ec000000000000000000000000000000000000000000000073a15246e1b4340000", + "nonce": "0x1801", + "value": "0x0", + "gas": "0x30d40", + "gasPrice": "0xecac1808c", + "maxPriorityFeePerGas": "0x77359400", + "maxFeePerGas": "0x1c6c3524d6", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0xeef360cbcb72ecb5c1f4852354d3648b352f5d06475522ede059461102f304bf", + "s": "0x59ff0be43f559d03ec8fc12e50fd5b9c3a3a2a086f1bdc10ff2ee018a51fcc8d", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x4d", + "hash": "0x2a681af892f0cfad9bb9bbc51c249011836fcfa66f79c759f329d6dc96a9f24c", + "from": "0x561cafbb9ff904baca4ecbf564e3466bfeb0ec92", + "to": "0xf1f955016ecbcd7321c7266bccfb96c68ea5e49b", + "input": "0xa9059cbb000000000000000000000000ca246a06b1bfbd8d0610e1c80e169eb570b5016900000000000000000000000000000000000000000000efbc05ddc2f4f6080000", + "nonce": "0xa27", + "value": "0x0", + "gas": "0x30d40", + "gasPrice": "0xecac1808c", + "maxPriorityFeePerGas": "0x77359400", + "maxFeePerGas": "0x1981a070ca", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x40818e5bdf6c8e675e4fac536794d01601629c44c1a8a26d792f33e0e73024f2", + "s": "0x5eab1f05da312b57bc4cf8746fe4d64bf7ad31ecbd4515ac501f88ed2887764d", + "yParity": 
"0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x4e", + "hash": "0xc53b992584e4f78767dd9263d5bfccacead9e98387add364237974ce652297dd", + "from": "0x79fa239f2b83c714e6b82a17bd636ec5dc9691d2", + "to": "0xcf8bc271b1c539d96bc2cab1c1333dd7c8823358", + "input": "0x", + "nonce": "0x1b", + "value": "0x6f05b59d3b20000", + "gas": "0x5208", + "gasPrice": "0xecac1808c", + "maxPriorityFeePerGas": "0x77359400", + "maxFeePerGas": "0x14fb1eda14", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x397c70f5b2abbe783a36bdd53724527449bff806904ec26708979c40e16cb282", + "s": "0x6ab10c3323a373acaf3575a53161339e27658ad86692cc54f9fd143a4bf6be8e", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x4f", + "hash": "0x721b9741261f3e097b9a0c7ba630897b5efc98d5190652b1dffe5d7f9225ed94", + "from": "0x218b5a7861dbf368d09a84e0dbff6c6ddbf99db8", + "to": "0xbc60258f775683ea28048030806ad3a80c4a33ae", + "input": 
"0xc98075390000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000032000000000000000000000000000000000000000000000000000000000000004000001010001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000028000000000000000000000002beab0ea044c43f06c49631cfeec18ef0000db020302010b05070a060c040f0d030809000e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000433878d000000000000000000000000000000000000000000000000000000000433bea000000000000000000000000000000000000000000000000000000000043426bb00000000000000000000000000000000000000000000000000000000043426bb00000000000000000000000000000000000000000000000000000000043426bb000000000000000000000000000000000000000000000000000000000435d931000000000000000000000000000000000000000000000000000000000435fa410000000000000000000000000000000000000000000000000000000004365a6c000000000000000000000000000000000000000000000000000000000438f638000000000000000000000000000000000000000000000000000000000438f638000000000000000000000000000000000000000000000000000000000439afbd00000000000000000000000000000000000000000000000000000000043a2d4a00000000000000000000000000000000000000000000000000000000043b678700000000000000000000000000000000000000000000000000000000043cc2ca00000000000000000000000000000000000000000000000000000000044292a5000000000000000000000000000000000000000000000000000000000445512c0000000000000000000000000000000000000000000000000000000000000006704c65d9ba581aaa8d94a971fd646bc64cea53f88423662209476754f98e290385bbd135b68239a61063c4b34a6dc5c107088998f52c91d3e9c565c681d60c7f73cf65b22281a31849a8ace968d795b08f07590820991267868e049faa187abce2e2c938dfd50cf4118a61a2f6a09eb926ebaaebb2c116b3e6f7fd4e68d3a38128af4d35d2459bf150730c486cd683bd6b982490f254b1fc8b49738be0d70fd014877
5f034251b74c52fb7ff15d8c915015f7ea8862fbaf18baf9e10759a53220000000000000000000000000000000000000000000000000000000000000006357247e234902e6ddaea6c3ede048b77f1221350d9d85432d45c1f3b2d237d523478aef8e6b40c9694226c96b837a3ad26a081810da67d00233223a2279019f47946e3bd5e540080d999da1ac994885eb550de0eb51d19cd77a58a4be6c7e7df348f2578f70a7ad3b721d769ce6d94fa615cf7e5e3894626b8052bee4a82ac6b271c9486be21ec5c51a2da03e5d94c951be0a09e116eff37bee65c7630eaee0d0d0efed93ae3895baec0afffbd0ea417f020ca90b1f645c38fdbf7bb1ca72c88", + "nonce": "0x18564", + "value": "0x0", + "gas": "0xb71b0", + "gasPrice": "0xecac1808c", + "maxPriorityFeePerGas": "0x77359400", + "maxFeePerGas": "0x14dfe2c415", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x8db50c7a411a77a67b39379ff1c6a101bccb21991bf243a9ef05542a3f15b8e3", + "s": "0x654917017e864da234eb7f0b729315cb8100d3ea9ef3566abe41bcd9fda681ce", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x50", + "hash": "0xe4bb301785ff861550980bdfeb5c4db5454250dbf9eb9e4d63ec10cd6a1c7af5", + "from": "0xc4b732fd121f2f3783a9ac2a6c62fd535fd13fda", + "to": "0x365796abdec8b93963e76e4107417e71c46b22a0", + "input": 
"0xc9807539000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000003200000000000000000000000000000000000000000000000000000000000000400010001010101000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002800000000000000000000000cd4a6b6e00a780d8d15bad5318428dfc000522e4060f02030a0706080e090b000d0c0504010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000044e911480000000000000000000000000000000000000000000000000000000044e911480000000000000000000000000000000000000000000000000000000044e911480000000000000000000000000000000000000000000000000000000044fc2418000000000000000000000000000000000000000000000000000000004506a1a400000000000000000000000000000000000000000000000000000000450f36e80000000000000000000000000000000000000000000000000000000045102b0c000000000000000000000000000000000000000000000000000000004512135400000000000000000000000000000000000000000000000000000000451307780000000000000000000000000000000000000000000000000000000045130778000000000000000000000000000000000000000000000000000000004513fb9c000000000000000000000000000000000000000000000000000000004514efc0000000000000000000000000000000000000000000000000000000004518c05000000000000000000000000000000000000000000000000000000000451b9cbc00000000000000000000000000000000000000000000000000000000453097d4000000000000000000000000000000000000000000000000000000004530981280000000000000000000000000000000000000000000000000000000000000006e42130ef41620fb4a82e6da5e2ad789183e5af55a863dc5ff254064f4672ce5c5755c7af3b3924b91f01369f34193ef2dfa60d033a4a002d94742cddfd935fdd8cb1beb9253901df75d1ad29800b4ed05f513fd6b50576fefda943c086b0b4d1ede6fa8d3c645b6a99ea5adfc999ad07c4f7ebf7dd0f17d3e465cf3718188b8a400231760b09778ebfcb9561ff61cb044b9bebb450070f75b535ecfd84cdf2560db1e
f8258f73edbd57f8921bba8983bb0cdf97d0cfa64d8e64cc15364ce288a00000000000000000000000000000000000000000000000000000000000000063a964cc042911fb747588091abb36af563de5795f0bdb7bb2db7de864010c4f203cc4ff0f1708783146d176b969eee773b7cfdfc554e78f751144fe6a12aa48424c59f41dd460b8363ea19ad2a26a6cba1a26ac4a69d7c1acf1553371e82c76961bc663db0a5a24e96660bf92d2739c63d2973db18e2551849c53c53cbbcc7797122d77cce87b2fe2437b51b8b83c9a02cdd56c19701a2755bf131b60a6d42400b6318ca7e948f298e97ce7f3ca8d04f827223a48ae98b2b963950f1c0f1cfad", + "nonce": "0x12e00", + "value": "0x0", + "gas": "0xb4aa0", + "gasPrice": "0xeb91f0fb2", + "maxPriorityFeePerGas": "0x65932326", + "maxFeePerGas": "0x2e81af5956", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0xfd05a29a0d6a3dd121fac0baebd5052568f1bd5cfe2fe124d924b56b791d0120", + "s": "0x5f920248330228aa90b43e3a64fbe2d9ec14a8e227b52c29b92b7b49261d22b9", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x51", + "hash": "0xf822de480bb828b3f66aad1f2843c3c1ae498d1f96ca85e72ba4248b0415a765", + "from": "0x317413be84685d956a1368af4d5702e0bb3d1ab9", + "to": "0xcac0f1a06d3f02397cfb6d7077321d73b504916e", + "input": "0x", + "nonce": "0x3e", + "value": "0x2386f26fc10000", + "gas": "0xcf08", + "gasPrice": "0xeacf41b8c", + "maxPriorityFeePerGas": "0x59682f00", + "maxFeePerGas": "0x16302240d2", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x8e2bb1012c206710909280399b75ed8f893df0456eff2471fe2d47cb2ceb63fa", + "s": "0xaa430c47d455d463a92ff294a6f416dbabf9bcfd0b26bb2e1673831b38314aa", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x52", + "hash": "0x294de7f0e2addff93337b32eb52823c41828386a60601ebca4d9beb0afa7d8c9", + "from": "0x9f2493d59def0fba23e6afcbdc82c6f844ba31b5", + "to": 
"0xef1c6e67703c7bd7107eed8303fbe6ec2554bf6b", + "input": "0x3593564c000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000064075a6f00000000000000000000000000000000000000000000000000000000000000030a090c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000001e000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000000160000000000000000000000000796a4503b444a71b331c9556bef0815237ddeabc000000000000000000000000ffffffffffffffffffffffffffffffffffffffff00000000000000000000000000000000000000000000000000000000642ee06c0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ef1c6e67703c7bd7107eed8303fbe6ec2554bf6b0000000000000000000000000000000000000000000000000000000064075a7400000000000000000000000000000000000000000000000000000000000000e000000000000000000000000000000000000000000000000000000000000000418f723f784262da4a9153da88f2fff76759d704248a11fe7558db9bf15306d6a414b51b9c696de0e3475d3f7191c9e2f9ddbc5fd76b6e7f46ddf692ffb7709a821c000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000029a2241af62c00000000000000000000000000000000000000000002fec95573ca0aad952a10e3b400000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002000000000000000000000000796a4503b444a71b331c9556bef0815237ddeabc000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2000000000000
0000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000029a2241af62c0000", + "nonce": "0x12c2", + "value": "0x0", + "gas": "0x3e6db", + "gasPrice": "0xeacf41b8c", + "maxPriorityFeePerGas": "0x59682f00", + "maxFeePerGas": "0x1d0ea8e000", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0xa51b1ea96959eb0622d48ee3621ced034fbb3ff61656165e284ca271b4e506f1", + "s": "0x2b163361828f20cb795f4606d9dd8275388e391c0639a30b1faef8e952a5d2df", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x53", + "hash": "0x8ebe3b06abffa8124b86d7a7178da6d68fe4adfdd0b109d161dd6ebe024655ea", + "from": "0xd64137f743432392538a8f84e8e571fa09f21c37", + "to": "0x499d1b178b4643c12e3cf99d5b0244e9a754ee2d", + "input": "0xb27aed46000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000825a4f4b5350ab2e8a44512f811a783274606518000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc20000000000000000000000000000000000000000000000000de0b6b3a764000000000000000000000000000000000000000000000000001abef52eb2cdb7cd67800000000000000000000000a58967aa9d385e2a9004209ddb5d30d65fb2572c", + "nonce": "0x3910", + "value": "0x0", + "gas": "0x3d090", + "gasPrice": "0xe9faab73e", + "maxPriorityFeePerGas": "0x4c1ecab2", + "maxFeePerGas": "0x19c799954e", + "accessList": [ + { + "address": "0x499d1b178b4643c12e3cf99d5b0244e9a754ee2d", + "storageKeys": [ + "0x064fabc3f2def75074b3cfa74124eecf672f4fdeed14a58dc371c92db57faefa" + ] + }, + { + "address": "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2", + "storageKeys": [ + "0x596e06c3e121e71f94de09c4f8af843c7995cf31d45f937b2684100afc84d4b2", + "0x313063efe88a373513d3112833f3c81f61afe1c095803463dcf10c7e9d6af1b8", + 
"0x35f720aaebdff70fe542f5439168607a33fcc9c2b157383993eac0b2a3d780da" + ] + }, + { + "address": "0xa58967aa9d385e2a9004209ddb5d30d65fb2572c", + "storageKeys": [ + "0x0000000000000000000000000000000000000000000000000000000000000006", + "0x0000000000000000000000000000000000000000000000000000000000000007", + "0x0000000000000000000000000000000000000000000000000000000000000008", + "0x000000000000000000000000000000000000000000000000000000000000000c" + ] + }, + { + "address": "0xb23d80f5fefcddaa212212f028021b41ded428cf", + "storageKeys": [ + "0x000000000000000000000000000000000000000000000000000000000000000a", + "0xc2cf56af058e2f1e1a1682be6b9018dc897f1ff1b34aeb94e2b31657befa6ff1", + "0x8d288f130c823d48b45d878a65954f446638c5c187dfc5819471fcf26d444192" + ] + } + ], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0xd7b1deff5a37df3556f7f3e5073f475bc6bbf359c9857c42d00a18fc8ca3c8df", + "s": "0x7b45512128538a4c6800f9cbf7b431ac9ccee4685d1a5ad57ef48be3bf72a9ee", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x54", + "hash": "0x4ee47a78dab922056eb75c70279518d0cacf9708c4a28a7d50c9926a040b6392", + "from": "0xc2b581bd8cfa31f7a3bdea98df398e9f3bf9ee9b", + "to": "0xbf578495e98d23a3e9d5ae4788e68904d1d97935", + "input": "0xa22cb465000000000000000000000000efc70a1b18c432bdc64b596838b4d138f6bc6cad0000000000000000000000000000000000000000000000000000000000000001", + "nonce": "0x1", + "value": "0x0", + "gas": "0xb66f", + "gasPrice": "0xe96e65b0c", + "maxPriorityFeePerGas": "0x435a6e80", + "maxFeePerGas": "0x126db52780", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0xdac5577cff57a9755f60d63cef1a198a3e4ae3a0fff8f1ad93c13ee319a4b84b", + "s": "0x23b98f214a10da95e9003ee538edbc95e7c9573ebc84e065886332d6c50af8d6", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": 
"0x1000000", + "transactionIndex": "0x55", + "hash": "0x40a086eb8dda9f69927963591ac9788f4a4e46441f3b5c95a26a8727ca0e2fda", + "from": "0x91aae0aafd9d2d730111b395c6871f248d7bd728", + "to": "0x91aae0aafd9d2d730111b395c6871f248d7bd728", + "input": "0x", + "nonce": "0x812a9", + "value": "0x0", + "gas": "0x493ee", + "gasPrice": "0xe951c978e", + "maxPriorityFeePerGas": "0x4190ab02", + "maxFeePerGas": "0x144520af92", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x5bfaf05629ab20c0593a67bd2f14afecf496a06c63dc886f4a28836352c316ff", + "s": "0x6141157074528f9492994fb71a05680484e503f36cb808873a76fd85711422c", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x56", + "hash": "0x8565f0406430028233cb6777a4b69e5c2f6a6366388d355190bdbc2f4c5d69ed", + "from": "0x2db1d8cdf1abe8c70b531a790cdf2ff38aecf652", + "to": "0xc91444fa4c5809e07d9541a9671ff9a9dfdf3b84", + "input": "0x", + "nonce": "0x2afdf", + "value": "0xa70b8808b1b000", + "gas": "0x5208", + "gasPrice": "0xe90f07a0c", + "maxPriorityFeePerGas": "0x3d648d80", + "maxFeePerGas": "0x1194bf2980", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0xefaa913c4ad235d0aec32ebd2fc63b7513c15c7750e37fbfd7ac6effdba7fdbc", + "s": "0x7cc469c46840d4069df3a53631352fa0a8b31ee17be9b598a25d0156e2c30fde", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x57", + "hash": "0x2e9d04ebcfcdaedec2c2d107c12b8af67d7c7cc2d0696617d36129be3f364ab2", + "from": "0x585fedfb321434753541054131d28922178a36ec", + "to": "0xb4a75b312c99358c4a845ecb3c59503945f6320a", + "input": "0x", + "nonce": "0xf", + "value": "0x43d412a60d4000", + "gas": "0x5208", + "gasPrice": "0xe8f26b68c", + "maxPriorityFeePerGas": "0x3b9aca00", + "maxFeePerGas": "0x1145a415fe", + "accessList": [], + 
"chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x21c8154cd28851ec05c2b8d90fe3b51cbf5aafcb930d0fff1c00821bf45db87e", + "s": "0x221fe8d140b1f1a16afd7ad6cd637cea5ee1e070d33ae6cbdadb41ada0e607b3", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x58", + "hash": "0xa931281a81b58bfd15743be48455ac9ff86be0dadb01b5f5ad500f64165e08b2", + "from": "0x59c328055efd5aa1ae1d39bb71d79cd8b6546773", + "to": "0xf872ada8968c981cfb3769d58a03a3c018128b5a", + "input": "0x3ddf078f00000000000000000000000000000000000000000000000000000000000000d70000000000000000000000000000000000000000000000000000000000002f2400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "nonce": "0x93", + "value": "0xb1a2bc2ec50000", + "gas": "0x24d5e", + "gasPrice": "0xe8f26b68c", + "maxPriorityFeePerGas": "0x3b9aca00", + "maxFeePerGas": "0x1145a415fe", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0xb1d17c66474570111513a5ee2344629cea58466db37fe8728d725173424ca9f6", + "s": "0x3dd5afc0d224f83567f965cddd9f2eeb5f405b4b6a81acd08d5445b0fa56bde7", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x59", + "hash": "0xa85119a1ca56b426aeeae45a65e8cb640162c0462a01f96866691c33aaedd5a7", + "from": "0xd8d6ffe342210057bf4dcc31da28d006f253cef0", + "to": "0x15d4c048f83bd7e37d49ea4c83a07267ec4203da", + "input": "0xa9059cbb000000000000000000000000690f74b98548791c906ff303e4d37c30b2e5aa4a0000000000000000000000000000000000000000000000000000228f90806000", + "nonce": "0x6881", + "value": "0x0", + "gas": "0xf36e", + "gasPrice": "0xe8f26b68c", + "maxPriorityFeePerGas": "0x3b9aca00", + "maxFeePerGas": "0x1314e96ef3", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": 
"0x9133a8222042ca68db697dd2c455b49aa521e3f0ae2c2f6c344f5b61f33245f", + "s": "0x289be2e5191b104bde1ddeef2d0469aee5b27bc8c6f0fe94a41390f6e7b8f81e", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x5a", + "hash": "0x3f1f1d56d5d16df1640ab6b5bf4543a7c74c26c54abcbcbe970d693c9e116ba2", + "from": "0x9b2dfb8862e3f398b73206c879a1f38136f22908", + "to": "0x9ea3cda5c2adf0370454b9ee28786a068227b1a4", + "input": "0xce2e62ff000000000000000000000000000000000000000000000000000000001ce2ee5700000000000000000000000000000000000000000000000aa42fc6468e8e8000000000000000000000000000819de42d3ab832eaf7111a222a8a5a7419f13b48000000000000000000000000a0b86991c6218b36c1d19d4a2e9eb0ce3606eb48000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000640753c0", + "nonce": "0xa626", + "value": "0x0", + "gas": "0x493ee", + "gasPrice": "0xe8f26b68c", + "maxPriorityFeePerGas": "0x3b9aca00", + "maxFeePerGas": "0x126d638855", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x18abfda2aba4153790af5f65d9a330f195552e4c99ae88e256c946a38ff0c913", + "s": "0x16c610e9e18fb1b254bf09225ac67bf07d5b6fd86f4fe23eec123655c3cd6fa1", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x5b", + "hash": "0x8d19d5fb37288609f0f01bad62470fc6633fe0db9491921db88150a0f8e9e4f9", + "from": "0xcafc2cc12004c674134a6899197c2ac633918ff0", + "to": "0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48", + "input": "0xa9059cbb000000000000000000000000a26e73c8e9507d50bf808b7a2ca9d5de4fcc4a04000000000000000000000000000000000000000000000000000000003b9aca00", + "nonce": "0x1", + "value": "0x0", + "gas": "0x14820", + "gasPrice": "0xe8f26b68c", + "maxPriorityFeePerGas": "0x3b9aca00", + "maxFeePerGas": "0x137283afc8", + "accessList": [], 
+ "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x6bbe5554f388431b8cbdc2e97b279ccef98f9fa597830ed279430caa59f18566", + "s": "0x551fd1e6a4175911c1f66f2826775f79445126b152bb6e6f610913778d0824c", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x5c", + "hash": "0xb60d1ab21ddb579c8e8de4ed449626f64e9f08aad165c1a670db5d5082c72400", + "from": "0xddeb598fe902a13cc523aaff5240e9988edce170", + "to": "0xbd6c554554834ee97828b6da732dca7461ddf9d4", + "input": "0xc9807539000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000003800000000000000000000000000000000000000000000000000000000000000480010101010100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002e000000000000000000000005d731b16846523d88c6cab92a014b1350001028a02100a11020003060c08050f0b04010907120e0d00000000000000000000000000000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000130000000000000000000000000000000000000000000000000000000006f1d3a20000000000000000000000000000000000000000000000000000000006f1f8180000000000000000000000000000000000000000000000000000000006f21da70000000000000000000000000000000000000000000000000000000006f221c70000000000000000000000000000000000000000000000000000000006f2384d0000000000000000000000000000000000000000000000000000000006f290800000000000000000000000000000000000000000000000000000000006f2dea00000000000000000000000000000000000000000000000000000000006f32cc00000000000000000000000000000000000000000000000000000000006f35f880000000000000000000000000000000000000000000000000000000006f593dd0000000000000000000000000000000000000000000000000000000006f593dd0000000000000000000000000000000000000000000000000000000006f5ebe00000000000000000000000000000000000000000000000000000000006f7a63f000
0000000000000000000000000000000000000000000000000000006f9262e0000000000000000000000000000000000000000000000000000000006facde00000000000000000000000000000000000000000000000000000000006facde00000000000000000000000000000000000000000000000000000000006facde00000000000000000000000000000000000000000000000000000000006fbf5120000000000000000000000000000000000000000000000000000000006ff2acd0000000000000000000000000000000000000000000000000000000000000007245b907e2e74053d22bc324f568defec8d0f8a129348b6bfed73b95d9b54c9da362156958ed3b0b91f4ce8cd7521fea7bdcc7e6492b291d951aeb4065f31a2d37d793b57fc39b956af1db131e25d41f2b5e9191b6fe37122cc38c38fa179754beed9df22eabc741dbb80dc5838da46e928d3bee08c4b3a683f22829967c0c41d227e6873fe34b34d2d8aec3067e704de25f8cf287d2bca1303bc27c8a9e55ab18f3491c1d487334f720f4a5b935209d632d0a3d4881b7f6d23e123586f06afb4eb0eb08256c4d6bec64b7a64aaf6911ffc9f07c73835fb7af0f91bd8d7c5f5f2000000000000000000000000000000000000000000000000000000000000000706fd2bd76221a3a640558ddff899aee3ff3f6851713a5ca988af6f68b8f082d5620f94f7919924a199d08169e6e28365f9251ae7de7b299bea3c5abc0140308c754d08045945ace2793c83ca9edd629b0ab6d178103afe4c68b2b90f5f01694569e19e972e6a83d47ad1802168990744b63e4d52bd6c401a218732548eefaed854bb7c4d83a20b26a2520713ebde372ae4079ec3bc279fb20ac64a15b4a6acab2a672e40868677c2e238c16a3d9b6ca26d4ef504cde933c96fb355ef36fc808639abf737488bfeb3f45ad650c088eb310f5d480658a6bfe0f5d9c4aca917a0f1", + "nonce": "0xdef3", + "value": "0x0", + "gas": "0xb4aa0", + "gasPrice": "0xe8f26b68c", + "maxPriorityFeePerGas": "0x3b9aca00", + "maxFeePerGas": "0x299b855df1", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x1a2e2de17bf556c40afbbb794045dd2cef710828d0bf7985314021c6665aad85", + "s": "0x2c196bf18c41af73404214a2ee4f759e475ccc318519a0f2717aa388c2db19cb", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x5d", + "hash": 
"0xb82cbc4f6ae1f80cb77da818a5210c03498b818b2e65bd50b9f0a5bfcba97784", + "from": "0x90ecb7364743af98ea857696f4455b1702b03999", + "to": "0xd6327ce1fb9d6020e8c2c0e124a1ec23dcab7536", + "input": "0x095ea7b3000000000000000000000000000000000022d473030f116ddee9f6b43ac78ba3ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + "nonce": "0x3", + "value": "0x0", + "gas": "0xb5a8", + "gasPrice": "0xe8f26b68c", + "maxPriorityFeePerGas": "0x3b9aca00", + "maxFeePerGas": "0x1145a415fe", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x2ede86c0a33fa3c30b46ac0ae05fd8778b4df0058ef32e222f4e800f6c5e3e9c", + "s": "0x2ce61d71ba539d223f24af6f45f16b892bf20e5cb9927e1ee6c9a53a6b255808", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x5e", + "hash": "0x7dfa7e817fa01d0d686d7ae23b6227719fc6c05562539ed74d74399d74b037b2", + "from": "0x098b88f9d9e40fcfa5e49c40293d7e7725cf6a11", + "to": "0xe95b3dc78c0881dea17a69bafc6cfeb8d891e9de", + "input": "0xcfe96c84000000000000000000000000a0b86991c6218b36c1d19d4a2e9eb0ce3606eb4800000000000000000000000000000000000000000000000000000001cae620b60560c0b98fb53153b3cf0558f916e98a83ae6e2fd54cf7fdd1091f741818ebac000000000000000000000000000000000000000000000000058d370ab600022c00000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000", + "nonce": "0x15", + "value": "0x0", + "gas": "0x45f2a", + "gasPrice": "0xe8f26b68c", + "maxPriorityFeePerGas": "0x3b9aca00", + "maxFeePerGas": "0x1145a415fe", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x2178c97e400407be8586a77e008a23f2a75a033656a116aa7e10c05a10c53191", + "s": "0x6f7a896331a1bce169f1f34c3bd4a688f2148943850f6e4a3ea777c29fd5a1c", + "yParity": "0x1" + }, + { + "blockHash": 
"0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x5f", + "hash": "0x2c05766ecb26e9fc8ca9b284d8ebf280aaddb12c04339a2134673958f7a4a2cc", + "from": "0x80c67432656d59144ceff962e8faf8926599bcf8", + "to": "0x3dee2d3152302051db7c82f7d18d83e0cb1e8cda", + "input": "0x", + "nonce": "0x2d3f2", + "value": "0x1632a2f72322108", + "gas": "0x186a0", + "gasPrice": "0xe8f26b68c", + "maxPriorityFeePerGas": "0x3b9aca00", + "maxFeePerGas": "0x29e8d60800", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x697bc0d40c7eda6926f669cdae0080657ea963a375ce7509d596416a44710383", + "s": "0x782460b439bb4834854faa3a50a48717b5dacb6cce892d0342a58d6faf75b181", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x60", + "hash": "0x9454e08eedd5d02921473d920dfbdb8923e2ef7eb46e470251fe44576979dedc", + "from": "0xab35a8f4eba5e1e34f2f64e229c2ae017c662c58", + "to": "0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48", + "input": "0xa9059cbb000000000000000000000000a26e73c8e9507d50bf808b7a2ca9d5de4fcc4a040000000000000000000000000000000000000000000000000000000033428f00", + "nonce": "0x9", + "value": "0x0", + "gas": "0x14820", + "gasPrice": "0xe8f26b68c", + "maxPriorityFeePerGas": "0x3b9aca00", + "maxFeePerGas": "0x1314e96ef3", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x4d3356756f3e0f5b8267e6aadaea5865b261db480836707a637681e42f7d6f72", + "s": "0x5f7ddb3a10bb7b17b50a1429f2d3b189e341292769ce017581e0228077189518", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x61", + "hash": "0x3ce5fd24eefb2666e5e64c6afada2e06bbab32145299c7bdaa9fcac213f9157b", + "from": "0x9636b43b9a9382d5763bbcaba733cc708e7da741", + "to": 
"0x86fa5a5927fbaa82218743607765ec0f63e46bfa", + "input": "0xefef39a10000000000000000000000000000000000000000000000000000000000000001", + "nonce": "0x26", + "value": "0x26499fd8be9000", + "gas": "0x1f64e", + "gasPrice": "0xe8f26b68c", + "maxPriorityFeePerGas": "0x3b9aca00", + "maxFeePerGas": "0xf5de81400", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0xdfef8f54b5d176928cc55b6158adc18a7ee88b38c27b91574ad87fd1b38b2e60", + "s": "0x234d4d5dd9f38a30aa327f261c0055b5568c5a1de8c0b004c30ed755a8443c51", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x62", + "hash": "0xc9de8a79e36316b64c41ed26eb7669b3be41c45337be84085f0e6e70a70b7d0a", + "from": "0x50eaff24ae29b13ad3499ca6746a3710aa91a635", + "to": "0xef1c6e67703c7bd7107eed8303fbe6ec2554bf6b", + "input": "0x3593564c000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000064075a6f0000000000000000000000000000000000000000000000000000000000000001080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000dfc618173cd90b000000000000000000000000000000000000000000000060d9b3901abf5d3bd0d00000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc20000000000000000000000000f7b3f5a8fed821c5eb60049538a548db2d479ce", + "nonce": "0x5a", + "value": "0x0", + "gas": 
"0x302f8", + "gasPrice": "0xe8f26b68c", + "maxPriorityFeePerGas": "0x3b9aca00", + "maxFeePerGas": "0x1145a415fe", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x7ccca561cc603637007e8cf92db5db2d7b42717896e72155d63dbd0af2b6c3c", + "s": "0x71e279af80390b523921d0405283d6fe53f63d0e242dff9cf50c6ef5b7bdc356", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x63", + "hash": "0x0f16fd38cfbf36957b99b43163ae56f83a6ae776feb12beb3214744101c78e55", + "from": "0x5404897d0646f5a649b39cf468eaa433611f34d2", + "to": "0x881d40237659c251811cec9c364ef91dc08d300c", + "input": "0x5f57552900000000000000000000000000000000000000000000000000000000000000800000000000000000000000008e870d67f660d95d5be530380d0ec0bd388289e10000000000000000000000000000000000000000000001679f899a10023f2de400000000000000000000000000000000000000000000000000000000000000c00000000000000000000000000000000000000000000000000000000000000017616972737761704c696768743346656544796e616d696300000000000000000000000000000000000000000000000000000000000000000000000000000001a0000000000000000000000000000000000000000000000000000001869d957ede0000000000000000000000000000000000000000000000000000000064075406000000000000000000000000af0b0000f0210d0f421f0009c72406703b50506b000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec700000000000000000000000000000000000000000000000000000001882285c30000000000000000000000008e870d67f660d95d5be530380d0ec0bd388289e1000000000000000000000000000000000000000000000164f381636d38e96531000000000000000000000000000000000000000000000000000000000000001be7925bd5abec9f21f66f26fa4fe990407151c7a863d3c49d0a9468dd2c5bdb705365c29e550e627360eb57f9bba90954727d90a36f3b50dbe26d09c4fde8d7ad000000000000000000000000000000000000000000000002ac0836a2c955c8b30000000000000000000000002acf35c9a3f4c5c3f4c78ef5fb64c3ee82f07c450000000000000000000000000000000000000000000000000000000000000
000a7", + "nonce": "0x58", + "value": "0x0", + "gas": "0x3c67c", + "gasPrice": "0xe8f26b68c", + "maxPriorityFeePerGas": "0x3b9aca00", + "maxFeePerGas": "0x185f27f5a9", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x97ee4558d9f66d43a467b50437728ed87907f551e332a3a7a5e5708b19ed7757", + "s": "0x292bcd5fc0dba09b6c51334273985364856fcdf3e5b8058e3f8825910511e30f", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x64", + "hash": "0xdaa5f6760773a8e46546ff3c82a635b56d4a085e590c2d862c29bfdf5b8ab1d6", + "from": "0xcbd5672f0087746c64ad59a546a1eb7957c6bed4", + "to": "0x6c22910c6f75f828b305e57c6a54855d8adeabf8", + "input": "0x095ea7b3000000000000000000000000000000000022d473030f116ddee9f6b43ac78ba3ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + "nonce": "0xc0", + "value": "0x0", + "gas": "0xb817", + "gasPrice": "0xe8f26b68c", + "maxPriorityFeePerGas": "0x3b9aca00", + "maxFeePerGas": "0x1145a415fe", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x981ad1156735967306870b27322158ac99851f6f329c0d5440139c908ff79a50", + "s": "0x21fa3eba12a8f8b6bdb997255a905874ce31407e75bac2db5e14fdaf52c26f63", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x65", + "hash": "0x2daabc5662bc04ee3dce9aa1b10cfa4e0d00c3737c6c46e1cb3529f8605bec31", + "from": "0x02a6032fcc7f00f8a3732f35ba780b68bada41f7", + "to": "0x808507121b80c02388fad14726482e061b8da827", + "input": "0x095ea7b3000000000000000000000000ba12222222228d8ba445958a75a0704d566bf2c8ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + "nonce": "0x51", + "value": "0x0", + "gas": "0xc882", + "gasPrice": "0xe87ffa88c", + "maxPriorityFeePerGas": "0x3473bc00", + "maxFeePerGas": "0x12bbca2b00", + "accessList": [], + 
"chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0xe8d931cad935c9712f985e68edb4984649d276d58ad56cdada7ff6ea3a6553a6", + "s": "0x15c85abd35e6dd8e905377ed0b638de7c8aeb1009d1bac168854dadaeac67739", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x66", + "hash": "0xb98fba6d827fe4afde98d96b5eb17355cb56d1a1e1d69d0a7e34113e4ebc2a2e", + "from": "0x6d0da23c433f83879cd820ecc451d445e168cae7", + "to": "0xdef1c0ded9bec7f1a1670819833240f027b25eff", + "input": "0x706394d5000000000000000000000000a0b86991c6218b36c1d19d4a2e9eb0ce3606eb48000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc200000000000000000000000000000000000000000000000000000016b5dcb38c0000000000000000000000000000000000000000000000036eb10a55d88b0000000000000000000000000000af0b0000f0210d0f421f0009c72406703b50506b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000006d0da23c433f83879cd820ecc451d445e168cae700000000640753c00000000000000000000000000000000000000000640753670000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000001c508810cd5731e6d1ee2780427252a6ac084bf3de5fc572f69aefcf3db05e6b17482a8856667f40df0fde47d4b5fe3de362a660c6a82ec4a9a5bd64b8a705016f869584cd00000000000000000000000086003b044f70dac0abc80ac8957305b6370893ed0000000000000000000000000000000000000000000000f62b2d1e2164075367", + "nonce": "0x2a0", + "value": "0x36eb10a55d88b0000", + "gas": "0x20ae0", + "gasPrice": "0xe8767120c", + "maxPriorityFeePerGas": "0x33db2580", + "maxFeePerGas": "0x11398d3700", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0xd3dcc43f5e242ba939b735d9c83e4d3490fce46f2009019164ec0043a3aa08ea", + "s": "0x4be682aaeb30ab53d54b216eced12f47c418fe73478db6b5a476feebcae48507", + "yParity": "0x0" + }, + { + "blockHash": 
"0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x67", + "hash": "0xdf2a3f4acca32130133830390d5e1eb789d9114dad6d339eb68fedce2e35dc39", + "from": "0x5fa81085fdf38b24faa68e14e19ec31b60a887f6", + "to": "0x6b175474e89094c44da98b954eedeac495271d0f", + "input": "0xa9059cbb0000000000000000000000008861bf5a6baba5c819b359552c6f0926516d62d600000000000000000000000000000000000000000000000a544c092262928522", + "nonce": "0x191", + "value": "0x0", + "gas": "0x9548", + "gasPrice": "0xe7f0ed70c", + "maxPriorityFeePerGas": "0x2b82ea80", + "maxFeePerGas": "0xf937ba0ed", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x94d91e417d1666bae998ec63b2a142ba51432d3b39b76f103fdbe11092a05c5a", + "s": "0x20b7f5c62294d63a11953c1c8e8e5756542beedf6917c1b02e258eadc8e4532f", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x68", + "hash": "0x659192dd9a56ecd7cc4c5dd91d2694845899e31f62f9f3b441ed214e77a97ace", + "from": "0xd2c82f2e5fa236e114a81173e375a73664610998", + "to": "0xd1669ac6044269b59fa12c5822439f609ca54f41", + "input": "0x0dcd7a6c0000000000000000000000003e3b6f0b9c5e7279d4eabee754a82ec31a1427ac0000000000000000000000000000000000000000000000089869870f726d32160000000000000000000000005faa989af96af85384b8a938c2ede4a7378d98750000000000000000000000000000000000000000000000000000000064108dec000000000000000000000000000000000000000000000000000000000029b3f900000000000000000000000000000000000000000000000000000000000000c0000000000000000000000000000000000000000000000000000000000000004135b71e2c99229dd1f58a004bb81dcfac0814c07aace4607174200c443a7a7d044f08e7eee7c90910282cb4b31927ca651dcdaf5e121e7d25144caa2552532b001c00000000000000000000000000000000000000000000000000000000000000", + "nonce": "0x1e5a52", + "value": "0x0", + "gas": "0x32008", + "gasPrice": "0xe75d1ba4c", + 
"maxPriorityFeePerGas": "0x2245cdc0", + "maxFeePerGas": "0x1c88554c68", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x68b39bedbc7200ba8cdafb851609480ca3db465a0199ea27517f25c7c11553c6", + "s": "0x649f37f78c73914563b92a61643458633974fafb020296b713738efe360a704f", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x69", + "hash": "0xf44ad1ff1b1543354240c16277eb00244c8aacc1f824f86b8d2a8d7881eb9a4c", + "from": "0xd436e2a0da348db9fd050665689707bc8842ec99", + "to": "0xef1c6e67703c7bd7107eed8303fbe6ec2554bf6b", + "input": "0x3593564c000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000064075a6f00000000000000000000000000000000000000000000000000000000000000030a080c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000001e000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000000160000000000000000000000000f5d126077096e5b01bc30ffa5d9324d7202d7cb3000000000000000000000000ffffffffffffffffffffffffffffffffffffffff00000000000000000000000000000000000000000000000000000000642ee06f0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ef1c6e67703c7bd7107eed8303fbe6ec2554bf6b0000000000000000000000000000000000000000000000000000000064075a7700000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000041e611451b3d1cb8817bb6424a65a8ac255454796bf841bf370661069bb1b973ed6e6b28129ce4f0b537e9ad3065c387222a9ac4ed9d9a64b9be06807b08cf4d271c000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000321937018414e027c8af000000000000000000000000000000000000000000000000004a6702cd1e70db00000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002000000000000000000000000f5d126077096e5b01bc30ffa5d9324d7202d7cb3000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc200000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000004a6702cd1e70db", + "nonce": "0x3", + "value": "0x0", + "gas": "0x48f0d", + "gasPrice": "0xe7454420c", + "maxPriorityFeePerGas": "0x20c85580", + "maxFeePerGas": "0x134c087000", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0xeaa308828c7ed69a41e7e1baf33bf613429bccf7d6925df1e5ed30242f8d24b2", + "s": "0x2666245fb041d7d5363db6fe093262238a7e2dca3301ddb943422b58725e9674", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x6a", + "hash": "0xaea5f6efc319a239cbe93a6342823b1b8b527bd9ba4b2e3c3f9d82f23ccd99c5", + "from": "0xaaebcb343b547a8cee22d5b821e237b1b2fcc48f", + "to": "0x4dbd4fc535ac27206064b68ffcf827b0a60bab3f", + "input": "0x439370b1", + "nonce": "0x1f", + "value": "0x1c73904d4fc310", + "gas": "0x1683b", + "gasPrice": "0xe7159518c", + "maxPriorityFeePerGas": "0x1dcd6500", + "maxFeePerGas": "0x134917da7f", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0xae68406aa2e766b68636b1a167c1f7b4dd03392b6826df923023387730cc94f1", + "s": "0x4c4c20411b467fcfe2759543afbede9e2b71cbb3d8ed164b8c28dee0333ab59d", + "yParity": "0x1" + }, + { + 
"blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x6b", + "hash": "0x68088ec3dc682e1d37c91c94a44b7e23d15e35849844281867e9a18780bed5b9", + "from": "0xb85c6b791634df012310ee9e46da52d54c2a05d1", + "to": "0x0c3f088f04e022a1409ceefdf685eee79ff2b29a", + "input": "0x", + "nonce": "0x74", + "value": "0x3782dace9d90000", + "gas": "0x5208", + "gasPrice": "0xe7159518c", + "maxPriorityFeePerGas": "0x1dcd6500", + "maxFeePerGas": "0x13414e2a81", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x805238a84156e4145b86c61dc8e435422adfa244f6c161b16ac2caa36e9ca2e", + "s": "0x6d53569602b9554d04d18e065e9e2afbf64c9404416b7555ff56b453fcdb2826", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x6c", + "hash": "0x305e2576ed044415f0a664aa0a455e1eeb2b51255ff2133b89591f587751aa1c", + "from": "0x5846013e2560da58cc69954fc20f3f0039ca1ed8", + "to": "0xdbd36dc7294c9b99f6c72e3f9c7a7c444b71e24c", + "input": "0x", + "nonce": "0x6e", + "value": "0x473693ab871b708", + "gas": "0x5208", + "gasPrice": "0xe7159518c", + "maxPriorityFeePerGas": "0x1dcd6500", + "maxFeePerGas": "0x13414e2a81", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0xfe3359abe88c7ed57179030def4d326e8748a7e84948e462a15452f35e8b2898", + "s": "0x7e9191901d8a142e03682f85d4610128851e4b8e11b293f3aad193d2e0730dec", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x6d", + "hash": "0x8b8caf23d45df0b8dbf85641c3cd50794db22321f29ff661f5157794ccb225d1", + "from": "0x447acec66808f07285f0947b8d2e10356c3a8fa7", + "to": "0x18b4e0ba07c299402d95e6909002a72c69c2be75", + "input": "0x", + "nonce": "0x1e4", + "value": "0x18de76816d8000", + "gas": "0x5208", + "gasPrice": 
"0xe7159518c", + "maxPriorityFeePerGas": "0x1dcd6500", + "maxFeePerGas": "0x134917da7f", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x52bb01ba5412fb38d662746df7c0ff0eb7dcc82fe3de622a8f1e136dea0ba8ad", + "s": "0x4cff059ff37074a3e775700820f90ce1c6bc38777f3457a30bc83b90a544b378", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x6e", + "hash": "0x1c62d6a48b04595d8e6e4d263bd34e258e9da882f1e80e828e3a8fd747427288", + "from": "0xcaac6ad4b639fad37b670b2f707830f8a7ee2db5", + "to": "0x19eaf7fbeade0e1ea6976f1aa260e939c1c52130", + "input": "0xa22cb46500000000000000000000000000000000000111abe46ff893f3b2fdf1f759a8a80000000000000000000000000000000000000000000000000000000000000001", + "nonce": "0xd55", + "value": "0x0", + "gas": "0xc91d", + "gasPrice": "0xe7159518c", + "maxPriorityFeePerGas": "0x1dcd6500", + "maxFeePerGas": "0x134917da7f", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x8052c5986a7272ff5f2b58b9f09d290ab70877e490aa7572b6d1c419521c32fa", + "s": "0xd619bdbe9c03315467c264e9d4e2713439bea872e84fdb851bc977e68684079", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x6f", + "hash": "0xf674c579b9f953b16bee16d23b01464ea4ddd7186779d2b57dfac05a509bce28", + "from": "0xa9fff0316ad1fd01db39f71743e0d807f4461a4b", + "to": "0x3c03e5376b7faa9749c559d983cdd71e8f0f0a0c", + "input": "0x095ea7b3000000000000000000000000000000000022d473030f116ddee9f6b43ac78ba3ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + "nonce": "0xaa", + "value": "0x0", + "gas": "0xb5fd", + "gasPrice": "0xe7159518c", + "maxPriorityFeePerGas": "0x1dcd6500", + "maxFeePerGas": "0x134917da7f", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": 
"0x6b0096a06d31936b05b6e9282a1227c491a902e1152658c6bf7e4108cd29c0c9", + "s": "0x1996c88398d0afd94b67c6461563bceded886e3450a66fe5528a89283ee52ce", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x70", + "hash": "0xfc9c0fc92809547a93d6ff65e8d2c6a99cafc1f419d4e0e545cfe845e5a8fbbf", + "from": "0x0f0eae91990140c560d4156db4f00c854dc8f09e", + "to": "0x00000000000001ad428e4906ae43d8f9852d0dd6", + "input": "0x87201b4100000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000102000000000000000000000000000000000000000000000000000000000000011c00000007b02230091a7ed01230072f7006a004d60a8d4e71d599b8104250f0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000005400000000000000000000000000000000000000000000000000000000000000a2000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000046000000000000000000000000000000000000000000000000000000000000004c00000000000000000000000009bca17912fe88408d420e95d49a012a8014508a7000000000000000000000000004c00500000ad104d7dbd00e3ae0a5c00560c00000000000000000000000000000000000000000000000000000000000000016000000000000000000000000000000000000000000000000000000000000002200000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000006406f04d0000000000000000000000000000000000000000000000000000000064084
1cd0000000000000000000000000000000000000000000000000000000000000000360c6ebe00000000000000000000000000000000000000004b4ee54df531b0e00000007b02230091a7ed01230072f7006a004d60a8d4e71d599b8104250f0000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000003000000000000000000000000e70659b717112ac4e14284d0db2f5d5703df8e430000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000026a716c2b0f1200000000000000000000000000000000000000000000000000026a716c2b0f12000000000000000000000000009bca17912fe88408d420e95d49a012a8014508a700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000031b95c50a5e0000000000000000000000000000000000000000000000000000031b95c50a5e00000000000000000000000000f7e2f5919081fb9ff8d13fd340351f450cf3309500000000000000000000000000000000000000000000000000000000000000405f1e564a6dba39842f32c90a46e64b08c266eb9d218efa139b7085800d5c7d98b1213959b121a0ba119e707425351b957c6e1c2a5aec161fe839932d68c77ebe000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000004600000000000000
0000000000000000000000000000000000000000000000004c000000000000000000000000050e4a02362373d9dae47f340c82fa146e1d06158000000000000000000000000004c00500000ad104d7dbd00e3ae0a5c00560c00000000000000000000000000000000000000000000000000000000000000016000000000000000000000000000000000000000000000000000000000000002200000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000006405fccd000000000000000000000000000000000000000000000000000000006409f14d0000000000000000000000000000000000000000000000000000000000000000360c6ebe0000000000000000000000000000000000000000b0ffdc1769d5d6170000007b02230091a7ed01230072f7006a004d60a8d4e71d599b8104250f0000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000003000000000000000000000000e70659b717112ac4e14284d0db2f5d5703df8e430000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000025e2e8892cd1000000000000000000000000000000000000000000000000000025e2e8892cd100000000000000000000000000050e4a02362373d9dae47f340c82fa146e1d06158000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f8b0a10e47000000000000000000000000000000000000000000000000000000f8b0a10e47000000000000000000000000000f7e2f5919081fb9ff8d13fd340351f450cf3309500000000000000000000000000000
00000000000000000000000000000000040ad52f00d32acb51502c0b47109f0af7e61478e55df68248b84564a13a57eed1bfa7f9fe52b63babc2c37b6da10f9733c8e16a82dcd70743af9a9c80c00c07d42000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000046000000000000000000000000000000000000000000000000000000000000004c000000000000000000000000042c8df348150519a6e8dfca862a0d0f04c6b6020000000000000000000000000004c00500000ad104d7dbd00e3ae0a5c00560c00000000000000000000000000000000000000000000000000000000000000016000000000000000000000000000000000000000000000000000000000000002200000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000006406f49f000000000000000000000000000000000000000000000000000000006408461f0000000000000000000000000000000000000000000000000000000000000000360c6ebe00000000000000000000000000000000000000009d47e6a7224134910000007b02230091a7ed01230072f7006a004d60a8d4e71d599b8104250f0000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000003000000000000000000000000e70659b717112ac4e14284d0db2f5d5703df8e4300000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000064000000000000000000000000000000000000000000000000000000000000006400000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001493135ebfc16000000000000000000000000000000000000000000000000
0001493135ebfc1600000000000000000000000000042c8df348150519a6e8dfca862a0d0f04c6b6020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001a77bc4b93a000000000000000000000000000000000000000000000000000001a77bc4b93a000000000000000000000000000f7e2f5919081fb9ff8d13fd340351f450cf330950000000000000000000000000000000000000000000000000000000000000040f63fc7b7d67f413f31f0e74ed5c9c4d4a32e566a6a115a0de866ef49109fab02d6564e588e943606920035eaa1be002a0c9c983eeeb1ff3591db4141ea4f5758000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000000001200000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000e000000000000000000000000000000000000000000000000000000000000001c000000000000000000000000000000000000000000000000000000000000002200000000000000000000000000000000000000000000000000000000000000
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000ed549c1f360c6ebe", + "nonce": "0x22e6", + "value": "0x1988d1af9f1ff000", + "gas": "0x69a2d", + "gasPrice": "0xe7159518c", + "maxPriorityFeePerGas": "0x1dcd6500", + "maxFeePerGas": "0x13414e2a81", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x62edbd31521409330be9d0c4cdd52ed44e9e996616dbe3a72f5a931ba4be7e50", + "s": "0x78a947cbf83dd926fbe4059a813af1f4618b266bd44e94fa1673e42f555e0bfd", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x71", + "hash": "0xe275391b4a35e063b921365ad5d62279eb5a5c800e3e6cc0dc4958646182d585", + "from": "0xaeb27a64cdee1aa56d49e45f26a3a8b736b97899", + "to": "0x0000000000a39bb272e79075ade125fd351887ac", + "input": "0xd0e30db0", + "nonce": "0x44d", + "value": "0xde0b6b3a7640000", + "gas": "0x81a8", + "gasPrice": "0xe7159518c", + "maxPriorityFeePerGas": "0x1dcd6500", + "maxFeePerGas": "0x134917da7f", + "accessList": [], + "chainId": "0x1", + "type": 
"0x2", + "v": "0x1", + "r": "0xedd25e8765fe1c601fdd117040be6ca8a2947c3dde70dd4b207b62e9d2e0dab5", + "s": "0x6b724b8c725af43ae56696d6a076d18b96a153f113054adb89fef9f8fe09683e", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x72", + "hash": "0x4d30ad4b778dd7df3a911b66abce492e369e6b6d355c074c04ace1a29b2e0160", + "from": "0x87047f8a5d1b8c8e7f84ce6f4079d2c586b1eb4a", + "to": "0x6c022bd50aaaf1a851b63da854c660726b25d4ae", + "input": "0xe651372500000000000000000000000000000000000000000000000000000000000025d90000000000000000000000000000000000000000000000000000000000000000", + "nonce": "0x214", + "value": "0x0", + "gas": "0x2da5c", + "gasPrice": "0xe7159518c", + "maxPriorityFeePerGas": "0x1dcd6500", + "maxFeePerGas": "0x134917da7f", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x4f37eedd0b10240f05765bff9bb60a15ba0036373b6cf750051055c940892bad", + "s": "0x245d97608a1db58c0f4020424360d24bd0143e321df7daf18405a0d8fc0ee30d", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x73", + "hash": "0xd73b04334ffe2a5753e6a761a4823d18353a03776ef51a4256e2ba09a851b987", + "from": "0x501036f867924cdfb7d903205ccb8539bf286bd0", + "to": "0x0000000000a39bb272e79075ade125fd351887ac", + "input": "0x2e1a7d4d00000000000000000000000000000000000000000000000542253a126ce40000", + "nonce": "0x155", + "value": "0x0", + "gas": "0xa08a", + "gasPrice": "0xe7159518c", + "maxPriorityFeePerGas": "0x1dcd6500", + "maxFeePerGas": "0x134917da7f", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x30bb6d2f72b83e93d683265a186d1e39cbb2133284b2b6056e563edca624eb1c", + "s": "0x5102a2dd139320e7d9e9bbaac56914c46e9ad9b9a6d51aaf45e342d8b962afcb", + "yParity": "0x0" + }, + { + "blockHash": 
"0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x74", + "hash": "0x0743291386902fcd473ddb7e877f5064496d7908ab99922d2eab153eda5e3479", + "from": "0x90f37809cccf13705a1f5e9fca645ba3fd58773c", + "to": "0xa88842ae47dbe87116cf7001dddd1b246fcd8262", + "input": "0xa9059cbb000000000000000000000000dd81c93fd89c4960719d5ca56cd4234248cbb97b00000000000000000000000000000000000000000000d3c21bcecceda1000000", + "nonce": "0x2e1", + "value": "0x0", + "gas": "0x2038b", + "gasPrice": "0xe7159518c", + "maxPriorityFeePerGas": "0x1dcd6500", + "maxFeePerGas": "0x134917da7f", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x327064cb651b767e32a25ab51fb5da51a4028407bb7ff5c9ec8e6366f7581e3d", + "s": "0x6d9172863cf75b054e38b7c09d285cc9394f4d1e395c5fc13dc0db9e835fec0d", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x75", + "hash": "0xf082e7919a81048b66eefa015800191e6adf3d9847cef165a2caf80957f0d9a0", + "from": "0xb7cf16f33ec7fa4c7863d31298acaeaa6a7fb200", + "to": "0xef1c6e67703c7bd7107eed8303fbe6ec2554bf6b", + "input": 
"0x3593564c000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000064075a5700000000000000000000000000000000000000000000000000000000000000020b000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000eeb2acded8b8000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000eeb2acded8b8000000000000000000000000000000000000000000000000000000000006220e9c400000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002bc02aaa39b223fe8d0a0e5c4f27ead9083c756cc20001f4a0b86991c6218b36c1d19d4a2e9eb0ce3606eb48000000000000000000000000000000000000000000", + "nonce": "0x4d4", + "value": "0xeeb2acded8b8000", + "gas": "0x324b6", + "gasPrice": "0xe7159518c", + "maxPriorityFeePerGas": "0x1dcd6500", + "maxFeePerGas": "0x134917da7f", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0xfe4a18238af3102fc3d9a82b39e4c3ed631275e7566d02160d3ccaee63d5f896", + "s": "0x70d64984956b9e84d5c54a79c1ba8969a391d7bf27f6b182743551ccbead141c", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x76", + "hash": "0x5aa68cd81fc0c7fca6a940eb337b4fee8fcf8df4aff92b2549d46ca380e57923", + "from": 
"0xeb436287a00ef18f33097d2942fbe205a6891621", + "to": "0x00000000006c3852cbef3e08e8df289169ede581", + "input": "0xfd9f1e10000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000eb436287a00ef18f33097d2942fbe205a6891621000000000000000000000000004c00500000ad104d7dbd00e3ae0a5c00560c000000000000000000000000000000000000000000000000000000000000000160000000000000000000000000000000000000000000000000000000000000022000000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000063fae87900000000000000000000000000000000000000000000000000000000641fd2790000000000000000000000000000000000000000000000000000000000000000360c6ebe000000000000000000000000000000000000000099fc6b90572533150000007b02230091a7ed01230072f7006a004d60a8d4e71d599b8104250f000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000300000000000000000000000048fbfdf6e6d361339e575e6a4efa2ff8cbbec107000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007bb9272a4d9000000000000000000000000000000000000000000000000000007bb9272a4d90000000000000000000000000000eb436287a00ef18f33097d2942fbe205a6891621000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009f295cd5f00000000000000000000000000000000000000000000000000000009f295cd5f00000000000000000000000000000000a26b00c1f0df003000390027140000faa719000000000000000000000000000000000000000000000000000000000000000300000000000000000000000048fbfdf6e6d361339e575e6a4efa2ff8cbbec107000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000008000000000000000000000000d5a06056701c7cfba12b1a384294148b9d2d8bca00000000360c6ebe", + "nonce": "0xb", + "value": "0x0", + "gas": "0x13545", + "gasPrice": "0xe7159518c", + "maxPriorityFeePerGas": "0x1dcd6500", + "maxFeePerGas": "0x13414e2a81", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x845dfb5b694ca8b3c89fdc142faf2aa7838ee93b1eb705fa947f3b8c05eac3b5", + "s": "0x628d57554214babf2d519e1e923e8ba5f348a6ac2f972e9199ead6d31c5f809c", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x77", + "hash": "0x88b95fba0d44d6d775f6e733235106c9feb1c793f2cccd70aafe9683322fd7ed", + "from": "0x2721ee4d44f2aeeeee8c94130c458276b8268565", + "to": "0x3aada3e213abf8529606924d8d1c55cbdc70bf74", + "input": "0x095ea7b3000000000000000000000000000000000022d473030f116ddee9f6b43ac78ba3ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + "nonce": "0x510", + "value": "0x0", + "gas": "0xb5d5", + "gasPrice": "0xe7159518c", + "maxPriorityFeePerGas": "0x1dcd6500", + "maxFeePerGas": "0x134917da7f", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0xbc9491cecde69a92c9b478aeae1c9dccb3bb8964ac02809082e483f54f6dc1ff", + "s": "0x2c3e2f7749b71c89b3ccd2b17278878db5b2c4deff595eb5028cdb360b1ebbb3", + "yParity": "0x0" + }, + { + "blockHash": 
"0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x78", + "hash": "0x7f0f0e72aa22585892e784c7a3af9b25288e27c0b8a80f5a7f2cbb8022df4b14", + "from": "0x336d94511324c75654ba41189f45a72d1ba27558", + "to": "0xf7ff004c060caa5136bf86379b7c3e5ac146f2da", + "input": "0x98ae99a800000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000003", + "nonce": "0x10", + "value": "0x0", + "gas": "0x1217c", + "gasPrice": "0xe7159518c", + "maxPriorityFeePerGas": "0x1dcd6500", + "maxFeePerGas": "0x134917da7f", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x360b0509aaf7f0d2d131ec30e939a72f9ef15873418a2ee754edb7b0ca343240", + "s": "0x32e606506c4de438f0f94164085eaea03370376b76365a41247a99d8accdb443", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x79", + "hash": "0xf8a285b4920265627e481fafceacfc64f7f9d10bc9b05a7aa5dfb4ca7b54c919", + "from": "0x2a34cfb41ec98242829c7a8e61b6b17f62c28b9e", + "to": "0x7e77dcb127f99ece88230a64db8d595f31f1b068", + "input": "0x095ea7b30000000000000000000000005fdcca53617f4d2b9134b29090c87d01058e27e90000000000000000000000000000000000000000000000008fe9a40eb0cd0000", + "nonce": "0xdc", + "value": "0x0", + "gas": "0xc0a5", + "gasPrice": "0xe7159518c", + "maxPriorityFeePerGas": "0x1dcd6500", + "maxFeePerGas": "0x134917da7f", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0xfd197d434d3311f952a41a84cfa183b1f0d4131909d7d8b17a9eb35755526c9b", + "s": "0x106c26942a9585b328d5f32f18a6c4de99714756a8b733ed14c1b454cc2cf0c3", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x7a", + "hash": 
"0x79b81b5cb23c7e0e3d60dab1f1982aede35c47d552de38edd5941c21d1884678", + "from": "0x0fecef59947fa6401fea1ba10b3aa173dabe2412", + "to": "0xddae210024c36cf7597a7de091ddcc9ecd389e49", + "input": "0x", + "nonce": "0x389", + "value": "0x8ac7230489e80000", + "gas": "0x5208", + "gasPrice": "0xe7159518c", + "maxPriorityFeePerGas": "0x1dcd6500", + "maxFeePerGas": "0x134917da7f", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x490593f0c570ec5048f10163a63f38fa25efd6377ddaaad954e163e214e31d53", + "s": "0x19a0cd25f895e341ac01c2ce98d8617a70433c91ba6c1539759c398a3e4544e1", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x7b", + "hash": "0xded45ae6aac2fd6db29614962ddd46750b960f5363dd37fa5b73cdb67a3b4e9d", + "from": "0xbf379fa98594f56dee8276d51a17d00887941fa4", + "to": "0x2a6e875f5dfa7c7af007fcff8751e39641022ce1", + "input": "0x", + "nonce": "0x12f", + "value": "0x4d7e5a6a29c8000", + "gas": "0x5208", + "gasPrice": "0xe7159518c", + "maxPriorityFeePerGas": "0x1dcd6500", + "maxFeePerGas": "0x134917da7f", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x3194b9cd7d021ab335073dd2fa4dc3a8e79c62238b12b7c49029ff535b9cfc86", + "s": "0x55ae8c2db5a89e2c002331aad91720fd98c7966e93688285fc0e0e827fc5f667", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x7c", + "hash": "0x2b8a0c90f3e219b4d13574e334791d0340c00abf762278cdd5bbde5630e22bfd", + "from": "0x3ba763eaabb146b16ef66570053032dbe584e321", + "to": "0x87c4ba6f6ee73e0d420eb56c54243a5ae486e51b", + "input": "0xa22cb4650000000000000000000000001e0049783f008a0085193e00003d00cd54003c710000000000000000000000000000000000000000000000000000000000000001", + "nonce": "0x72", + "value": "0x0", + "gas": "0xea3d", + "gasPrice": "0xe7159518c", + 
"maxPriorityFeePerGas": "0x1dcd6500", + "maxFeePerGas": "0x134917da7f", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x4171105b1e946b92f7eafde8e34e965e2e4ffa38ea682d90fdb2489bbeac561d", + "s": "0x5211f34f34b86078ad4b418030ce7eb97799ecab86b934b174071db376c876ab", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x7d", + "hash": "0xecf2a4f0819a44c4e25deedddc07a612160723d0b73fa972a5b4809ab7c7a8a6", + "from": "0x56366ad8f9ef198d7a23afce4392fc9fc34c6756", + "to": "0x0000000000a39bb272e79075ade125fd351887ac", + "input": "0x2e1a7d4d000000000000000000000000000000000000000000000000082c8c611cb48000", + "nonce": "0xe", + "value": "0x0", + "gas": "0xa08a", + "gasPrice": "0xe7159518c", + "maxPriorityFeePerGas": "0x1dcd6500", + "maxFeePerGas": "0x134917da7f", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x42447edc6b707fc0868d1dc2849ce78104006f783a04758247d3296a3e59bd23", + "s": "0x64ba1897ca0be871a235eb2cdf72c10f6dd2c4396e9bedcf152cdddee0035b4a", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x7e", + "hash": "0xf11a1a90a3a228f12d3afdea9e5e18ff7abbe6938edae44ba41dc642ba30d8f3", + "from": "0x6acb2c3fa1217b1f3817b5beb53ef6bf55be0b55", + "to": "0x3d658390460295fb963f54dc0899cfb1c30776df", + "input": "0x095ea7b3000000000000000000000000000000000022d473030f116ddee9f6b43ac78ba3ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + "nonce": "0x129", + "value": "0x0", + "gas": "0xdc6d", + "gasPrice": "0xe7159518c", + "maxPriorityFeePerGas": "0x1dcd6500", + "maxFeePerGas": "0x12da4bc163", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x96a9b077824ea4687ed0f978defa35c49c07b9e1c4442b5e964cb3d1a30dc287", + "s": 
"0x7ebb88adf38f3fd93dc1c3f4720bafeba8a23aa09dbc2417e570f0b90702758c", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x7f", + "hash": "0x4609baf2a21af385e2071332f8251fc559a76514c839c7b3b22df81b444d9a43", + "from": "0x1f95b6bdf9a2e501c87bd76ac1d2ffa5e44b163c", + "to": "0x10f227590e62f372798c06f9dff8ab96d25f00da", + "input": "0x", + "nonce": "0x6e3", + "value": "0xc51088c3e28c0000", + "gas": "0x5208", + "gasPrice": "0xe7159518c", + "maxPriorityFeePerGas": "0x1dcd6500", + "maxFeePerGas": "0x134917da7f", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0xbacbe65a0a05889a19e1a079e31dd0f15aeb97da5fb0ca9c3df9d1a2e4b808c", + "s": "0x3f626c3929354914741eefd417b7e5835287a5b7dd8d546511ee4c60c85a5763", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x80", + "hash": "0x5044751b8959a10ee018500c34c2519866156b3019f4fd249647e2cececf317a", + "from": "0x2fa7060f0c23b10b96b0109034b2efba8dfc3724", + "to": "0x00000000000001ad428e4906ae43d8f9852d0dd6", + "input": 
"0x0000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000cbff2f8f00c00000000000000000000000000040635d1eafb9cc07fa0232f7a87f47d5b1f2a4a1000000000000000000000000004c00500000ad104d7dbd00e3ae0a5c00560c000000000000000000000000002b2fe81487cfdccd2e2cf32819125d9c8bddde0100000000000000000000000000000000000000000000000000000000000000b90000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000640752d300000000000000000000000000000000000000000000000000000000643031530000000000000000000000000000000000000000000000000000000000000000360c6ebe00000000000000000000000000000000000000005e6105d75ef95dc20000007b02230091a7ed01230072f7006a004d60a8d4e71d599b8104250f00000000007b02230091a7ed01230072f7006a004d60a8d4e71d599b8104250f00000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000024000000000000000000000000000000000000000000000000000000000000002e00000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000107c0e2fc20000000000000000000000000000000a26b00c1f0df003000390027140000faa719000000000000000000000000000000000000000000000000000107c0e2fc20000000000000000000000000001ff1361114e9a8427139d5b79818c36ddbde43c300000000000000000000000000000000000000000000000000000000000000635e4577566c3c0edad82a143a42ac2ff77bb2a1639c2b6fb288f394b4cf04d96b94ed52e6aa74c040d138cf004597a1cab16a5100a81962c60d8260e2ee182c370000008bd906eccd6eb711e43b70dc3223e2614d800c058670ba30ccc65c9534eb55b3000000000000000000000000000000000000000000000000000000000000000000360c6ebe", + "nonce": "0x7e", + "value": "0xce0eb154f90000", + "gas": "0x2c278", + "gasPrice": "0xe7159518c", + 
"maxPriorityFeePerGas": "0x1dcd6500", + "maxFeePerGas": "0x134917da7f", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x1e540b9cd5ab6e9e0d659e61a99d1cdfa6234fceb266379eb530014947467bf1", + "s": "0x4a87b7dcaebbfde68988658f67af2e5b3b616db30c235d57f479e5e0435ae894", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x81", + "hash": "0xd19f73e661be95396454f4c0bcf0793007ff20faa41896fe80c0d7027b98d9e5", + "from": "0xc3387290c153d03b4786b473f3dc6e338efed9c9", + "to": "0xef1c6e67703c7bd7107eed8303fbe6ec2554bf6b", + "input": "0x3593564c000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000064075a6f0000000000000000000000000000000000000000000000000000000000000002080c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000016000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000cce41660000000000000000000000000000000000000000000000000000c969cecd711aa3500000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000076fca1adb104770b38581b64d55e67fa5a0f3966000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000c969cecd711aa35", + "nonce": "0x27", + "value": 
"0x0", + "gas": "0x3f345", + "gasPrice": "0xe7159518c", + "maxPriorityFeePerGas": "0x1dcd6500", + "maxFeePerGas": "0x115126adc3", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0xdc96e23d572af39b8d8955c00a176cbbf0dacea7e440baf5861f14f37f5b1cbd", + "s": "0x7d8b20a21a0981160d82c5ccb22fb435532ce0af05e84b9806ceb9859514b427", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x82", + "hash": "0x9e4cc002f36e45ea5f2ce1ac68ae1f271062c992356f8c62dbc4aead130f50f1", + "from": "0x10f27d3ae35244062e6a7381a7d412a5ba55da07", + "to": "0x50940a1a04b9e42184f10c2effc38eefbc6886a1", + "input": "0x", + "nonce": "0x14", + "value": "0x5d5d8550b280000", + "gas": "0x5208", + "gasPrice": "0xe7159518c", + "maxPriorityFeePerGas": "0x1dcd6500", + "maxFeePerGas": "0x115126adc3", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0xc6985cead0ed99204d1db0a8f93a49d864873063a1b65c999e6b9b02dc4076", + "s": "0x647cfdc003bdffdbe3f4dc7c2c25937146a0a174753bb590e0999cf45df27859", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x83", + "hash": "0x84775ffcef593c17ff7453a54e85144665459500fddf5d3b65454e6aa302b779", + "from": "0xc046b0d9d7cd68182a80144ab013860c5ebe60bc", + "to": "0xf7ff004c060caa5136bf86379b7c3e5ac146f2da", + "input": "0x98ae99a800000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000003", + "nonce": "0x0", + "value": "0x0", + "gas": "0x1217c", + "gasPrice": "0xe7159518c", + "maxPriorityFeePerGas": "0x1dcd6500", + "maxFeePerGas": "0x134917da7f", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x1511cf8a78c4b96ff23ae240c4172fe2563135d4a803aa52d6e3ab62b4017d76", + "s": 
"0x46f92cf592008adcd606ba8fbc253a079fef4f24d676f7035bd7df801c4b7c35", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x84", + "hash": "0x815687511988a0fa0b663c3969467bf981032ef114b38e8533badc77c05a68f0", + "from": "0xa5c764b326019a77c153b1f4e5fea9f924489774", + "to": "0x5880cfe636d12f21c26490b95e2026c581fc2e3d", + "input": "0x", + "nonce": "0x114", + "value": "0x6e8e6cf170b9e8", + "gas": "0x5208", + "gasPrice": "0xe7159518c", + "maxPriorityFeePerGas": "0x1dcd6500", + "maxFeePerGas": "0x134917da7f", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0xfe54d9669f79c92e9a4ae02808b65973b9b189537a4a7bfd37ef17ccbda0cbb7", + "s": "0x200824fd3619625923ddd15f2cae66a85190abadb0e1bac643e0e4b75e1730cd", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x85", + "hash": "0xa14abf32d2860cc214559892069d5eba7b660294ae9158e972b23e174bc6e392", + "from": "0x3540690b1d60e3e78eb01d51a4ceea482bd4e4eb", + "to": "0xef1c6e67703c7bd7107eed8303fbe6ec2554bf6b", + "input": 
"0x3593564c000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000064075a6f0000000000000000000000000000000000000000000000000000000000000002080c0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000001600000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000246163a503e00000000000000000000000000000000000000000000000021ca568a8e6431e400000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000076fca1adb104770b38581b64d55e67fa5a0f3966000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc20000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000021ca568a8e6431e4", + "nonce": "0x111", + "value": "0x0", + "gas": "0x3f362", + "gasPrice": "0xe7159518c", + "maxPriorityFeePerGas": "0x1dcd6500", + "maxFeePerGas": "0x115126adc3", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x6c84443b2363aa9222e2579d5f69f3a92aa9b00d73eba8ec66c52300e017e109", + "s": "0x72ad0139fce9bba6bab6ef5646c0bc205e5c4f00dee8df175d022590e55361f", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x86", + "hash": "0x50f27e4a5b69da5db4a060303dff55ae932a647be53892df3ced91349a546f85", + "from": "0x3427915eb439c923b8632cb19b9ccb81fd3726e2", + "to": 
"0x3819f64f282bf135d62168c1e513280daf905e06", + "input": "0x61c2acc3000000000000000000000000000000000000000000000000000000000000182300000000000000000000000000000000000000000000000006f443cb88f2c000", + "nonce": "0x160", + "value": "0x0", + "gas": "0x112c3", + "gasPrice": "0xe7159518c", + "maxPriorityFeePerGas": "0x1dcd6500", + "maxFeePerGas": "0x134917da7f", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0xcd96bc4c5766ec136b8afdcd291cc397a54bd32beb8a35f1a4b4c02b0e973f62", + "s": "0x35dc205ac8cb3aa76502bf1afa9c5cfd06a98c0f4af940c1e7dfeddda34deacf", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x87", + "hash": "0x09a0306f483afa6dc44201adb322e6ed46fb2493013a3256386d4fae3bd8845b", + "from": "0xae02988f66a7c024954748317a48c9ef8b2f59e2", + "to": "0x5427fefa711eff984124bfbb1ab6fbf5e3da1820", + "input": "0x3f2e5fc3000000000000000000000000ae02988f66a7c024954748317a48c9ef8b2f59e200000000000000000000000000000000000000000000000004fefa17b7240000000000000000000000000000000000000000000000000000000000000000a4b100000000000000000000000000000000000000000000000000000186bc9e00b50000000000000000000000000000000000000000000000000000000000002cb3", + "nonce": "0x5c", + "value": "0x4fefa17b7240000", + "gas": "0x1477f", + "gasPrice": "0xe7159518c", + "maxPriorityFeePerGas": "0x1dcd6500", + "maxFeePerGas": "0x134917da7f", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0xa375d8378a197e546258df18f7ff6eea97fbe8701fec55c6b3679cebf5f6b248", + "s": "0x76b2b8c896142c84ae36934c9e4c57ad9a4e56843fd493875789aaa35a7bcdae", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x88", + "hash": "0x0e01b84ba7c4f18bd61c24d108e7bf9ae0b2df709b06fcb2ace78ea08aa490c9", + "from": 
"0xd1ea7cc78720c2311f756cc63c3e94faad69aaf3", + "to": "0x000000000000ad05ccc4f10045630fb830b95127", + "input": "0xb3be57f800000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000007a00000000000000000000000000000000000000000000000000000000000000ee0000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000003e000000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000032000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000fffffe000000000000000000000000d1ea7cc78720c2311f756cc63c3e94faad69aaf300000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000b92d5d043faf7cecf7e2ee6aaed232000000000000000000000000acf63e56fd08970b43401492a02f6f38b6635c910000000000000000000000000000000000000000000000000000000000000d6200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000a39bb272e79075ade125fd351887ac00000000000000000000000000000000000000000000000019ac8532c2790000000000000000000000000000000000000000000000000000000000006405c291000000000000000000000000000000000000000000000000000000006407618a00000000000000000000000000000000000000000000000000000000000001a00000000000000000000000000000000071402768b1d6cee5982ee3ef274bc2b60000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000320000000000000000000000
00f0638096047ef0183d0f3232e82205b118e100dd000000000000000000000000000000000000000000000000000000000000000101000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000000000001b16c089710d4e7c1e1393d135d3c54458b1a9a696762ef7163d7c957286534f0076466c5c61059968ad351ed02ac232375002f99063e4bc4696a859b7ef6fa4c300000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000001b6352def85bce5e318e7aa2e75504126ee4dc4b5ea078b59d4e1c571f852e36a2078f50a0a873b5ad60ab50b596d74bc77f98036a125bc4a9b9eb20e9482de14200000000000000000000000000000000000000000000000000000000000002e000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000fffffe00000000000000000000000094878d65ee233e643946b36d87a6d94d47f2b94700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000b92d5d043faf7cecf7e2ee6aaed232000000000000000000000000acf63e56fd08970b43401492a02f6f38b6635c91000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000a39bb272e79075ade125fd351887ac00000000000000000000000000000000000000000000000019ac8532c2790000000000000000000000000000000000000000000000000000000000006405c2900000000000000000000000000000000000000000000000000000000065e6f60e00000000000000000000000000000000000000000000000000000000000001a000000000000000000000000000000000d2a7a9ed04f52663c26857b106da76d200000000000000000000000000000000000000000000000000000000000001c0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000
0000000000000000000000001b45c2b9795e6b013893aa5f3ae13bef18abb9b154085d07fb63165e52a182fd4d1ac2b26e061203d74a5edbf8eef72b2b5c58344e217204f2e1611bf1fee95dd6000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000003e000000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000032000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000fffffe000000000000000000000000d1ea7cc78720c2311f756cc63c3e94faad69aaf300000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000b92d5d043faf7cecf7e2ee6aaed232000000000000000000000000acf63e56fd08970b43401492a02f6f38b6635c910000000000000000000000000000000000000000000000000000000000001c1300000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000a39bb272e79075ade125fd351887ac00000000000000000000000000000000000000000000000019ac8532c27900000000000000000000000000000000000000000000000000000000000064070ddb000000000000000000000000000000000000000000000000000000006407618a00000000000000000000000000000000000000000000000000000000000001a000000000000000000000000000000000e6d2d9d11aff90fd52930d27f727d2b0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000032000000000000000000000000f0638096047ef0183d0f3232e82205b118e100dd000000000000000000000000000000000000000000000000000000000000000101000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000
000000001b7f5b995a06106ef8ebab240e5d4b0d39263cc3ad14d9253cdc6672116a751eaa722de0c659f9af96b24c3720e0df36f50d47ecb35ec9b07a91954307802d51e500000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000001ba2af6b2406ec19e42633fcc5cdcd86a4175c97b09904638de25dd9480005c3e178f927202c3bcf13aad0db8c642a7764f9a1cfbab3f6c79fdcfcd1584cbd5cf900000000000000000000000000000000000000000000000000000000000002e000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000fffffe000000000000000000000000162a64e9988479a960d943418479152c58e0c88400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000b92d5d043faf7cecf7e2ee6aaed232000000000000000000000000acf63e56fd08970b43401492a02f6f38b6635c91000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000a39bb272e79075ade125fd351887ac00000000000000000000000000000000000000000000000019ac8532c27900000000000000000000000000000000000000000000000000000000000064070dda0000000000000000000000000000000000000000000000000000000065e8415a00000000000000000000000000000000000000000000000000000000000001a00000000000000000000000000000000042964f64e42f00ad5ce15ef70333210800000000000000000000000000000000000000000000000000000000000001c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000101000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000000000001cc09d842bb0c8feb4ceaa3f71f83fc4572ffbe10bc026eaba8917cf2dddd46a613d318d77deee33c461cee924f33ff596550396ebacda214676527185510064f2000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000003e0000000
00000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000032000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000fffffe000000000000000000000000d1ea7cc78720c2311f756cc63c3e94faad69aaf300000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000b92d5d043faf7cecf7e2ee6aaed232000000000000000000000000acf63e56fd08970b43401492a02f6f38b6635c910000000000000000000000000000000000000000000000000000000000000a8200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000a39bb272e79075ade125fd351887ac00000000000000000000000000000000000000000000000019ac8532c27900000000000000000000000000000000000000000000000000000000000064071e63000000000000000000000000000000000000000000000000000000006407618a00000000000000000000000000000000000000000000000000000000000001a000000000000000000000000000000000653db7e081b9250ed52afdf26492ccbe000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000032000000000000000000000000f0638096047ef0183d0f3232e82205b118e100dd000000000000000000000000000000000000000000000000000000000000000101000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000000000001cf0675a92576b5d092f05d28b9b12a311bedf86f754a0343040bccf19da7e0dd50c61ad1c9029cab87c3264caccd7ecc70d0de30a9c36897c55ba5c43eee70c6800000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000001b85533c98df7a005a79d54c
bb8ded9cd7050ec1a6db24eca9f09bf77216ff4eeb39e08728a9e16989dc9d7f906c2b787b1885e2607245a5d80053530fab67f20700000000000000000000000000000000000000000000000000000000000002e000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000fffffe0000000000000000000000001711bc52bf7e0494325799717fe640f1924617b700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000b92d5d043faf7cecf7e2ee6aaed232000000000000000000000000acf63e56fd08970b43401492a02f6f38b6635c91000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000a39bb272e79075ade125fd351887ac00000000000000000000000000000000000000000000000019ac8532c27900000000000000000000000000000000000000000000000000000000000064071e620000000000000000000000000000000000000000000000000000000065e851e100000000000000000000000000000000000000000000000000000000000001a000000000000000000000000000000000695badebdda607713eccd025168b8eb500000000000000000000000000000000000000000000000000000000000001c000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001200000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000001b12ffb7865fd6fd4e30311a4397fd1cf27ee25c449f5e1575a351abf3771967e1315c931c9308c84bba1100d2ec23ba53e61162a7af5f81aed05437ef0bf55d8700000000000000000000000000000000000000000000000000000000000000043649125d44bf8abfbaf4f4f7a5ba9677b75a4087744d9a990647f9a2ea6f2755d44bdfa45631ca0aff31ecedd3e4d5d4e5d62b221587a46f2fc7634b2c260277166e83bd57fde7da65b5be4c68bbf85ec8c8e66cdf124d608a6fcc7f50976d12556d411c0d6445d3d42e72cfc286bdfa82b7048a05a40522192397b94c2a80d8", + "nonce": "0x200", + "value": 
"0x0", + "gas": "0xbb1eb", + "gasPrice": "0xe7159518c", + "maxPriorityFeePerGas": "0x1dcd6500", + "maxFeePerGas": "0x115126adc3", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x999f163f59640094bda57d96ef28d654f9be77ffbb3a4dffc62d5935780f6a28", + "s": "0x58966dd0347d94859f9c812e8171bf31a9f55d3b8387f96343af6fc1c68ac8f8", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x89", + "hash": "0x19ed38d14e13ff5d2b841d6ec0ab52fe88ce7f5ff72f19a6b1755f81517ca242", + "from": "0x914dfe6933ccc682cf61e3830b94d4f29e322789", + "to": "0x4cd6f715722eb17c489ca803fb63b7e4905bc895", + "input": "0x", + "nonce": "0x3", + "value": "0xb1a2bc2ec50000", + "gas": "0x5208", + "gasPrice": "0xe7159518c", + "maxPriorityFeePerGas": "0x1dcd6500", + "maxFeePerGas": "0x115126adc3", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x82eceec7b292fa11dbc2dad9f46c0ecb6a5c40320ba486ddef3ae191abeec6bb", + "s": "0x6e2394710a040b4768f851b05d8445e412cdd80f62410df8410029c05b512861", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x8a", + "hash": "0x5f9a540fbc2a1c60c0082782d10487a38fbb60d7e8fd3f2081acfd43e6f44bca", + "from": "0x881ee91c8d73b87c3cc55f641e209a376a0d4988", + "to": "0x6b175474e89094c44da98b954eedeac495271d0f", + "input": "0xa9059cbb0000000000000000000000008861bf5a6baba5c819b359552c6f0926516d62d60000000000000000000000000000000000000000000000dd24ab9fd6e7753293", + "nonce": "0xe", + "value": "0x0", + "gas": "0x9548", + "gasPrice": "0xe6f8f8e0c", + "maxPriorityFeePerGas": "0x1c03a180", + "maxFeePerGas": "0x100eeef0c0", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0xc8caf4c5b9c6396cb324daf0df2f42a419c0b7e24472562e02e0552f950ae1cd", + "s": 
"0x61c2d582fe6142f85631105baa154206d17590367ba9d36ab5a6138d5d7ae7ac", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x8b", + "hash": "0x40057dce5c3f5e9937bea8af983a30c60e1a911991fef550958855631b537370", + "from": "0xe270b637c579f35205903634af4e6184ef09e53d", + "to": "0xdac17f958d2ee523a2206206994597c13d831ec7", + "input": "0xa9059cbb0000000000000000000000002066a0d20b9f51f265b2a40089630c2a831e91af00000000000000000000000000000000000000000000000000000000d3897140", + "nonce": "0x5", + "value": "0x0", + "gas": "0xc612", + "gasPrice": "0xe6f8f8e0c", + "maxPriorityFeePerGas": "0x1c03a180", + "maxFeePerGas": "0x10156c58be", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0xe920aca4c11f498cc58f731d59bcf168260eeb1b4bf853f3b63aaaa1f065adc3", + "s": "0x1333d9e75b17f4f190a8783896830a0390ffdd3f2a838fe81dfd8887cca7ab3", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x8c", + "hash": "0xe9e2e8e6592eff69961786523132ef651adf04d3f870691d752ea93065c342d7", + "from": "0x227ba29ae8bbe041457d2d2d0d71f6b8188a1cc9", + "to": "0xbd6f2c9d4ecde067cd91ca9facdf1dc444bacec5", + "input": "0xa9059cbb0000000000000000000000007a751a25f341bdd6a493af1a77b6ce0b090e00820000000000000000000000000000000000000000000000a2a15d09519be00000", + "nonce": "0x90", + "value": "0x0", + "gas": "0xe295", + "gasPrice": "0xe6dc5ca8c", + "maxPriorityFeePerGas": "0x1a39de00", + "maxFeePerGas": "0x1013a2953e", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x2680f37cef7f72cf8ebb9e9a11addb3f05f64e5e55a9486edbe18dc5f7f187af", + "s": "0x68811d77273330739945a9004cccd6b8ae01995527dcd39198ff72689438926d", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": 
"0x1000000", + "transactionIndex": "0x8d", + "hash": "0x9d505bea08d6a12b73fc579ed037a24ec8afe9f238d7aa3a3accad59c801e0dd", + "from": "0xf21b51c3547065f20a5e2feb6ae373e5d44706da", + "to": "0xba12222222228d8ba445958a75a0704d566bf2c8", + "input": "0x945bcec9000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001200000000000000000000000000000000000000000000000000000000000000300000000000000000000000000f21b51c3547065f20a5e2feb6ae373e5d44706da0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f21b51c3547065f20a5e2feb6ae373e5d44706da000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003800000000000000000000000000000000000000000000000000000000064076a7d000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000100fd1cf6fd41f229ca86ada0584c63c49c3d66bbc9000200000000000000000438000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000e682a70351de5d762200000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000000096646936b91d6b9d7d0c47c496afbf3d6ec7b6f800020000000000000000001900000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003000000000000000000000000808507121b80c02388fad14726482e061b8da827000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc200000000000000000
0000000a0b86991c6218b36c1d19d4a2e9eb0ce3606eb4800000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000e682a70351de5d76220000000000000000000000000000000000000000000000000000000000000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffffbc75be08", + "nonce": "0x9", + "value": "0x0", + "gas": "0x343a5", + "gasPrice": "0xe6bfc070c", + "maxPriorityFeePerGas": "0x18701a80", + "maxFeePerGas": "0x10963c2200", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x1fc93632754a476599bd941d1035bcee72998a0f52f3aa583ad8b84fd5cddc7", + "s": "0x212e34aee48840b2b688ec3cad2fa4d9a12eea965c614eac6195a202f59e452f", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x8e", + "hash": "0x335e080f0b480ca9bacf89079266f084dd760066753a2c46b1c499b74a4437e2", + "from": "0x522a879def90fe14eeb539940761801096b15c36", + "to": "0x6b175474e89094c44da98b954eedeac495271d0f", + "input": "0xa9059cbb0000000000000000000000008861bf5a6baba5c819b359552c6f0926516d62d6000000000000000000000000000000000000000000000007a74fbd725dacf9ae", + "nonce": "0x107", + "value": "0x0", + "gas": "0x9548", + "gasPrice": "0xe6b63708c", + "maxPriorityFeePerGas": "0x17d78400", + "maxFeePerGas": "0xf7fd03a6d", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0xae7bb49d32b485aeb743abc569ff5f9aecf97442111ce8c076dcdf8e773c4428", + "s": "0x328b3b8ad53a46eadd5028c2ca99286c622f036db7b0669e29c3b2cd22eb0289", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x8f", + "hash": "0xb0ad6199f8732d448ac7ee9c9004660f9d9e970948f4113e0f002ea25950b22b", + "from": "0x30bd0812c39e643b373774f96602f64ecd26c39e", + "to": "0x6b175474e89094c44da98b954eedeac495271d0f", + "input": 
"0xa9059cbb0000000000000000000000008861bf5a6baba5c819b359552c6f0926516d62d60000000000000000000000000000000000000000000003350354f720d5315fd3", + "nonce": "0x189", + "value": "0x0", + "gas": "0x9555", + "gasPrice": "0xe6acada0c", + "maxPriorityFeePerGas": "0x173eed80", + "maxFeePerGas": "0xfb452e527", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x7071f54443d47b11c61919d6d302aa6f1537e249a7b5f66bc92c6bb0ab817b0f", + "s": "0x54f66f472e933602817113f60db64ebc6746424ffdfe7f47cb779f1efcbef5b7", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x90", + "hash": "0x006b361b42aee1b7a91790cacede15356421a29e0281727bcba2c145fd7b0da6", + "from": "0x1b345d33772089a0f3b83a863f32efb6d80629ed", + "to": "0x1111111254eeb25477b68fb85ed929f73a960582", + "input": "0x2d9a56f60000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000a90dcdaa23000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec7000000000000000000000000582d872a1b094fc48f5de31d3b73f2d9be47def10000000000000000000000001b345d33772089a0f3b83a863f32efb6d80629ed0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002cb417800000000000000000000000000000000000000000000000000000004d8a91e9d19000000a4000000a4000000a4000000a400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000014000000000000000000000000000000000000000000000000000000000000000a4bf15fcd8000000000000000000000000303389f541ff2d620e42832f180a08e767b28e10000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000242cc2878d0064076316000000000000001b345d33772089a0f3b83a863f32efb6d80629ed0000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000", + "nonce": "0x353", + "value": "0x0", + "gas": "0xe310", + "gasPrice": "0xe678648a1", + "maxPriorityFeePerGas": "0x13fa5c15", + "maxFeePerGas": "0x10a43d5fc7", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x96867bea1eb7b75a344403aedb0e4c4dafa5f287962fe0eb4428f4364aad8a2f", + "s": "0x5e0f2e2564b821636b1a86fc5ce87103dd2df1c5766dc33daa9ae55875f25609", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x91", + "hash": "0x5faf831deff94dec62a13c83f98db5bd5799a5c34a35757907a2cb26140cfb8a", + "from": "0x9406da539cea76c843eafb795972c7370a437a8e", + "to": "0x1111111254eeb25477b68fb85ed929f73a960582", + "input": "0x12aa3caf0000000000000000000000007122db0ebe4eb9b434a9f2ffe6760bc03bfbd0e000000000000000000000000064df3aab3b21cc275bb76c4a581cf8b726478ee0000000000000000000000000eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee000000000000000000000000397973ba6e752943ea9146f88414d1f379fd427e0000000000000000000000009406da539cea76c843eafb795972c7370a437a8e0000000000000000000000000000000000000000000058d844eb968dd1a7100000000000000000000000000000000000000000000000000009b4dd7a988444c3000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000001400000000000000000000000000000000000000000000000000000000000000160000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c30000000000000000000000000000000000000000000000a500008f00005300206ae4071198002dc6c0397973ba6e752943ea9146f88414d1f379fd427e000000000000000000000000000000000000000000000000000000000000000164df3aab3b21cc275bb76c4a581cf8b726478ee04101c02aaa39b223fe8d0a0e5c4f27ead9083c756cc200042e1a7d4d0000000000000000000000000000000000000000000000000000000000000000c0611111111254eeb25477b68fb85ed929f73a960582000000
0000000000000000000000000000000000000000000000000000e26b9977", + "nonce": "0x42", + "value": "0x0", + "gas": "0x7941d", + "gasPrice": "0xe678648a1", + "maxPriorityFeePerGas": "0x13fa5c15", + "maxFeePerGas": "0x10a43d5fc7", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x6cee4bfe87aebaf1f4b4515ffe33cd9706cc91744e1b1b21e4b80e9be5b4a2bf", + "s": "0xcb54427c440e6349e1a8c29408953b8095bf09e608fbd0f4f510f1deab3d469", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x92", + "hash": "0x4885f583334e5252b929bfbf2ae49dd21ce4b7181e5d24c95449e124c8f9cc5c", + "from": "0xa7b69aa4335db9f779763dbbc24c98d9c6b60d4a", + "to": "0x4751695aab223d440b9b8f910ad449406d8e8700", + "input": "0x3ef13367000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec7", + "nonce": "0x13b8", + "value": "0x0", + "gas": "0x23668", + "gasPrice": "0xe5fece99d", + "type": "0x0", + "v": "0x26", + "r": "0x8d3b779e4a151b8e7ab8fc50d6ad9a33df6aba3ae731c5d3b375eba3dfb03c6", + "s": "0x746564d34cb71a6c233f6d4b48454fd35eefeb135a7b5c0f024c413813386417" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x93", + "hash": "0x209bdc92a643c24939223440ffc107211bc38570d2011ae08a7912766a936165", + "from": "0x779701c8e02e1f66fc2a315a39527be832e88795", + "to": "0xd6fca65266adac7a35d679ad53dd3aea1f94b95f", + "input": "0x8119c0650000000100000000000000000000000000000000000000000000000e0f4991f6a0b86991c6218b36c1d19d4a2e9eb0ce3606eb480001000100000000000f41dce0554a476a092703abdb3ef35c80e0d76d32939f0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc20100000100000000000f404c88e6a0c2ddd26feeb64f039a2c41296fcb3f56400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "nonce": 
"0x1dbf", + "value": "0x0", + "gas": "0x36024", + "gasPrice": "0xe5df56af0", + "maxPriorityFeePerGas": "0xa697e64", + "maxFeePerGas": "0x5a3b9f43b4", + "accessList": [ + { + "address": "0xe0554a476a092703abdb3ef35c80e0d76d32939f", + "storageKeys": [ + "0x0000000000000000000000000000000000000000000000000000000000000000" + ] + }, + { + "address": "0x88e6a0c2ddd26feeb64f039a2c41296fcb3f5640", + "storageKeys": [ + "0x0000000000000000000000000000000000000000000000000000000000000000" + ] + } + ], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0xb58fb31732f2d20337afc7c0b40c3e96f0de572643867d47c89f74658511173e", + "s": "0x3b64b4a6dac5016f1d99935c33dc7e68207cec6760ac389ce1961cfea5c2b896", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x94", + "hash": "0x5d9af45992f461b3e85d45df0435aa0c2e4c671debd351cb157dc2d95fce58a7", + "from": "0xcefcca169357a18eb8c3f230f92b58562e48cae5", + "to": "0x3ee18b2214aff97000d974cf647e7c347e8fa585", + "input": 
"0xc68785190000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000041801000000030d0051f50b16db2424c1289cc7eb28388c3a39ac1b34fd1e038d8ec71dd198e02f3f594ea6a1b1ef8e9cf118898b6d338cf1fdbbbe419a82a20f4ffd7c24d43fdaf00102c5b26b51c37f8bdb8592f2495114bc16d37832fbef56a1e3b8b84237262509c51640a853133f547eca8144a86c6cd7d6b8393969987035e0bd30a57eacd4e57c0003b19304dfb3963e73f1addfde15974300314937d62aae0a78576e85af5e1faf9e11a0d09d6a35f42db06c226c2ff9ec9bb540ab22d125a3abb19e9e5a6579ac240004cfeda01b1ad602f11c26918fc5a25621309c71e5ab660cc5d583c517b9890cae09d9f698f4d349877e42ce707c805ef98c2f30a073ec3e89c7ef23caba5606f300064ab21d5cc4361c1f3d24f36e6b9f0a00a555070f99ffab0779ec1f90a9a4c01f65f6868dbd59bc1f032ddb2f9bd65d188e115cb65120684752e7d0f679fb53740008d90410e564736e559b8d110800e94400d1919b8a38fae59ff1bfaa9ca6d7ddd63a6ce0ad94d4094fe0a307ef4a210092b9a148a38810a86a0d79f34b0d8f2914000ad4c802ebb30efc95db4eaedda0f5f22a4a5b80866df4d69cc5ad27901d224b2e53e493a0ce08846067592a723c7d6f0e8540f9409788ac2fe09a5479e79cd11e000b45e5ca863e6a2204af1f2477857dbb9ed171bb9756ded6eed69457d0462007d1535a5c8d7dd60076daedf56b74e1b95c78ff772db85fd2608cd6114615cd1e95000d8ee6bdbf283d5c15642dc466c9f0861aa906052da4577a9d8b6a51f3061a41f71fcc80da7222152449ae78597bb9a08d7e00f2d88db5b7b0d5ed2d89dd2f428b000eb368e6d99debe9b5d57643b4b661a170f88a73e0d5386254266e5903479e3a227a297afc750ea5a774b6281bb55c6b3d9ae12126c72972a1f8cef0210e4b0974010fbf968d7c43ec6415506ced5337ca720f0296d8fc5493554abeae8f6741f35c6d773e7261619f39161d267ce932ad8add8557a6b1a7f1a68ceb5c1c342e39469a011092fec6087b7e20f647a9fea083bc0a2c7193fc74685eaf199df32432f77ada8a6d040529b339f922d7fdb3c8424453aa6eb636247abbec1e482bc34dbd5b5ed500125e464bee43895bbf14c093e3ce2d7fdb31bf7f33cfb76d4046dec93c2e8f31a31c5c30ce9ec2f33bd2a1b4711204ee527b4175e52961ef77e12c7563e4a7b3520164075361000148b800030000000000000000000000007cf7b764e38a0a5e967972c1df77d432510564e2000000000003f02a000100000000000000000000000000000
000000000000000000000000039670c5f8001000000000000000000000000000000000000000000000000000000757573640003000000000000000000000000cefcca169357a18eb8c3f230f92b58562e48cae5000200000000000000000000000000000000000000000000000000000000000000000000000000000000", + "nonce": "0x9216", + "value": "0x0", + "gas": "0x44ad2", + "gasPrice": "0xe5c7cbe0c", + "maxPriorityFeePerGas": "0x8f0d180", + "maxFeePerGas": "0x13414e2a81", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0xe8561a1ac7f089cfcc3b6a6f4fdb17e8fc512c82929b5d8072f187ceca133dff", + "s": "0x1d1b22f4501530c059ec1306859c9a7c91037f37a5ce97a5b26d9e10f7dc06ce", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x95", + "hash": "0xa550d5b4cc0250a9d84b6feaf67d8052925b41dcbbe2edcecf19a21c0bdb02a2", + "from": "0x9afa066884ce723200438b672c0b8d5769e03a6a", + "to": "0x4c81c35e0e8ea7128def9bcf1586dc92918e6c07", + "input": "0x", + "nonce": "0x3d630", + "value": "0x206b239f9350000", + "gas": "0x5208", + "gasPrice": "0xe57a56800", + "type": "0x0", + "v": "0x25", + "r": "0x564dbae15341b9c3b1a5a5d1cc8c3fe2e553205caf25b0ca72d6b317e3efbf1e", + "s": "0x6f1bc5601e59e251461b190a3cc8065e3b5a5ba163933f3638a64e3f90c66ac5" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x96", + "hash": "0x9eafd3470cb5f7be37ad5d31262d3e7c11bde536f5ca6021540e965919b84472", + "from": "0xa21740833858985e4d801533a808786d3647fb83", + "to": "0x9008d19f58aabd9ed0d60971565aa8510560ab41", + "input": 
"0x13d79a0b0000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000012000000000000000000000000000000000000000000000000000000000000001c000000000000000000000000000000000000000000000000000000000000005e0000000000000000000000000000000000000000000000000000000000000000400000000000000000000000002ad335dd3ca11c18cebbbb583b9613b6289d75f0000000000000000000000000f7b3f5a8fed821c5eb60049538a548db2d479ce000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec7000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000002f1c8be9800000000000000000000000000000000000000000000000000000002540be40000000000000000000000000000000000000000000000000000010afe92fcd7590000000000000000000000000000000000000000000028282092070d6407df6000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000022000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001000000000000000000000000ca3121d4459f3908ec0a39c1f88d7b64855664bb00000000000000000000000000000000000000000000000006bc9c167bb0cc800000000000000000000000000000000000000000000003034354d744473eb6d5000000000000000000000000000000000000000000000000000000006407551cfcc07fc9316ea29f27d361457b98f622c71490b7c9ea72d3a7e0a59d2d370b6d0000000000000000000000000000000000000000000000000033bf4358013380000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000006bc9c167bb0cc8000000000000000000000000000000000000000000000000000000000000001600000000000000000000000000000000000000000000000000000000000000041b8ba3bed7e135190bf1caa6e0dd0324e371ef9970a4fbd7d5f2f9105f913811505da70e5ad1c8918237be6659bad2ad3fb1544e1a6203d1e74bec6ce8cd9e7591c000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003000000000000000000000000af50f501bea0e1066e81090e7b5307528c76ed2a000000000000000000000000000000000000000000000c9f7e99db6a6634021200000000000000000000000000000000000000000000000000000000eb918a030000000000000000000000000000000000000000000000000000000064075519c86d3a0def4d16bd04317645da9ae1d6871726d8adf83a0695447f8ee5c63d1200000000000000000000000000000000000000000000001a852aa1eb116e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c9f7e99db6a66340212000000000000000000000000000000000000000000000000000000000000016000000000000000000000000000000000000000000000000000000000000000418780d387d51b52e733aecc62e028c1b7123da0c9d6b4686481cb720bf836ec7d5b695280210ce28b008b2c65c402aa0f3a0750a7cb5a2841a1198e15bbfeba841b000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000005200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000300000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000140000000000000000000000000000000000000000000000000000000000000032000000000000000000000000002ad335dd3ca11c18cebbbb583b9613b6289d75f000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000044095ea7b3000000000000000000000000def1c0ded9bec7f1a1670819833240f027b25effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff00000000000000000000000000000000000000000000000000000000000000000000000000000
000def1c0ded9bec7f1a1670819833240f027b25eff000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000148d9627aa40000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000c9f7e99db6a6634021200000000000000000000000000000000000000000000000000000000ebd29ac50000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000300000000000000000000000002ad335dd3ca11c18cebbbb583b9613b6289d75f000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec7869584cd0000000000000000000000001000000000000000000000000000000000000011000000000000000000000000000000000000000000000094f5627859640753670000000000000000000000000000000000000000000000000000000000000000000000001111111254eeb25477b68fb85ed929f73a9605820000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000c40502b1c5000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc200000000000000000000000000000000000000000000000006bc9c167bb0cc80000000000000000000000000000000000000000000000300a9bdf4fad791a5770000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000180000000000000003b6d0340a7480aafa8ad2af3ce24ac6853f960ae6ac7f0c4000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "nonce": "0x79de", + "value": "0x0", + "gas": "0x9c460", + "gasPrice": "0xe569ec5ba", + "maxPriorityFeePerGas": "0x312d92e", + "maxFeePerGas": "0x19879d9622", + "accessList": [ + { + "address": "0x2c4c28ddbdac9c5e7055b4c863b72ea0149d8afe", + "storageKeys": [ + 
"0x360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc", + "0x34649aa282e0e8132f3d7cf73f7949d459949e828bcceac8be09b917eff48f86" + ] + }, + { + "address": "0x9e7ae8bdba9aa346739792d219a808884996db67", + "storageKeys": [] + }, + { + "address": "0xc92e8bdf79f0507f65a392b0ab4667716bfe0110", + "storageKeys": [] + }, + { + "address": "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2", + "storageKeys": [ + "0x2ffdfd7751766c16274334edd795663993781d066ef5d5522aa5e39bfec0e031", + "0x5f234ea603c1b079d51a659a6b2e1f6a1fda8efbf839a4b6881b9c411c8a9953", + "0x9d98752c354deebddd53535455198eacf8cfb934237d3523207f70386be5e3dc", + "0x062f39e90a885e25aea7a8461dc1f2f37071d7f144120b73352fb34f9b2d508e", + "0xaced72359d8708e95d2112ba70e71fa267967a5588d15e7c78c1904e0debe410", + "0xaf12155b1f4ad919a68a14bca822017cd0381d9f295c80b9aa8cd2b364136393", + "0x68a459b82a7e3ea8c1bfb2482254387d334a015103cb91e5bb8ba7c386ec60a9" + ] + }, + { + "address": "0x02ad335dd3ca11c18cebbbb583b9613b6289d75f", + "storageKeys": [ + "0x75498f116f989e42d11000ec6d6e9ca16b13a6d6fc7ed88b600c6e8486be3d1d", + "0x618059526d7d1bb5608c8e3a0740d1f656fa8a764ecca600a8e0e3e0c313ce66", + "0xffec0d9049a329b0164005e75f8f0fd6f7f20dbf23c622824210754d0061fe88", + "0xe8ce7c409f3c51051f74f23e839822a714956ab70ac7e4058caac9ee833fd8b4", + "0x66a6bb5952166f672cbc2f23a3447744c4f9b69c69c5ec9b3b0615b12965df49" + ] + }, + { + "address": "0xdef1c0ded9bec7f1a1670819833240f027b25eff", + "storageKeys": [ + "0x3a1eae4d9599b151de37596bc1e301e7833e536acda254c41c5aab51deef773d" + ] + }, + { + "address": "0xf9b30557afcf76ea82c04015d80057fa2147dfa9", + "storageKeys": [] + }, + { + "address": "0x3acbe97a79d748011b24e6df934fd72a98269bfc", + "storageKeys": [ + "0x0000000000000000000000000000000000000000000000000000000000000008", + "0x000000000000000000000000000000000000000000000000000000000000000c", + "0x0000000000000000000000000000000000000000000000000000000000000006", + "0x0000000000000000000000000000000000000000000000000000000000000007", + 
"0x0000000000000000000000000000000000000000000000000000000000000009", + "0x000000000000000000000000000000000000000000000000000000000000000a" + ] + }, + { + "address": "0x0d4a11d5eeaac28ec3f61d100daf4d40471f1852", + "storageKeys": [ + "0x0000000000000000000000000000000000000000000000000000000000000008", + "0x000000000000000000000000000000000000000000000000000000000000000c", + "0x0000000000000000000000000000000000000000000000000000000000000006", + "0x0000000000000000000000000000000000000000000000000000000000000007", + "0x0000000000000000000000000000000000000000000000000000000000000009", + "0x000000000000000000000000000000000000000000000000000000000000000a" + ] + }, + { + "address": "0xdac17f958d2ee523a2206206994597c13d831ec7", + "storageKeys": [ + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x7c7d31753a5fbb06b1031d9c64d1f5f2e847f3dd13d84777e7abb56bcce39d52", + "0x000000000000000000000000000000000000000000000000000000000000000a", + "0x0000000000000000000000000000000000000000000000000000000000000003", + "0x0000000000000000000000000000000000000000000000000000000000000004", + "0x45b1147656da4d940c556082f0e09e91e3d046c1c84468f8ead64d8fdc1c749a", + "0x31adef62206227419133dd9a6b4041532c22595206a596cf74f19493bfc8f368", + "0xbcdfe241f132b38477ee35d4e497c725e4d20778d490eecefb1940b28dbce0ca", + "0x8f8f4e5ebc661d2cfe711d2365fdb2c9b0b290579b5fd3b3f49306b5454e0926" + ] + }, + { + "address": "0x1111111254eeb25477b68fb85ed929f73a960582", + "storageKeys": [] + }, + { + "address": "0xa7480aafa8ad2af3ce24ac6853f960ae6ac7f0c4", + "storageKeys": [ + "0x0000000000000000000000000000000000000000000000000000000000000008", + "0x000000000000000000000000000000000000000000000000000000000000000c", + "0x0000000000000000000000000000000000000000000000000000000000000006", + "0x0000000000000000000000000000000000000000000000000000000000000007", + "0x0000000000000000000000000000000000000000000000000000000000000009", + 
"0x000000000000000000000000000000000000000000000000000000000000000a" + ] + }, + { + "address": "0x0f7b3f5a8fed821c5eb60049538a548db2d479ce", + "storageKeys": [ + "0x0000000000000000000000000000000000000000000000000000000000000013", + "0x000000000000000000000000000000000000000000000000000000000000000f", + "0x34432d6655b7ea717c1cde6d598e2b89182e28af746aee541dbfc5dbf96e7790", + "0x93c99d828501daf4802e8f8ec30800a6b9afa6e0c6861a289dd137c414ca7252", + "0x000000000000000000000000000000000000000000000000000000000000000b", + "0x000000000000000000000000000000000000000000000000000000000000000a", + "0x4bb49d2707edd798f854a8cd20bdb94951b0d17e51677e785369598e4b242c51", + "0x06a07ad4014e07909adffee1e5bcaac547cf73b53186dd71c8ae501d9d4e10be", + "0x8ca9160879d6376583aa70feeddd766e46e9f1c53841865d0f32c6a3a471ec1e", + "0xa3ff1d91559af46e55ed65a27d7103747ebb9dcac3dc47a1bfa6664016b52b70", + "0x0000000000000000000000000000000000000000000000000000000000000016", + "0x0000000000000000000000000000000000000000000000000000000000000018", + "0x000000000000000000000000000000000000000000000000000000000000001f", + "0x0000000000000000000000000000000000000000000000000000000000000017", + "0x000000000000000000000000000000000000000000000000000000000000001e", + "0x0000000000000000000000000000000000000000000000000000000000000019", + "0x0000000000000000000000000000000000000000000000000000000000000020", + "0x0000000000000000000000000000000000000000000000000000000000000021", + "0xccc31e0fae36532281adf8be326474f0b216671b0c21e2521a81b98f521118a0", + "0x618059526d7d1bb5608c8e3a0740d1f656fa8a764ecca600a8e0e3e0c313ce66", + "0xe8d408941fff9b8763fc0b689b4d79f9586be7573fcfa4add61085b973ba93e2", + "0x2a1d939f9dd63be28b7554c00edf4264526e745e46ad47db8c3200d5df623370", + "0x8f650335965e2c1a0b415df0b9921151c8979a6fdad2cdaf6b9a7248173632dc", + "0x3f1a8936e3f9531edd95b50ed9e5cf13d2ff100acba4dbfb45165744f3524c1b" + ] + } + ], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": 
"0x9f4e681ae589e78f2069f52fb291354d3fd2896669b27af6422fbdbac409cea7", + "s": "0x131b11bb24856075f8190d4f3f87e3b351540fc5521648b8c48c617d939c420", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x97", + "hash": "0xc5ecede0bc21ee2b5fb00b952aa19fae62836b57c4ecd6d53c6777ed5ce4b687", + "from": "0x8c8d7c46219d9205f056f28fee5950ad564d7465", + "to": "0x631e863eda8f738ed7b3c19c5a8b1d8f43480dc5", + "input": "0x", + "nonce": "0x3bb59", + "value": "0x3d7a126b9bb007", + "gas": "0x5208", + "gasPrice": "0xe569ebfa3", + "maxPriorityFeePerGas": "0x312d317", + "maxFeePerGas": "0x1bc4fd6eeb", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x5d2df47a7dce321c62c2019f9bea9d76920034e32af9b6afe3438ddc55b4af04", + "s": "0x926a12898158285ff472c01edbc45933b10105a964d8488e21aacdadd743dff", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x98", + "hash": "0x007744b83af0349d99f0885a1157c5db75aed813a516b1d5436a0e0036e2418f", + "from": "0x8c8d7c46219d9205f056f28fee5950ad564d7465", + "to": "0x8737df69e59cd024402784fc6ad393548ecfb320", + "input": "0x", + "nonce": "0x3bb5a", + "value": "0x67d9b8577cd418", + "gas": "0x5208", + "gasPrice": "0xe569ebfa3", + "maxPriorityFeePerGas": "0x312d317", + "maxFeePerGas": "0x1c5d98b551", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0xba2cbdd58230e1fc29cd28474f0908107ad5f5e34bb1991c5db6c71c2bf05dc8", + "s": "0x39fa7dc264cb59627ef85089576b7da3098bb42f240ef3dcb949db2c6eb97ff0", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x99", + "hash": "0xbbe54103e193983c13d5832204b8346497e79fa9092e994fe0e9855019a16015", + "from": 
"0x802fda4abeb5d0330b2a0197e7a09878eadde3cf", + "to": "0xdac17f958d2ee523a2206206994597c13d831ec7", + "input": "0xa9059cbb00000000000000000000000046f5531aba0ff60cc4b88c972f84f1cc2967df670000000000000000000000000000000000000000000000000000000029b92700", + "nonce": "0x8", + "value": "0x0", + "gas": "0x10e19", + "gasPrice": "0xe569ebfa3", + "maxPriorityFeePerGas": "0x312d317", + "maxFeePerGas": "0x11062ff46d", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x6617225f9217f724a875beb57fba6a48a550963d25e3fc3677985142062e367f", + "s": "0x61c93bf1915a8d19ecef2f860c5dc88cd4ca9559f3fcb4a4e54339b492bc0b72", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x9a", + "hash": "0x8d17bf8ef2aeac1848ea4491d8de2990916660cc53c964fd86da37e096a98a5d", + "from": "0x473300df21d047806a082244b417f96b32f13a33", + "to": "0xbe5dab4a2e9cd0f27300db4ab94bee3a233aeb19", + "input": 
"0x8ca5cbb900000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000004b6868900000000000000000000000000000000000000000000000000000000000003cbcef304ca436bbc429a20a782b0611878dfbd5c9bc4dbbb1e81d8ed631b0d377a323e7f121cd840f3ff49680d7cf577073a4d01696ad9ca017a1618c94c587c677271694646f483067d64c7184a55ad3c3523cee5e9f7f8f578aef22205d23b4ab29b24fd6557590fe399f7864a909d9205de3cb1ee86e5ce0f6a136fe5709ac5ab93fdc535fef969f59acd79cd02840cfb51249b56fa13358311a38affc4a7af41c7a5ee6bbce27bce01e1497f12d6ca9cb5bd62c7987c9ca297c7f599d408259124c5ea1627648df292e17ba80386ef4b160b335172b30d29baddb7679a287bd3fb6a059f1d673ccc136b26afab2d02dfacdaf206bf1f18cba97e809da1154d7c584ef5b46c08f78e4f02350e716dc45e1d5e04a610631e7ccdb8c474d52f0d0566e74953b831a10266c11a5cffec0a10becacfc83ca3b7d358b8a632e1eab2bbcf9f3c6bc0ebd5ab545760caefad86364059d0ac6c097903f50c8180a56c4c675b1bf22e99d0ccaff59a18e896ffb5c1ccc3c7bc40e520ea6a6b6af332f96d9e29ef91da32dfe16498492635c8fbfbc240d24269db3d7a15b9d3000c3cf85bbf8d6d38235a38d7bca984c5fdf7eab5d3bbe60680007359ca794b80b7337253e8c8025a8ea13630bb808cc7dfdeffe001ab7b52a700ab2c70a743f42d44339a6ddf6ee6276637a867c898a34b2814670ef7683a06bd5c40a71841a3149b32911d50c1584fb50ae5f3208cfdc25c0123cb0b6793ca2acddc4fc4c49234daa6315a8c6b7b031755a19c8e932d0de046f4a8fc0e9f001c73210e7c8231f1015d4de9001dc9cd05dba773b9da65eab3d5c66015ffeec5a96738566d6f4c813982a42789d5613eb6063dbce166f2cb6815d6c2ee6df8b520956092c15393e613f6184ca240a8e409921cffc855ccba5b07d20256b0048803637b88aa2ce71a63aeb89b9d3e2c05e72f2464b2b96f842b8c756ae426c36986b107599c93e919077aaba3b544dbcc38e881b62da144fb76ae1a8cba54f406fe45ab37a00c2e329d13a09f5a8b03d54c817b6129dfc831aa9b9c2ef49d49fc3d09664ca3bccc77f23981d116254157e3dcf80b4c629a375c693d1aef2b606b70088cde361e198c6eea901813d7c914848c1257451f3cbe5f43f62cf9307597d8747069dea3b4cc04e2192fa046ac533f696c9c8c7b3410dc42aea3e3a8c4dd9d397bd444c248b7ca29a6d70847995ba23a2761be055e58099cb2269257da484b3a11a4c887c13eaa4ec1bcd56
03d08e445be0fad90898694ed8b252a96af09b391100fe07e1ad684a058a14487c87469497be92480fda144d98d1dced17c689965bdb2539aa74744ed5e0f4de4004259ca7f4b03b9ab24c1288fb5dabae4d2e69980b6e26d515a296fc0fc0acf80cf9643549a10c99e8ced63b156216c81d1a193f02c834f9fdc00e73bdb1b980eb127a0f6430e32b35e5d99b77baec88ff6bdc08c0397947604ad4e3b0f8807e0a9be02cc688079bc8e0db7fdd707965c2a2c4d7236c3b88f9f84e16b50af98dd237348ca13cc598c4c0c2978437e380520b19912930b4d7bd0663edc21b99cc5f771793b5301e6a4584ab900708ef99669e32e326f244e2ce4a2b2c23b3799df85683ace98d3d7bf15b754f9a46dcba9f7f15c7f63daf7d42cc688afd0d14999822864d97df532b92a37b18adc79f82bd87bb9c9a51530a4d42e83ff037d17bbaded136630b38bcb87db6939216a324db7e3453fd9a697599c1cc9c293825e422376e3addfa4e40a61f12c4d45e3f098e55399d97bd8e10b54ad18259c2ea0933c2aaa50635066cb2ecb51d8a32126ca17625a5d2885260209e9afbfd95e95861acb7439f1c854aef71d567a71b429464457eaefa1ccf807171511c7ba207fa7a063dae0806468580b4758e65c192842c5361a5ab29f16f7d3531ba80be56208a55c4229e07b4b741154104a2e596d1f8c516dea2e0a44427c93d4c93ee04acebc7a57014759b04a320e48e34b4e6c60c5e5dd1944f47506ed82ac99c56d278114026476a2c7c38f4d118d6344b569ed508a833c06644cbe987432d6ec33bba678a67de396c769f2fd3d455fe2670108e83a11ce056620d0624b9f494fc17ad6e001bd8b41479cb125163a12480356300dd1c4d6536f23a8a870636736e6bf97a16a4ee16e8dde9edb11ba72be0e8f74d20641e726f7b85f5c4c868c5d092c45845981e8850493504682260a98bc7ecf9826fb4631913d30a606ff16dd3af04566eae8d4c52b4d8ed0daa4acbb964933b76f7bae865d4ec8e749091f598a537c26bacc1ea7a550bf2dd6af5bd140574221ff201055f8f3a4b30ce8c776d5fda7f26babd8992df329f92bf4635f0ede8e2d5db6b8f9860abc017796cee6cdbc05dc54adfb9a0847c9c3ab56c79de8ce3dbbaa9141a1368574601ead80f9b3f1d620d063dc5522026039154f2c68ed916b1f62bd952d4d774f628db84ed85c60eaa4dab44cd441bf5af321d128edf8afacb92264d02df7e3a95f6f7958d1b9374233323aa70d7ec97273eaa86a0e975cb5916b6dbb15311410eb1e339b7e9dccfbb480456792907ce4213c403e92687f6229a151fc0c5bd09979203f796edffbe6817f9480da2fae05c02030aa436187571d058bcf15f0e1b45406bd1e60a3103ea733545500313
cd89af7d4a1d7f40a5141ff0c212fe986fb7c4ac7d66e203e75154d7cd7bdd406393de0a5e4b1f547823b6209c87afa34ac8565870125b3235b3e6e91e4addf9d5ed9c06cd69691999ede0fe7eb7e153857a8f338f9349d1c85e8ba29518eee7d436b262664f146849bbefea7e6f578487fa941e3c02ada6583926caf7f48d19b0e622962fe1ee9222227941f09bc65de01cac8327e3dfa35a546fb4cfe74bf68aaaab30cfedafc72bff1c119d62ef29d2027be59007fbfaa856b82f49d1c943363d712aa8eb04d90ad0784e846ea6392d9689d16113847a2f5054c65b072a5bc0eaf49be4ce6112ae6c134fab11b0ea94c90c094d2c4d2a36543ff197ab4ac6a94b3cd8cef9949e786379c156285b309cd3bce35a5f237179c96ea9e49e47100e5dc080418473d2d3d4861792822d8888c3c0fd69231926dbaef36f0cd5b67cf78d3df79dbf8b568b33b2df6d47d0ac44f2225354d769e8ae057d31f2c2a8ec5470f673b28a5efde1edb74f5ff49ff0d343e97392fefdd4cfeedaa1467244bc1df37dd4d62105c1cd3f132d6aab5d15c89d698a796e96ef95dfe1ac6f472b20af1ae9365e6c0a027efd13aabd6c0931f78244d6069e0031e3e58e73e25fd0bf1e08da05acc807cd6c4386cbc7f2bbcdc9bbea21ea987d01559a93e24731f9274c651abaca9a1eb234482390beb005c22d36e8199a8d083243141ec5d94ef8874b12ece823db299a2c24cd55bf87cd5741393fc301419e67cffd12447639b3c4a0c518b6983f4089d51441ff8b2d206518160188cafae042b5abcd8faaed4518ec3be29cbe3738bd63e545a3dab2bca2a44b1b31edb4cd97205cdfe7be64373ef7a8eaa1171cbcd48731f9afd91bd111abba326dfb21380354765e18b9f1d0d7d0e062782392b5171b294e49b2343019f7562366c815909f86f60517140d5702b695dc6137b86f1f9f4efa03697e9fd9da5d8a9f0479fe15ba64f01d87cd8af9b335ac4933e84220cdc2805576cae293abb3c7df5e6b5e9f8614c5e2480c158ba44b1a04f510faaecc21d072514b6f2cfa4417139aaa0c8e772930ea7f8fdc476602ea8052c397ebf8a05cda7fa4118033718990f39ded2ea6cef0783aaeeac10cbeea7e2f9de6eb86a1d70a2eacc120f13279c897abe01e0b85fbd58f7372e093daf8b5132a93e5c742b931efd64beb208cc7a03e376854c3df6080301ffd9aa9fcccf596f44ed6883716c55fd4cf0cb457c6c90950ea22328c74f21ccf407a65e1f15279702d4d36a17ea6eff6d1c2046b8790c30aeb257c09a158665124b119e7dad64aa27f5e5c553f6a15017eebe50627af7774fff2621e0144b95a4abe1a0c88607237a438f5b18b46f97fd5a3c8cad9502dd6c8ba38ba8752f747e591e5eee98a4135d92d
780214a6da81b774d0d780d212ad9ba954f10ab134f956c203946a987a509185c0804b5e7d0752ccf326af5c6719dfe0dc2bd4e9b51bd9afe793b5a38cfeda92ab94f35ace111839029682c0094069ed7305b348cf4de27c02ccf33d0e933c46700973b6bd2f89f68d3ee74681528de5f43ce2ce6b3dfdd5ec9a8bac17d3b9fc4cce89fa7bc3c4134ce2a0d3e5b66400430e24cfa094f711a18673e1769d57b7cf76bced57ec4c94171664cd7a74200b1737dcbe0124ecb9b9dea6950dfd9d6a9ff931ed60467d6df654cd7311590bc9cea1e1d3555c96eaabe84c0730c4eac9a332bd8dabb2f8d8fe50558d74dee756843c5c8f566d0e3c5b271bf9288273451af89524c6d3a0f587cf89b2274d41e5e2f4d32fcad854bd93de2a33714a972f5f93331b8140f5e205d05524b5e95874813852c2b194a412438cbc4fc22fc49397cc683e2d3fec5f3c7d930d328239496d6702785c452143d7f2c4619d9a10fd81ce0fc98101aff2dece3779916b97729741ffa82e46075f31e7021bf24ed4258140106de1a9c2814f3864372bca69d29666e2c2472447443615438dc281df7c10451cf44a7d0eee4273dee465b0e76106dc3594fb01dc77a19c47eb6bd55dec7f2915570b55416699cd996eef496c3f15f5cae5d92af8410a4f35882d4185fa00b9404509e63f9e4057007bb4f17678f1af527a406e03e7a85e9772db5c4e6c82d29ca8b2251309d07550594de366aa0b98c63093e321a2cd5d03c519d08c70223a33a5cd70c90bac06ee649873e01354fa9f76e373c1a90bc96500e548f6f613eb0fbd9c10beb6bbd56b93db823013c6d6257c6bdf46a8a63cc9099f617cf2f033caa9d97c4238dec0dffeccc737e2f8d83d29beadf506446d6b022a8cecd7f1877aa229b8b78caf6b2693d890f99661baedd7a5fdad912b82c3ac409e4f63ceffb244b15ce670364da6230edd80dd836f0334a0133947e8d5a2a7688e3c391ab58701f7fa2d8491a228bb1eec34fdef54cba0f475d1ad25c87dad1cadaf9b0f483cb219ea1fb1835d678df0903d42b2f4b4bd98c6549966fac8ca91e7c22bc0a53bd5b18b8580856416f65fa1ad3aaa149de091794fd277e1c1107e54be40d8ea165825753baca8111c240151749f696163965f7688dd627ab71117ca028a798c6f76931744edb3fe8ae5393e2412f2516b580f552838a030949ada3d0b6ef6775c87a109ab050b63d00ec782402ef99bb67e48ac69750148701ff5993e9129886d39e0fbbc13ed3a271931b9365515a30c9f858b7b97872c5fe0e2d5027144632c25132d61dea169ac395f9ccdab5eb82de22fb4b87129c3c231440c3b61eb5b816238ef80289f6ca1f7d0cc15b76664015e86e113fb041cf69408bfe1148c6a4d9c08d7ff12
5969d6e057b29ef3b0bc70dd90410776083c8e0a972bc166f92a4edf48beed36d0cbe59f9fc99f8015fd292cf3b6b74a597ca24dacbfb4d18483e023008d1781b80fed53f605436e81584e7ebb68ce1733a4d266599917768b397d841b35d0db3a43711658a34adf205f5ca8c371b55cdd48a00065a52b752f3ad34e8e3fbbdc8217a48b366a47166b677ab5f6962c71a53017f3987576714dd15a512c5539860bdb7276407d4647a49119c82da2f6ea3fa0caf67255033e7cad089b771aa3f2cf116d0fff397c39a261995dd0f5ba5eb626833a50677898fcab84e925f05415f465d4eabc17936e38d5f1a54b7fc80cb32bc509eb44974160c57bdfe6b99a629707250c40b685fa4be15905d1dbf5b6aea874711da5651674adff55e2810b2a9986f64d962e04f5b50d8f8e7acba2450700bfa17785f2993c1c129146ff3193bd8b5944a5332bd7931d5aba1ce8b031d990310d5ade460ea12632430a98238a32e4c98eb28885b4d669517cc7d6ccae426da1e97210034bc7bc75fd6f36a4baf073a5f1d83a0aac3f1934abbaf2d41b3ec62722ac8e197777177801b3c84e07bbe133ee3ce247b77730f7ad67b7013487acb6005e63698e31ac3c81efc2f0989419719ef397db0cf0f40f5d4f5055fa089d8105a798dd09777b1f53d92274fee688a233d9dcf22bda839f350992c7e98c4dec9ed152165988913d2666fb84617f2d188a2b8944ba42b1280f8965c20948e4d2b6c1da2129c10c477736f79e087d4c320cc7a737447dbeea4e73698b8be7ccc7121caf7d1c2139214ba0d57718773eb7736091acef1764567217459774296c385ec997ab8490b6ec8a41fe17b6068e659d901c8d109cbcf1ef068f393827045865c813a52e96ec7bbddd6f7a24067add80252ada99966228baf700cc5dae8c53489b640756bd35464ffc6ab1d7d095668814cf4571783aef312e683085dd29a6c000a68330b485ae999132760a4af3ba331c3cd2ecce9c6f9fdd52148f8d6a17f23d316ac16d4c4d50ba8316688d34cb369014b1c22a89003f21e9dc1ee7755a426bf2b0dd5ac30f1669c1c50cd2edb75d4535bac6d3ada5a05fcb2eed05afacc7625a322b12716af9fc7affbc0da2accffea3f47fc9cde77622f18ce397cc0179e41c53b1c92fa33855ee35b9b7ab5b19d60b3535c14b3bb876f122b1537ff0acb49f6459ee56d48d1d75678bb769e97c0ae1c48bb896fa3d8ced00bec8d1546c4b52809cc5b908b0411bf70415bc85e7b6e5d6ac27c1f9112edacf1ea0cf932248b2f18ff89fb2cf12faecf66d81a19ae64217df7fea3c76c1d08b47fba1c004349c039616a79f14112dae50257f70f409225c950ed7a359521e3b514b7057706c58c781cb2547048be43ff950b0890c4ac854bb
332016a3effbd264cf475bb89970ed4ea02db628c1ecfb6e3a37db2edd158c9c6aa32e38fb2994f5e8060a08184dcfb5bf2aca1a13236c8245165b7fc88f220ee62c2ae662ee3feac86ea6457c9b3fabb61f45dc66eb227407287e7953bcb559d3a7dd84580e2e673360248b27e29174dd9daabd728666e2410b90f8e5fe20124321abfda071ab09dcc192de61f880e1f5d5df600cc9e30cd0cb4f3c6c942aaac0b05bc9a3e6bdf670371d7ea54ac01166f36fdb24ed046e650171308bafd893ad5c53c5a0f020d46d7c102e542529ff23b2ec77db85fbf3374f71166bdaabc6188dae23a20454ab23ccb933bf53717dd69b76f86c4c797992d03f6613a0bfd8eaaa175b39acf331367de92a2800cbd5a6e3996fd094052312bf133b5dee5c1bfc0a35ea2bdb2db5b3831cb5d4dd312781a847b80d9b129a4ebda9e48cc9255a2d81febfd1cd323adad6ac9437ad6becf8187fd313765cf08520d2e1802a1dfd18781daaeedd8a7f7bc5e23afe3359f0eb59a3e02300550d1b6d49b441190a8adab2a777fb2e45c325d6a4cc89dc1166f8b0d674498b7fd5258fd05fc79bbb7d0a8b07f8580ad67f079ef5601a29fe38411e6fc31cd998835e53e3fb03cebf0c75a741b62a506f8d90a27363a2c11aced12baa5e1036a639db918acc25ffd10cc22d4bf59b3a09cb606d59f5ac8df2000808433eb0769c58adf4156c5c412341decbaf57caea6512ae4d61e88bebb390457b3c4b1f21d0e60b52ef3ae57b9daab69ce54c0648fe8315830216c6a83c2745b2b3b6e77c54e0af49614af331f7fd04bf5b193fc6571c59d42e355606241d99760c32fd9915dc5b1c0edf9501b7849142c6dc2ab1c92518a76b53edb4b3bf9ce617007a0d068b5736113d4279ced6a02ece08ffb4348a9886a9d5730c4bc31bbdab9686359fb5d5ceaf3c41b1fbe276d2311ed85e210244699864f56cca7c756ec82b7bb147de470ac4544283d705b333a810dca84e58e11634bf61ef53c3120b2ddce2882b02efc238273c44f23a9a2af772a502c2243ce869d3cc5d5e6b7a478b01838efb246b30c22c9addf611c79f782c2a2ab34c5eb03ff5bc442a35b3c6a4eec172f221a44aabb478ca858b2c9fbf807625480b9f65f15dd7d2ca0f0667e314cb243620116a508f6e55c2f34aa3ab615827b787549aff32e834a5fc005c4eb1cdfee4a1dc1ea4121cbe2008b46201ae97ca8d21e79a5dd89102453396c8acd12363898fcbd498bf8a9063b5704a716bccea894a66912c8ae082816a1a9d74333cee2d73587f8904f320cb3de3db79635288412d8a9856187c68fea66f8368c40165e095b71ca4abef1b3ca27bd42a74bb402b7ce45b32ffb99be95b718a2877816d6951f0b412261e13dfcee3ddb95365f7749a
03ce64469dbeea96a787b7ca35b1d56e2d924ce8287c0d1f0b15a2ae48bd4d87efcd5426fd2cff473e56e4b5e7ad25e3a0e2da4f38719ef9e95ba179a625d5b31c67d57ce2bf0031b00bd0310db64899100284564f89730b4c32013486d26e0d3891ee081e11f4812c38cdda03e93d2140d6f0357e7cbb06f5f96b67c44b059f0a3db22acdeae07e379bd252bad5f74cacc081cf63f9cfbf1eb0214a8e82d9de95aed6ec2d8d6e03ee35ff1295032e05f64663d13c14ef0e71a913896d659c90018410db98f1e685515c8745003ade06f7aad4aa782a05073f316c2f18acd946b8160ac22709cc7c67b3f8aac50f5304d4676b44a6b424f93c80bdce0b54cf86a82581b01025e08ed223539d4b1ef89ba81c27ebaf0a80a23a7e4846ad9198375045019a7a7cf34052ec0ae4346bc426ea72c77554038d1b4e1f50467ff3b0bf124f4a3f698098781458c8a1f9be218986b0a0b4bd4994482bd17fae4b0b31e7c9429d9c30b29fcef3069a8579a812bf1451829729c5c2ac91db42874d91368cfbb4a3e6fa05b15001a90524b10fa74e03cfe3992fd84a1aec4a7c20fa0f14c7b6ee23f57bcec5da633580362a8def44df76c44ae29c6f8760cdd22bd4aa9ca86950f36d6a4fda6a9a346fdd455d15ef882ea93822350b78e14213f310f10c7f8f299a5d6454580d14310c3735c103b0382e0d434894a09dc7ee0a4b5b486e4387d0d56e692a93da6d1262ac7dd12ff8063c284278191d3ad7b8e310f619456a2f031b206c0bdea7ddeeb09730923994f06c385a0f17b9d07c20111a31ec3f94d708469fefcd92eb830da0a62c86013354e8fd6579e2a25c28433ef82a64aec3c8357a70e266bc738e1324446465f9fcc323a3dbc8c35d570f45bdf30d08ae397f74ea5facf477d1c21a16326d10bace6fcffcc9d61d03957f690a5988a2519746d12e4cce716e70c73a26625a41ac5f1e0ccf792efd413f284adbf311dd0ba4d5bfaf4d5865e2cef4dcb40493d3d2bced782c37d5ea7c663a6e3c95e30d51c3807fe2fc3555f1ddf27cbfd8177f963ac5357cb19c6ccef12bbf8664716d51cc4e7cbc387c46044c77988586f77293fcd414fea93c24a90bb71b0f64a3480922c4f1cff6e66a63f0cbd838ff4599bd82c441e4188d1c7696d68e34270106fbd545589bee4924e55ca2588152a320ea22d9265be27b1face477dc6f907fb485e4815ebe11554c78be287a8a89cd380bc97cac5eada6c35509491d4c0e6e06d26b9c983429e8b5d3cd043def4d37c7cacbc5e34034b4806ec52bfcbad5e496ca88eb1b47f339acd13f7c6c915b0382f4cf19c70577eaa3331b27f1a13220674e2b297ef18ce75982cffa02b99712752cdf45d9e3bb5cc77124e742d2c35e3adac9036c352af68b8e03
ea5a658d940dccbd0a1fbfbb4c55850829b53265055d136b08b1da6f34531bbdf2584acecf2327c2d3d1173bd0b022617e077c071fba88997afebf0c6328f6d6ca8586f44d56911bd5d8fa34d3d9052b27999c3874da815d60ef955d38de276a780cc134427dc938c6e160bf510e697fbdb1f2ebe2c9aae78bcb1c3676291a0b39d28f25f1e21421cebb1159df3719530c0afdb431ebc9da5fd049586364190fb535655d14f43857dad95f3c4fe819074265489746ff11ad2df2148d3a5f2e4dfa8485a98aaeecd07bc0f9d0bff06d1759237bb0bb099492f55121c0c6529ca1a8a6773665ea2c04797a50ff41623215ccdc504a8d530e150a2ecd63e13467e437c53562da691f92ed8dfbe7d240d814558e97bdcd1551553287edaa6db36cfcb7925ba3463bb45a2daa8b391ac99837839c7e9b3cd4b32dbcabc5141ceac8b0416c553861efd9407d95444f8048175b88c332a0e6c4183d3eb09779fdeb26701383d5dfdc995c3350119566212456f9493fe0fb7d1b434e86f495ee437f3e1868b9bac28eaa41b35fca331ae49fb08c28500a8a352f722886d639a04cb72b3de6f551e1888afc9f07c5db726e44fdce412363565810b8f994d806c965c8bda1275224e02ed282a539506b409451dba944ef9e088a4a23cef3f79d712eab704395316e4bcb823739680233f9ab7754eee75fe38f00987a8661983c4a81bf2876b8df820738ad1fee66fae571e368b985c44cdf9de3f7d7d906a12515fd88c1bf328313e10af2b7ffb6812404fc5fe709062d082957a4499035aba59fccaedc61925609f6bfc616dc50e3e09faefb3c1873afa640d77699e0f6a26d13956802138f660789ecbd32f95c5ad6219ecde78bd3dd762bc1a5b78bba6852662ffed82281ead20ab7d58b74979c4664644fb197ae760d781583a96926fd589300d03eff081e82c3e2828f2bee50afe2d579d1e38eaefb676a2ac7b719a0d25c6d924735f31d92d168ce4db0bde7bad3bb7a38b94dcf30307963ed523bccc02e44c427b3d25b33a3ce8a3d14bb132e17b5ae3b1b36a0705ea5e9e2dc27ae47b2caeaecd1853755162214556ce58bb81d678f344acc5bb7107bf0b9eee29c00d030b8a1f19c5d0fadaa5b8038daa6da99fd4b88703016e1583f5dd5260dcdd04060245fb351511d87a47d854066f37cdf629a7e9becfbe921863efe0d2e51b155008aca07a4df718958faca1c2324c77af35b2ffd766626f5c5b7ceebf2a96ea5274ab78685ffe275d1291fb3c6de448b69866b5d6cdef272b342214f91d29fb75ea04877f07e17d9ac48e2f82d858f126ce703f94da429b914b90f150d0a99902eb6406f439c83bcb962ebf5969f4260b8934aca512289b412e958ae1d983e2767e05f7c07ccfbdcf3b3a418
37ba0034623d5a764f3f7f22c1f56cb80f914c16e0664ddc5356a4473ca2cd3f26285e70ba04d036e64e2a2f58d5b362cf5349c9342896a79b0bd403dfba11d8573a2ec8b777d06899413f3e90231d35c1aa71f656cc2a256852e8bbf2221e40e6da238dda0e5f3ba0371daa92ad2cdf9b9c43f00fc76f2c28e2776c2e28cf3a013bc5a27289069223afeed90b7d98512c311e574abfd4a9eef4f28e400020de5d1f4be4bd9c3ea179086c7c4beb6d1af63a5016fe48315d784ed7f1f914d194291149fd2af6edebcbea8f7781aa368779024eda07175b6b735b11f2a34549ecc922db797ec7738aed32407fd648e8482a6e211d384da191bb68a28f7ce745d059ae6d8cad28307ae937df6875c00f9176e8fff9692f033ccaed590c543637d16b689419772ecb0e7876928680a91a0163bde7950cda2e6800576b2f557a1bc1c2902c7479d69b45152f5a75c3c843654490d43d8fca4b90a42bb980d4d7640136338a587e4511f2a5d9f5becabc4d50a1c052a8b4d8a5545685a16594d0b810676eb11cb9067c14b91b1850b6ea49b25a75e0337bdf6ffdff558ae56ddeea20c1db9828910d71c8034fd05ecbfba07e7a952a2b1a3ca3a9e9d3ed0ff9dee26f486e0347f4f3775499610d4610789f2a57522f5dd1d989567eda6bf441383fef075552929e6c4bdf5be18cd821f4a1dc46bba542a22c6e8583c0be949ae088caced8258937c7c2f9b6b72b7f7b635221fe2a1b08ff5a12ff2333463884d3683a62b06463e0ca0d314a3c68913cc5801f7c35b106d7bf408296b65d0445875bbf6e7dc04577176f218adb7ee5fd7295e940bb7c3f3700b26110bbae3cf1bf5b8db7cff870dd4e5971d3bf2f313b8b740652348043c39fb407015a0cecccccdaa31b1c1cffabbcdb67e71766461034daac06a2629285c66fedea2051c325f1d3102ec51fc87810e8ebaac1272aa8e218a5d343aa62818fc88a49fe9663a513658142afbcce60d2cb92aa0f0051bbb7522ffabe1fb1e56afe697e909e03813185d9e8c5ff4418c2fa3ae9d2fddc58f7ac25151ee479db1d1793c2e25d0a8e77ee444f9f0d005cd5970657b1f48efc7d124e8799c20c19b21b1e23bf1d01c71b60cd330486293110d930e8ab755d08ea387f0a662f1281bcab42f49357d300d7262cfd68d3fa8d7556b4954ab40a88a8ebc763624bcfd1b09b88404932e3f6e02877c796239da43837cf67db5950c6d11057a662e774042eb92677ab57e125833f3f67ca59624aca0bae5f3882b52d87ae14997f3bacd0019a7704f5b37d68c0c1d87a34a0fbb5aadad07e5fc2027af22334bf97c94d7d47ab3d046a7b14ea58fad2cca5d288c3696c26d493d625856d298196108313d0f42862e92d959b495167d784ed488879540726
ebc689b1f0e34b943dde61f64e9ba453a35f63903433189ef24967fcff628d9602c3974ec28cdd1f4bd3dd2d69af5c1cea7407df78d17939c134089358bf213d7bec2aa245172cb6f4185fb9d211009e34106dfd8d04d473ca527f40c11def23ada511c4c74e18700b551c5f8ec5a42c52dd099793f5378f4c7a0d97216da856f643f1b110e399f30ea5dbc51362da7f3d8db498554376965f401eb7c7733624372960d45de27e8a988ef726db4632837bc3475f00ef8127ba856e8d52b1e3b3bcd1f545505d7842e515896517f14e6cc3cac684aa0922920c366b98f2d37c7dfefcd083d2b5f78cd67e5cb740179182a4df6211a40e2ecce2955b7851f0851554b6de8c91bdfc2bf119abf33d304d1b8743956fcd05d40514618c854bb5d28c6f270bc3a7a4ac3bce1df04e3be6f84f6a975e7be80e1032c6d8a5f6b75ab487e4343992793cf4a9666887f390c5d38a95c17100c8ce3c1ada947953e0938cc8adff0515d4e9f636bc892970593305501c5351f909b4a5b2351fff341db75841d968a80a085d611927e57044c5db5da9743ef78598a4f790103228e4f1c0db28ec84f5d18d79187e150d594119652e3e5f244c05c699a04a414d7af5329666729580939636d5c7206a5b0d9b0945ddd6e877c4301a24fe08f5801086dc674e35db67643658924ed89218f82b9508869939a8135f311448513712e1ad0b1fcf2abb09a91a2c637c7cd7bcf1474da143daabcc2e3fd60d5a66817c75cd7bfe70b45f50ab5b82051375c10589dd173d5e53fa63b35d9dcc75e0390b4f9a999b44247e9945404166073ea5c2504c5695ec4b292d9a81150201903930a6e4fd9ca3b1ab6eb6581a48e10237fb0c321dfcc8da7b27c5fca05b4daa0f0e150f6a7a81eb976380b05eb7123c6fdc03d4a0585fd2c32500e03be3a84b25a7ff0b904d5aacafa79a79ff80b321eb808df4d281a51571f06136fa6223c87c12a205394b504503adaaafd4e014086a60553725de9673745e71cf29118d5b6e9394e3c78dc8302264deafa4e18ab663750e41a0ad25f3b2d2ec12d92a20cb9a69b88215c829a6e68b139918600905e8168611cd4b8f9dc2ea05d09cfa7432d2e0a05b20e5e7fc86b65832aecba3606129aa92c9132f73e83cb5bfb4102d2fb5f1d04c3c0486f49ee3a68c75d0f7034d5acc34e44d786f72371dd5a8882cadb1e95e5e0df401e55c12a1b133abc222d0e8136e12775eedcc0b36afe2244e031cf32c0594face6da093f307cbaf5871a7c05330efaf679ba7864fd4127435bdcdba9b790f0695d3188bcfe29c0efe738e774678686e108ef41debc18e7c73f82a1268b959f4baf47fd8c148fab615b8c2df0eecd8b185f7f3481f0dcdaa8e2d1f2e6742321ad0d9bfa05e2e571cc1c0
c606091a42a1845ca3474f1aaa8c39b2b1325b943316958e28b525c2ee93b377a7ab9703c67d231d5d88c674c1af8a2f52540c703852057120e387236e9c416bc44fa936cfd27aedd04a2ed842ae1f66f203cc39e60eaf5c63d59a70ead56c970eb08e4ffb954b3591aa46b27a0c45c9c02670856bb12d2d2c1d5f174ee21614e8d0ae30905de380ca6ea48d28eceaa9412ca9f9f9150851a212beff6326c2d2a3e6b243bdac6f22a1921691abb42f4af77d4e0a444e6a59e8c3324448fc9e42884c1db844007e06e7e158a19719ece3ecef4cda9d2623750dabc96133c05c1dae080a4215cd067fd8e68b1f2d37964cdabcdbbdaeaebd54fffa1e7d265334f558a9662b4ffcc6f58ddd20201287aad75eaf4096f3029b648881990e1ca31772a0ff2b3beb9ab1beee5c979790d0da456b4a069fc913c2fdb9532ca713c4cfecfc7bc18908f24d6cab836196e8910ed58f500f45cbde5abc257ad4cb7a8a40599b1df12964eb9d90d529e21aca5c0df8f4fd3b92343a5d8319a099b3baab55ec3ffafe8a5e3ac76764a6fbca77857dbb8c51cfaf35a7ae77963caacda5f43bbb286e0b6d82ad1cc7b2b18128d776c73e71f7745e577805e680001284bbe43ad889d990276b6a4db3ee9c32b1539bf0670141b2073e072f12d60d8fc46b29e04b10cc49a73904d512a9d128e6e98ed53aafc74b2eaf78af62ce7676f304d70e344661b7481cb6a4725f1ff02d62ba97d88bd209c0b22a5f4223c0c8ca93aa372089e6de7efd00ab36f97378daa2905d302c8df2c5a3a09f5d7831d1b0d4b00a47588936013c7ef814e8aa392c7594a589303deaaca6b22af2327e0a39ebe46d37e61f559f0b446827f09f5e9cf23bc5427ab9fcda9e49ad6c5b93f2c3c9f5a53681d8b4f849cc938ac2174753b6dcb740ace2ff7d40930fce787b51a9b6b4c6aa0286dddbb93ab7eabb9ded9796b053802e839d7f599be1f81ef7a1ebc22754697a134dc7b8b77ea000e9e4417a4d45f88a1ed14880481efd610b17938d2b730853262ac07ed25e882931f9ce833af6285313306ce9b63f10209db3f0222ae475a365335c8874219d4bb406d1d641f95798e93a4af39d0f16ee7427b6c9666b188b7531670807d33e9cfb063d790ab18a395ee34943e357132d8e391cc110a775e5865e1a6de9ecd71cff10727d594df466354aee2a1866164b94f56bcf2406328aeeb363633516d7806bc89931b460dcadc38947d7ea7060c0c74ca2e3fd3bb1024b476e7f03cbe4e6ef3d94680fd39a3d97baaccc638c4c46e2f1b5f6bf7eed3dbfc1766e3b3080d357a4130e08adcd31413bffdfb3b97ff6cf4f0cacecda64f87105b8c63b3ddcb8839cafacf30fe9862d21a5d0ee8cf7f8ed4774bd34bc14923bcedb8b7dd018
86973a1e7b65cdc1256cd46ad4bdd601c99a6cdfcd3ce782fe6fea89d56da0654fd4f98f480eb810d80de9b9c1557925fed5daef81550205819da4df92b54ef1a298c1dfb9f53e62fa92b1e8b6b23d29db49d5891a34f5e8c1e887fd134993e9c60269437fef4e4f29486dbd2c4ffaa89bc9ea57ae45cdd2f10a4c06b4e1840756fdec5cbd3668a164effe56ab15141dd88716674c203caa01a078131b6dd87b2440d2803ed9c84e3c8ae272f37f057de2a5727c23849fb3f3c07d267177db67b78e692103f17b863843e5143ca8a442884d83fc93ce99a15f363614fdb5e6077850d6009efd7cad15e6b77149a70b5b5502ecfa345f0b3161f02e4ca3cc30df47b06c75a23b659ebf7f932f5028bb129a274090513a42f8c287da05fdeaaf084131a445ccf234e76349f823a2345da39ab8def8675de9f402ae1929c9f01d5fb65c4918c8b5130c16553b094f7b4f8a0a61b854ad67075a3d5bd031d5ca937798b4af3b1668d820c320d13295c0bdba0e3a4830aab9b5007879573e8d8c78f92bbae7f6ac8426086027834ac043a52b04698b30c9476ddf0f0d968ab733abbba77fc6aceb1a47918a6ff34a4ac8dce7dcaffb43cc5ea2aed20d3dbe8c9124c9a90e11b47b96941a995a5b85c14d0a2767c59e80af4499b846aa2ba8576ca2ee17542a97520de78920437da2c8ae42c9d53fbf0e6038dcf9c32c8569b78a45ee31f4596d0a1daa8cdfde8bb85849d60cacd3a521f37490bb629a046d65060d6b45010446541fdc03d6ba470019f4b62a8eaf9aa345a1a22a99062c0b268dd1caddeda5a2a1b8caf2170f91242890c7e9aca2b8b4506da29124cfb7a1ed557277b41af368a17b066adf4423d075fde611a18dabfaf3dcc19593fb4ec18930e818417339858892046949aa5ffc7cc8ebfc72003bc7fe5cbb5fa3cd9951e848a9c593ffde3e9d3521d48d3566eacb0b868f6bc1e5c4ff731b2cc0e0bc3e4db803c15b3b284375e7fe4f3bd502ddceff15090df4a5597d01d2a033f720c45b48d8ab0fc52e0f58a4e9c6a87bc9aa5483e86e6e66c12f528ccbd6043af9f7e58055011e0f5d4feae4f593bb95a91bfff86749f640dde577fe6b1554d221c316420e834aedc5b4e392d9bb4a919364eb127719530a549d61dbb50990b8b17d962fbe7e124b1fb5a4ed010526187916772e80cb9f2fc8e4b1d7281417b502502dd8b15649d28440285d79908a0f1da10b6c8f6e193eb46340d13890ea4956f1b685574b08a36450b282863606efa5aa6a30334fa8061f26a4c1ade1cd2e6447ad0ae9550998c2c7d3d893445a7bf35b8bd30d81ea43f7c7f388ee2dc5586c81c6154324ef335b55a1b3b6dd64e0af11af7f8913bb0198de35345c15696968f9800017e303d2f1f574f78f73
ebccb24f5618ec807dbf4017ca92eaa8c8edf3c89f949481d8155a545c193a6ff862543333b5d4b93fb1ac6fb18d09cd090066c17b52797a73e04c5a1eeaaa6064eaf6e9078666c797203ccda999565a03d99b50dc6aeb566da07463d4538812c41d1a629a1e2951489061a3fa306399caff8344514a4e8a6753573ac70a50b72ac1a6e8f93fb07ca7e12f7e3cbcad2133c0ca2b6cd8b756c4d0eb72df5b111c47f202dca8a9a04aa8c3ba04e9738e6c4f249f2812889433f88ee6d93d3ba13a88f9826754a1d696186c9a12378c5db5007ffd2806015a99103c5770def991dd8a5cbe3b43fe8ebf63b469976304f6434cd5a6e0d591f0875bd5649352554c0e9b1f730c289aacd16a9d772a31b1b990df978e50f4cd8c9bf3764dd656ca8d5ed78f4168406c109516fce681bf598d9137e22df4a52b015057b3eee4cf343431f8a9936388b15f35614b4a116af24ea9919f4abd65ccbc6335c08ab20a94f930c8f9b93bd71ba40899a66827978a8d7ff744bd7f4ccdf738d6760e716a5936abc91e18f6d83c495a2b7bbe021df76d7c3bdd1bbe5a662f9d7d532111777f0e4ca03924b92f5fba3cd088e5080e012087a0e09058d88367427a277a307a683c1cc0439fcbc146debd2557444674e56c70da1f1d4495bb66576eed11c039c0d41abe3b06fa9f8d3bff437b12d68dcfb28e0ed57b6e56eb563211a825dc94ac3183bc8fc0ff709ab56867b199392b979c85aa2369612624f11de9cabbd975a2afe9085e7130be7be80d9801e42c8a7817f32902e10004dd9a02ba1a0d6cd07bcfb31587294491f280232dffbcb6368abf499cf85733f43ee1018d1a7262e5fba520ef19801ce0259e63e54986065e3e15ed0ddb2e10ae63c5dd0a7e805d3a8760cbd48fe99014f81385c4cf7dea3dd77087f0284833ff26bc8f75967e5c8fb099f9c3750db2bcc274874b0f5a486862c7bad86feadc1154e8ea4e719310c232542cd1697ce640e1e6ceee52d6e2f29c73e913f2d4a670d737b4b7e2b5f6be755eb6660d18705f73ae8a9c289a2418251c2d759ada111bb738e772f17b9032591cfda263eded2b911a6b6db9d7a7d4c497b7484a135eb1c2f1c0c65d29e3f7aaadd546775c85964a48d049ca8c17358de0bad141bdfc50df59f509bb3fc28fb9766db29b0192a7dca2a3107d9580d102dcfa935fbb7e0bf4cb0e5cfc2197381896074cbb690451d8556d5b3b9563457267f6b4326e04982ade10a51187986af616fc498b164feb0f94b88b6aa4b4ac1d837a34994d8de0f3e0eecf95f0d07cb89dc309822721594b65aaedc745f15425ad718fe98090296863988ac06560f3329be962439d025eead6cb1e60bb3af9b599638f1d67d472761d8b7c6139c6e0918ca6699ee2ed0e51b2c3
05eabc2238a36cf77bc9cf9fc6e74a9dc5784bc77520ef4d0b90801273b42a41103ba2bf46478b5e6b6c308174e362e89161a86ba5d9c2883c0cc66046a5b0c8aba1b01536649d5dab304a4bc6cf87bf4c37e65a7971ac93cf62ce5e6f3b3f89c53deb2e5d34ff6f2aa5ee1de8f7ca11a0fe1da19199e378c870c8c971d2560236bcdc56120364e088342f63faf22181802f674ceafb5546e45ae2e9f4e457b98f9fad0815c08833471a7c7469f9a18c6d1d27c38675093c3f3d5e95eca06ee8f2bab827997e7a4633684bf14fbc7871b1e7798e0acd649def3037be407df83d3b2c8c756ef488afb14af897145ea44d617e019ca024fe69a867acfa638d419dc7fa538ac05365753c247487116885ff73a1bd235c43dc31f0d2ee76dd685001e589ce63f403f2575c6d4a769f25638bb6340ffb7a3985bf053fe4154a6fdbaa221065d73349240ef08df6392c1e57252cc8e56081413349ebfa6e92791efe77ac38158028a678012907832e8966b4375e8cf06ec906c2cc077de264650d1b70d2a5692e618ca3785de61e516aea3f8a3732ad0bb6dfa3fdb474ed1ffdaf15d2c6e5def36071399a5dc0c50c7aeacaedf94dac326bcce388cc8b2afc828709ee1530b8ec7ca760db44457f87596e0ce1c880c5854ab96ffb788da0a1cac66931dbfdd7b5b518309b23abd3410ae52fdcfefda21da11f5a61927c1f25d034923bf55f827b046149b61e2069b4d517114dd9f516680f8a5c4cf4871bd3af51965c540d569b9b39d9a21eb7f7f0ff765f02daf4f2793c781ce0712c9bf154f68e079d989e513beeee0993e85e8f3545ea9723cc61ee3e139f34def49e824ee63d3c32a2c467df889a0deba230fcaf01270498a4433f7520fddfc4c1016f0c02432400f48e04e3092a87da12564a8dd3076e390da1bab550a0d1a44fd967f002829d0030378666e429b4a68e841a8322214bb97f3bedffa0b685eb6990c114130be77d8647c4e14d7ab79e275c2f4484438674b91426410fda28c00056d233f5384863582b15bb36201f6044ade9b2aff5d015184d47dc172c6fe9debb2222f4223382a06256b6986c48b7dc51de6e79850455ac8aaf2bc2deede41399e9958bd94ed70a5a7d3373ab239438cc2dc681b7ec3ed1d3dae4f021409fa5d12a46f814f620ec8f119f1130e2caaa2ee40858e0be0ad467baac91e114abbda7c24b9eadde9dadecafd4329cf91fb1a06aeb0f0692b2c6280633ecaedc27cc1b00687cde0877d50d83db4815c89c54dd0c29d09e91d2f210b22809937ec64c2d1a99379511d3718bb7785e5a1ca7f5f974c60edce9f975081b5b1b045c809822ad76ff5c63f7631c28687a94cde71cd17200d10bc13b8f45da61fd26bd8fa49f36259310e75fa927cc34095690
c00bee6735c655de44e26c87813bc5f451b69312bc55d6b57f22fe96440092378df11cd72f951b4d895ed629d3f4270825a02fb9376f7bbbfa3502aacab8edd8c149b9245ec65f1133a2209299f8f1704b257d2c18d0967b555644bb58dda4e7d6863f8b3ee8f4944b6f2b732fcef0a93bc8b2a7edf59e12c8f58f3ebb4da372b60e49ec3e3be4d14636af41261f33daa480c0f8a0b1e672b5773c2d7db2da891b73f3145b23445b186506d10750792f9c053728459c73b6616547bd6752573c071fca702fbed98bec5fec8e469721834d94e497277961e4376b3049238c22e67558c36ca252e74ea2eaedd382c9ecde637e6f0b2a607e9817db2e69bab76e0a9b8f1f4e1b9c5b65b09bf396b873ff050c31fc8e394f5958ba91e528ad0d533fef792c23a9afc479181138b775834df84f214bb361c4a1e77d162e925b36a4c626884214ec52e77ec379b17f4508323e84c6fbfc51509fcbe8cbc5fb88643b705a4ad46d09ff7e570a7c0e135ec4bc78b401df11a50a5a3f3071b348bfc090c213c92a23603057e46420fbcb60af22d972da05c9fc2f425e6fc4066b4cb9cd42ca17d4080db0bf88884898b4b5b71c91c213fde567af12aef7d1d0b27e0160f2d5b81616fac9902b9749b21fd2c2dc3340565240ded55284da926f286858f698d52f27bc9fe769b1f50de07e37727f30676bb9fb42a91e0d0c709a59efdfd31e83765bd0a51e95c90cc6ac8b081a26c609f10eb1e251793578735fad32dcdcc3d9f3857b1194be5957d62596b5d42706dfcaf7d49d541b0dacc6d6c6edd6bc49bffb6c94a9bd0e369563046f2b2810789cfa6085a92c1c048ba839393af1417a7883c8a34c8919ba1714cff45dbc47c79a757cb68356392f3a5366ecbbc97ab6b925d2b9c458ff0c451f1dc97e3c70d253c0b16e3b2150628a85152c26de93cea9afe2c0aa9eea726e6514bdaab8c0aee77ec35c036b848f950e0eb7f66c514a2a43883b0089228f9668e92a9bef4a6a32060ab952d5b06cdb048f3910e727bd0b4f75d98092e8bda5aec41bb4804028e7133b5c6eea978e10a86424e45129ba65b2a835de37a16d9ed3628eec484be8b09c16c7db7d347d053cd5b7906cac86e29515082e9bd5cae58e41926c876d4afc2a1fd1b21cc7810bbe85f2c7a62347c4c11d11b93f4111fb424290f72477dbda43c2aba8b417bf00b206eb107c61b2bc0c4d2c08060be6358c60b74ca84e5301599048590ad424248eca7d51a03d7cb1a4abd423da8ce811ebc0203023addb9296570260c940cfb4c72422507b1c775dbfe3d5c42e20617cf58c6e5febcb9f61be169b069e71c9810e584e7467ec5e23e4dfb434b9dcd763e636043306a6bb11463f92b54b4de767c2b6aa1d1940c8ec9a20518aa22fe9
ac9dfdc7604a28f077cc8cf38898ade6addf1285614fa6c03dc6b736dba404b44a2c50c07fdb86980ea635cdc4f06dc289f0538ac44d7be3fb2fbb499deec56c9d62b293da2cad1c14a2692407e596f96ca7441d91a404d99d9b0e092bb40eb6a92cf02e00f2cbb8e22aafdcc2571be374548d36f3d404691635266df42c214036d07b6b97955677e8d374ac1ce817aea6d0320282b6b017926eaaf25feedfffb7424f9e4fad66f0b4356232448e29da484a5dbb95504ce20101a35eb387a62a51e3ecacd3203bc39fbf8de4366c2c637c4634646714223fadb3a32a8e9af9939832e806a7e8004f7808334a5ca1278c28219fccd2a012a1874496233ffaef0322e7f142200b50f061416710fee4e4d0ac36a162508d032f207be9f484589723813746b4ae7b582e8c3453213d1ff11273ae193a2d1db55e1f7419c4f0da9326b9d98eaff5f9fbf3b376e1293e611b7e11cafe92750a0e9830f724a9b76a93a46aa25b8f50de0fd5581b1a15667b8aafdc453f1b7688733e9753e57e14a209dabdbfebf8cb89209280e0c64e092939ab49f6158d0c307409ca99cdb527992e7111dc04086ff94c78df48c621b64a21d56e0037c89e0ef0742447459c9b87e16099c9c3578c2a7d1f6715eb84f3e7c9c88f5ca0ecfd935317ad4429174670ce60810c38c343e991d4fe1ea01b421c45e87ba0ae681d466e6583ff69506f77625678ee4d08233c5a316c212972c3021906f62bb2d112f13572265533d016324c51b3140c1c4408e0fff48cafbf7971b637ddab08d968af3f181b551553bc70657e92a366ef49529ca02e4072bb954aa889cf8d9c5f24f4e18a67517fa342e4030b7857c942fa5c87cd28f2a911a69b2bcad7d32d5ab1b87be71a39222d4a588d7e4dedb5b0822fb90e1471cc278a37df5fe9633280af9c29c769bb65331cfa7e51419fff9d0a173505fe529511b3105bfe5248fff98c6bdcb797d8632389d09f4762188e12315d30541c3398a6eeb0356367ae30758765826ba006383cf05184545920cbd929807be359e858fe67cf81a51d25bd49903ff10033ddf6b0cf569ac93a2a0902a05b17a9a027b7df33587c708ac02ecfcbfa088fef172aa9dbb178780037dc106d35b39b596c3602e76bd7ab56bcb3e3bdcb30e22c9af027955e6791e5e818b201c34517494b20f6307bb164acfccf19c169ec9ad4ebf56a7cd82bdbcde7d7d4ccc9f75c918469a12f6aebc3fd666ed6b4c851e49dcd50952e564581de39214a44b61849394f24ac0b16bd0f48be3cffcee6a745584f9a6f4256e5bb3b4a559074b4d58bad72939a28290a16b2bf79e2ba1a41b4f26a3047d7e6708a455744e39245dfb2ef786eb34344d06375e2f6f380ce04ef599340bfa5469f842da57a538c23a364
d0528e43b023a2d6564a1bf11dfb407c9278566392e50c076133adf5c05918ec3e02b6784d92b96051a86b7b40138664c027bea45088aca2caee9f0767784aac34e15367011aada78c905bb4bdca1fc8d7f204a194ab6cf5f45aa208a3c6511ed7baaf7cd0ef80ca7e7ab5a2075a5aa1b312301fe21b3f3248a3cc9ba7dc97a68cd6b2ac56e11b2667532a0d176554d8c97f00d0489a632b4afb2df3b8b8b4f74eeee5380a8e0ae26b4ceffc22f6b30d8fca237fb9da87a0532a4ebfbb406095e5e15e184ff51a3d46db9d94952042250f702ce31ce0654418fc840c0537c4b5cb3c37e51ffbc3330cd8443882e1168a0a0adf0e703b186dca1d6999a73263cf6ddb54dbb4fc4ba5dba6b2e75e2b02878061a51cc34030a69fdea23d77fe4c35c2fcd5cd81fe2e869dda7da050a49c66592a38862970820c7571e0e39704e889763c4200057d8c1375b7e3d0ca749ec0140354342a6ad171300671361915f4367d17b825503c6377167797327b90a28d3eb0b9ffd9cb26d392cf1971c78ac0cebe66dccf72a27a87ff85b0e21284180b96d000fd13fdc62564e127d9f160ecd36cf27e2d16968048229a196aa916c92ddab920eb140ee8ed1430b2a659c33851626f5b993fb2d4edc515b059aadeb6196af19f2d0605da337231785aa26c3e0838bf9a48c97c010cbbd53b2e9f0ea689de79982bcc75a5b0cdf44428c4dfd6c57016b85c864a9b852a5a24d492383f8dbcce80d24a24a6b2b3d72a0e36383f8be66ccea31f2eff572f588902221274504d0e1a182b3e73c09e3fe0baaaa336ce1b22ad13311620ff25ead205860278675d1134066770bfcb19e1761125f8a37f65263b7a9811147c9e14cd72332615e9148d8e1e42bac2f9ca45a5a40e2b8202cb66266a064397ebb093aea2d3073244862e349f8d04d1a524cbe2c784cdd0c34b119f35abec1529a4d5ad45487e4226fca46382b662fb9ab84c22e21769b7e953a884c697fdbcc69561414640e6c67ec1c2a4cad246fcb72f5fc8a45db1c74709baabbcf2e45b656ded1ca54bada5537207304723dc3cbc90fa747dc174a7aa0ff627b309587e4aadc14b54c21aadef5a56278dec30ea295b4d860d5e8d8e885a5932f47d0f01e9525755df2aa58aeaea4dec59e2e7709d6740cc997000df45d2c6d047c1f6e2f9b659f340e449ee279db44c373bf3c349399b4b16dfe08f0fcc6377c649431739387047177abc6b83b7783073d0aa2333fe4ee2142548cda842a32b9202be099c38463ac893b1b19ab3229b1448b4b64b52240b024f38e12e5517e041c44309b5f9e6dc0b7f1d91aed39966f563dbba65c7ba090525f6750790a1adc07a89ea0f9e54bbb9db18ebb3aaf0166fea474c4c265fe77e30900c9eb9703810b7debb8a
87f01407d49629da0396c44bdb9b4acb7ab181ee9164c0805b704e391307fd4f47457903acb685506743c7a969f504e0fc4b2749f5ed91e2f8165603bb5f184c2d5f00335189a40cf31b530032b247c5ca9c4855bd3087d4731bf9a6ba975de26fcec61def302aaee08c182c4238eccc6e1060b85ea976a605a5ce41413f5291a5774eb148297ac5644c7a4a0becdb7cc9b35c2421fb2db7d07bde23c8a17d2addbead24860b2013a0e9656998b53a7a9798bb2e6768f307421cffc0f4237e1ac9378c5d957c8d90d5c690de830e230e80f5ef947f84ce9ba6df895a4349bb2dbb17dad5d61cee0061b22fcb03159a8aefd8344b91a17fd591dcea6b04a056c904d382dc2348572975da09600a1aef8132a0e0227ff61c94a56a9720601b2c586cd7e3de21a24656a730d740c3e68e333f0669f6b47ac98482611f505bc57222034d0c0bd1ec1cbc961dc83e3f9ebfd8923bc9bc969c995fa0627961cc716a5ada3d5915149b0e4f359a11bfe62acd3c350341d24c877a962a2ac1b25daa84652cf0f0e39dc62513ed40ca76637e651b54834eff6df8b7c92b59dbceb58e0bd584a734e2651a647132db56f2f95764a8425a1479a18251c17f435613cf39a50ca489f123e6f53efe9cb01ac7211b553eb1b1b9010b1646814b1002d61ba0c75a7306ebf6f77ef517ce10d8dcf22dff70fb6e219d7a4df0bef076edbb1a8e4c531fc6d970939bd2f8f65e18aacfba609614cef51c81b08bd30f09d92e35f4941f3d44b73d41e53235da13f65eb75ed165dcd177e30d8f4d2d73877e2ab9e8f86c8013d11bef1959347ef88fd527f1cce478f91434e06641db95b264e86046555421cce07b436940f0d5f614e76f34fea292710245612745f1e1cc452d2c1735ffc06e68ffbab3c680302f39214c27e170567b2abdaeda0e3e63a25c46754cc68702700cd0ce5fa9dfffaf524fb33dd6ee655b8e9aec8f8551b97f77cae6aa05234331b8328d49a9d276d44cd8ee44ddd70c055fa65321555479277669b0a14d45691f07f8408df1843a6543749e8041a5fba98d93f0ca054b64279017b558b95f60195508b1b35dc526423415cd9fa621fad281abc3c8c6c35e321b7d3fb4772f23038178911a00daab99220a0bafcb1007bfecccf961d045c89cb81db00aa619143b1b2ed5eca3a2d97bceaa63d64930ab84cff51248e6a9ee94e6724ddb7b4a33f4b6e25f29cd4aa7ab5c82cdf73e4ccbd85ef41225743106dee6e06e2ee1306076230b69e72e3589edffb449ead5473302e7fcacf3bf894b1c1bf22f957c0e72dc580779d5591b4a1ddb597cfcccd697cdca7f7ccdbe07b5242a34bd9efb3ed5b57edc86c661e3be11ce35ecc8259c11a2c9d1f2a1dca998b373d63c1da40f046792022a194f28
477204bd85b2ab764042772e5f86d9c60d17695f85f052aa53d98a224c0c9ccad587d01b6ba98c6bd62f71141e63f6d68405dc96c11c28813cc323a72928c05eb4577dbe5578666ff9d975500db641bad2c072e3f5a2f2d15cc912df075bd52e8af2a4cf38c225ff84d768f626ba8de1288514db1b29f6aba0947ea1e53ff8cac1a96b61838258aa0c57ae0349fdd4690bc77f9920eab35aa5d266fd38c7d453d22a1be53b6027b4e467bdc8118fa89c41a3f19684d33bdf9cf5ca618ce4b99c5203c197eed5de3407d9cbd3e8e96c95856eff79d91903d6797c2463596ff003e58b57e94d6aa0e8d7580ee9d161ffd234e7e92eab3bd9a429e54d5d19aa4896dad2652bf5cff3e5120412ef3fa0ba9f76cbe5d8c9143a6b4a208a51f62ec5232dc2657e63af2ce5345e8aa0437ed832fef80ef742d05f602d6eda0b2da3cff262c8fde29fffb09ed589c1b2ab0f0fce97d28669a17b020964ec9b2ea253ad86ddd868b47dfaf90d40d953b62b52952ad15ab919145974843a43160c6b50ab5c1052ebd76722d6e9d0969e5186c87279fa89d24c1b38e01470ef930a341dac6ed0266c7e61970aec541c15b637aec45288db8d63a3a09abbf583d7090770d5a80a9d4ad782814258e1e2b8e9af6e1773e617e467c5b1f03717272e01740f0ad95cc6b9ba304781a759eb837634a88b8c64905012089360c993aa9c293c71fb813575557d9a56154ebb59dcb3b2e5d9291484169df2f80214a48e7d3f53dc7171f1182ed80c141f756963ecac83b83b2d89e3dc568fc5f2c1438aba184b769c15f212c1b4432f9fbe9f152ed3c822f3fdeb391fafee10edbae2f438926042f2b9033f9e98a88867ef62a3f89726e98e447dd9a35af658bf0c24a9ab7b0f35dd22e1a4363dc52a38864b2f1f63e1334a349b9728e4efb8a74185316c5946b6ebe19f09cd404884850c5f7c8c6173c22420330864dff1b2850116c991140d357c5b884f8cabdccff9d5e082a4e71f613400e6cf84cf9737e9e2a1187975a04ac801ccb8057f370179fb32ee2bcde0a1afbe16852affa8f08b08d2b96c3575e1a87218543a55846b63c40c93ca7ef4707378ecb0655a61f7835bb47c59c9fecf4884d43be08266e77e040de5322c1505873de457c0ade004e151f154c4b7aece70ea7524fbf6a53356891e1d72f0a0d0e44f1acb6aea8d6bbe1e09f37f1dc21a13fb9d243021b8eec7a6751629a752d9ed73bd9d7e1f91d3ea05a53b87c27f860aca22fb969fe2885eb2cf4496a174fc81078fe3f04f3d0c99f194a3537a7183985863084220d3415f463150748b1adf45150f65682031a8faf113a531c0db12ad8963b383d0a756afdb3dfb5dbb4d5c345f33ee26808fb56debdc1a59296ee52b4d9f361293e9e32bb9
6b47e9016db4876b65a7620794b7289d2d0f815d841a2cf73c144ac94a246df8095681491cdb19a48315bbface31933245dccc53bf99584d7376d51bdd0021b511ad7c6f552e53f29dacac880728aae07d28862eb00dbd73c04ed153f63360fb5b12c9368672555c9fe2870d3840a2c89de8ae8f2bc097566d2e61807c6c3a4e08994c7d6c0fc44941bd8fc2a0d0f6f9e6bdf723d2ae698aa296f47dee76d7d68ecd36025198dd96d2bedc2669e764f3c7deb38c298ec48f8d65d7ab5898d8134947baa73d6df3fdbb5c80dd2abffdc8649a163c374a3501bd2d87253d6f84dac062e3b0dd6d4f8175386f66d6810121c537703aefa397a0fe8e96a88340191e47286612c8e90e8af2c91518e90336250af8448fd97fe9842a16d988ac421bcd1ab604eca07a312a029c3559cc1b8405019ee9ce4864eb4417014edd079d2c36ef13eda51d9bfeb4b8fa7493705e6aa51cfac0fcf0cf3048904b9ddbf2ceb045995f8a9fd59ee7576088a178b2f853d7ca9166b0b911c31dd0cc3c19456322b9b72519f30364b1fceb2cd853d367174a1ec5caa63572f00709466705fb77c2780d41248b811f84c610901055a02c5e57568e976315e4152c1e86348b0f1748086e376b3b7f1b402cdfb6076e98ee0c389154588102bee37421144c85bcca5747758588ee524a9185ad9d793f283a603a5de47ba00faf05d2eedb37c66b11a1af3176a69123cad9af1f8fa0ffd3ef41a5ec966d174739e3c19f5a03f747bfcd035b3f036882af44658901a1bc30337e7eb040ccfd4f1d0de6d1190735c85ee0951c2a01910aadd75df129323e6b8e9992e0158bda018bccb1f8e6dc4e1530077c7549fb002a8e061047cc59b21e6f564f5cf29980cdb018e0e24816264ffd547fe936d51c777b087b01b10608cebea4ba80b63afbe639c86fb1833dcc4e1a420ae607d569b284ad9498c58a665db5207509950337f6f81fe0884c27486d78e4206cf7fc60d0ca8011d20d0982b7d80a8d7090ac5ec517a87067ff46db8281e399e95251154f63b0155647198f256d74c98b935a3ef493494fba2a492a433a4186c98810419829f22c39f34c44e83c60adae3a2f137448be8f77740ef6c834a759b5ac601fe96f104a5e5b247e3e7fa0c57f3431b8cfe66b125507ea3bd6f18428a300b070d914ca50670d27e635031fc515506676bd65cef5f4d9ae4cdbe20526d1294ae7e6c03a4599d780223b7afd309669c383571a89a0279d5dddb98a47b8b1eaead146253ef99b76b1c9e8dd46cc04299cbdc89a2b1bf2b1f048fb5a16a290f4d05c7fe7eba477a88e6a12c693c4e89e59a72d5050632ac0438bf689c6dd3254e6dc58ef09e1ab851ca575c1eccc42156bfe4bf0cce4c7324c3222a72a4b1a53c405ea2ee4ad
016751e3c1c7fb990edb093f1024b8ec25cf59b3d3e90a8d3db0ada35038dd8e6d948a500007674f03467e1a7ee6ac8924222ee6b5942c6ac69988a3be3dacf86c1a5e31413c3de021a2b2c9cf6fcc3fd0347488692541dc9fdd128efa76e1180093ad3d2eb149450a0ff77c4f3b143582576138877b02652cad86b4600a33fd18bc9ef316c20bad6b1fd0d59984937c37a7f83a9cb886cbe34ab1f1d966cbdb46dc3f87e2c68d71709beab4f2e63650b2bb8a59945ba093d10c6891dcca99323192f8b357345b363913016143fd82c9203ef2db2fa41991f0541dce63737d474306578a0cd2b572bc4c208633030be521d534c1821f0fc95e6c97ec778f06ae7c747bd6866a63b66e9617796b2a2e43dbc3aac0e58337e3be80c7f18fe6dcebea23569543bea64eb43854e2cb575706ca3d3a4e1430b035a6b407b35252f53a1ed052feca223cd10123d0bcab97e4186722452eec93d0db1d02e92fdaf9f4753a919890092151530f64875b075674d672b7ce8bf49943a569d4eab6c343d0c4252c0475966054c95b63522a34efe9e6802a3106bea51f9cd7ddf0768cc7e02d877f9fa9ae950efb05a29e8ebbcb3a287995c1d8ef103936d5a743e2d91fca46088f103238d1704db9b5b8a80e59734124ba87ca4c4fcc9b4891683a305f61eded0b775fc717c2502afb1d99cb8ac9edf78bf022fffb1e94bd569938d22fed92accc9e6156fb44ad281f73ca9b5ec470dfc7becde05acd109ee1f8bd95d66137f9f707d1ce563280264281a654dcb00887cb6cb5e2f79802a5aa714508700cf8df4b6b4b05fc0e52ad6833652cea4f987519f344457e4f0907a02009821029a9310ce259b3876d6fcb1ef9ee1cc73f9536f5c770cb07f1cb6aa75a2ca1367ef2b3ce2728acac07929ab2c76ef4e6d94d1daf3a3fe175b8bef6310455ea30e8979cea91dda18f6d8ab14f3c7604be7a5cf6a393e64c6da088bab099ae1d88827aca47e60781bee8b3cb2301c8c7ba349d117349de5926d20545e3286a97653acccdcd9961ecf52b3c95fdcc70fc889ca22412298bc466216c6d168a11ca13772e80d8472df5e42daf57579a8079f635483a5cc22be32fa70fe053f83b73eeb6757fbf9a89a8bee0dfd81cb381750140d758b963b53d6a0cd9ed999bb56a336fb5fae088f6cc90b9de72cda0ae0e412571f1ee9ed681a90d8f5c50858ed78377fe905477247642e5440cca3998815b428b198f84c41b06a43159c5e4bd6e317a18182181ee0e51bfa0b6e3c7347a1dc60ed39cf6b049ffeee90cb7069c9296b87852d3b0ce1f5f5c8be2f827a3de0a34e3c2493471c53e387943e94c96e03902b2daa054714d6b60d8d556ffb68e43d5f4cf6f9866bed815ba1a7384d03aed7e6b3b99c3318324fb9f
8bb661d197f4a2850386029b4afe6a9bd71da4aedca7ee52519c75c7f905db7a42c9bdff5ac7ff02deb490848173f9d0b0c3b3637a8bb76ffa615dbc6e1a2790c87934980cbc59b3ce9d52cc4210cfded9e7528a73ac1ea3d8c87931db9ba08e41e92df7a5962e6bf5f663aa3022fc87550dd06a40831d5443a9ffbbac7c18b4eed8ca112358b2184352848a9bb288c4eee0feeccd7c8618b48c6b5697be3847b7aaa19234f0e734e8ad58285e9c7cca60da34ca8b49cf7b0dbfb2adbadde5f99a854cdec2bb7de8f4f497a2801d33524a10ba78688e3727673cdf25b4890826cc9997441424e4f95a2e8f61dbeabb47b3d3ee12b26909cca78d023b76461ba7f9e02f80d0016ba7cfde18138c6de8448330c26fc044008cafd279e8f5afa8c9ee0105e01ddc5e30169d4129039b0131ea8354cf32a50bbcd2cde6854457165c5fcb8c5ba3186c7a1605b3b2b5cd8f95f9b32dd10d58e80c8df3dd9532ce2b255852d770b21ef49413ab9cdeba710fa1d1256a088a7047a7614c17cf312fc37f5ddce0577df74b1b83a5448dd15b4665909b7b35c10e56cbb77c4a8cd5105807fff576fa059ec9e1810ba0889fb84465c6ddfd99cb607019285cf6f8eea471a50d13819aa417af064ea05df64dbd850f42e095110d11abdee58d2ec9ea15783ab15d90489efe237bb83b4359f374c4e210e574b57bee33bff63d2f5016937b734c030d2eb4af356c4409158defdadd6d2a8412a0815a538ebb37a2bdec2e4321900b70e55194de8b104af506f5ebfc61e32b25c4de49224659a15923b1bea8530a6e0e9a5b7b90dc23f9e6a23136384fe144773df82cb85959c50b3daa2879222f5db9f5c33fb76e468e384042ce4405c481bf87ef904c87a77abb6ec0ccd46711902750de72e74b119410ee120006e3b15672cc20f2e593670d528459f50ecf6f6cff9c74bf0a03662c935bebcb6b48a70cf029844603d714aa73362d365024756367920d60d90a1aa79551cb701df712e304d91671f1a50a36924572b8746ab386a97b9bad344cba690c3e51dc47083f523331a44985ebe7f536985ae8e133b14c38ad59d779a527ba43bad743c71fcbcfb386c53687c6b5b68d90f4773286fb8eaeb469aca368ca81ffe5b73efe8d0e1f4ad97b72e1121aaf5e1b26243702fc189f82cd04e93119caf3900cb4fb642d68325a956efe539d7a8c2dab4b1a3ff8aa0e4801b37b227f80e5186eba329d954ea2c85a6096d1c220e328b89c9e1711cf020e745fdb143885d9bbe0cbca662601dddf327f05eaa2070ebe4cdc9d35c65797a8a0275f981e2af039b119b2c8db02c21df65d43880f965bfdc53ac89e7d3376c4f3650db4761bf19661806384ce438b34bc03395371df610f85ff949ffa869318d55429ef
0eae0dfd42fb6bf8f79ea810531c90e486af3acb4c5ea2da968482e7f0f62a62ec396a778ea58597fc2728b9eaa485f340e3fb291a918e551256e1ae2bb52d9d050458c7b510bf8402c2c7e3432288f7f9b4238d2bb20016b8e85bb6d6fd693253356ce5774364c4590742064a572c7a3c5aa098358a8dafb3bf033def06832d1d5ef1fc38405481414879c69910b9c819cca177a479d4a96888702923d7264a138c7753f05631ec39062352e70d2cf30fc8830875706a83c948614e9440c35a5d33958ea7f965470010f441a7b1efb872632f258960d37e84b477b5f7a7e019578910f195143974c8df858bccbc5983f631b75ad8572ee3cc3e9d72c7ccb9f2423bb9458e7e5f7aa3a29618d7289ce4dd4db2981b9081ff71d7d190bd595d71183d1e14ba411e6a99dce1b9aca18bedf4d93c184e8dbcb2a6ab5b70834a4a880ab655cc2caeb46ca1e2963fb5a87ceb1c9bad15dae7cd36a218fe6c07e41ba72ea8097c290bfdc7e68fca3803315d42845b866ece821cb86f5d94a535a869e50232e58fedd16090bd6fd6b3650601e3eb59fe2609b9ef1efcaa4b91567347d0d270c267c102bcb35710868d08f2a7c9ba4e0dcb51e036af6338a3cd18c62d2cedac9d8666f2c4a8fc183982e07afc8999bac4960243728da7ee78b5f1a121bc626197d4c5999ace814ed6d510f7a8ac568f46939807c473673df36070672efd6ceb1614ab39e63b939b9dd641c3d7376ea7b12643eeaf0fbe542068ea697fe21fffa4b1b7aaf7452c903c186df33e5becfe2b7d1ec69c74114164abff44f5d1919ff8bd6c3b941fd4337d7c9db90ebc3e056318b07956b59e58f56403096a10237c459b2d41f3aea918887187c5b3f21e5d6ae75f8b7b8dce880023661048ddab98578bd476ad91daf6f4076cfeea0c475d9a228ab39d78f19cb64f1eab51b9039932fb03f441da6a930bf1c04023148b35b41a43f7a06e6e44afa2058264eb62a138f5f578e862974d6f612b3a9f2e0a13a9aa946b0c53f387c73c7c9521d010e4833f4597e14156979d8bb3ae71d0c9e390d87ce13b10bcfe24eed396441ac70d96e233315ade09802d159ff7ab34774b2a1894c417a17996d7e806cd577c0c26d68d2f4598e018132c5079befaa13b9f0bad726c4d4ef56a568f6f7dc97500a3788985d92ac3b7188cbd918064398359057ffffdb8319765b1370cadc63f38c83425c9133b02fb51cfe4d50662d0ab5d759070d3af0974f9f70c196173eaad2de69c444e4d1c5a2791cd82b7ae3e0a40fb5e5cf3a010ea039f06cc48c56fe221a973f6cc4a3eced5c98dbab6b5d0cb42bbb51a1e977a6cd93689c4b8fe931ebbe0b362937cd4d1b35cea53a5ec68a2982444baf10f920c4900d2335d437c5a4842eaebfa5aa8
a8f8d8eb6e31f8362d540b4b271ac3c3f5f92d325ebb2d322d525850ee072965e4f12c1898d9cfcf119279cad624e4bbe1f805d11a034e084648cb25f5d9a15f79ceed13a42a0a13942c75b155e260b9154ac5bb11eac5177c542ed3151660cb4c69154b559f33b42e87ad657e8a7c3695911b37f0aee909d94c292ff8cc22804bcf1b564024f4d9ce54637f7b1d54f522790ab7c16b011f6db892f834138863a6147795092ba428108ceec6b2668563d73590cd30c088aed33096aa8a0521a352db52de555bf4c47f37176aa4a9b910fdd80600c1b59a03b54ada2c4344afbd5d2e81df0abef26ec60f9eed1a7ed41d2133cbfb755f997186ed6d97c805379465d8d9a8bc8210123a64085e7ddf2aab4ce26a4c51947866f58bc4a39487cd3cfb67fc680a398a5309d3cc825d941e82f6ea9785425016907b182b82c1af5d0fc842ae8da153f02f4569c2625491301c0157518f9d94b06f59702cd1058b3aa3f1e2807a995e45dac72e7a307c4e29c99d3745544fe36609be2197fe1459db07bd62d0bb18d7791029dd101b3eabdf122c87add51679e78fbe0fe9493e5af2d16fc13ac4f254ae261273559e80c26a29d1833951195888d0005aa653fe4999729c11d251a4137606228d9c0b0ea488000b6a0301f0d4ae8b6165a049ae365915dbce230db9808bb05989b3fa72e6c025458f4935e16c51687b2a359f583db8c4aaabc1c63d5e10f5053af2da98c534c07459200a30f8dd036a44aecfad61d7f2aecb98371355f82fbe556fe60f9c83e812f4859992a15ed24d730b2d3c8ab717d3e59dc72301d04eecbcc7603e7ebeaba16793bf60f3d098d85e8e580461202395f0b0074a24cf8043eaecdea816e03e9d36df5b8392ba6665af7859085287372577ec27b22443958eec1a75cb246119287faac1afb8003a87f992523eeb47bde3b493e00f6663f694b6a471b48c93e49af24790663e937c356557446bc0a7fd56b024019680f4fe1d5f22c530839416769184592715783a2033b29e31bb8d531c330a6ce64be18d411a510da5676f6c6ca242ecf16196edb05570769152ed574e8155d8cfab667cef8030f0debfb24bb1270e28fff0be2271aef49e9a2d0143ec9886bd0d097fc5cd14f1142053ab8ba34077ab701e9649c2c36d54661c823b3f2b288828db9163c570d60bb6880ea962e23a095617349e1fa274194418041a3efb11ea8cf571161c06f23af4bb6bf5a962df4af299e32b6e13a5b507d8ec3855bc6300ae82a0069366a2f3402ddd2cf8112cb1709350bebc615f5b7bb8729f97bf39b2dc3779b53a2c18e991a85bd975a8cfa3eef56caf971f7c4daccead1bd9c442dd09dd37fd1c8649745a84e48f1c50923a67a52c7c75c904b45e73e5f5f840149c32716e49
b13aba2f9582beda7e59b01e70a4e21059fb5052e8d59db55120aa309c28fffceca15a3c6859f6e3e3885550c458c5c898534a084ce8b13aa8ebaf85a0dca51d6dd8b827b43a9539c84443bf92b5e4dae4296b917b5c9310a3378bba502b20e84b217d97788b75eeae18bec64097d02461e76c76df82cecb33a4102dfbc31b58788090aa1d0d86cd0906285dad18406cb8295fb9ecee2be1ee9b7e96ffa63818865bf2bb5e083a159d0c1d5615b8cc7ab3fe3e06ee95a0e551d4251de2e1ecf075252d9899461ab1c4d34552433133072d0ec93465a7724135112ae8bd9e87592791e38241aef66dd923fde28492e614931915a4d7641e630df6b346f875008917a115ee50512654dbafee28ea1cf775e45b50be4d0d47d3ac7b29cf3fac090a14d8b1fbac218319c483937e676d52a8fc631f8cdbad43539c2419366cd9a8759eb683113c44272bf53798e0990953be756952a1f14741b7f1e0ea59b4cf3cd980258298104f0a9cae551a8b8c84da0129bd2fcc67853a8a15ec333f0c1a6a0c32e5b08eba5d32a5c2b82014f407a135070f2d2215a320e375d17139d0b65529bc891cd19f73a97cd5ce3bfdffe6d898151f742950336a29fc2c1a4e7c9415ff547f09009672c7df92898dc60a9850d2a95f9153b9d054993f5afc5e2eb20dd0ead79a0bfdfa200affa55a3edae26c48ba2886248af66ad50996c2b6104e031858172a59d8d37e5244be16fb83101a374c5a16f4370183cc732f9ec0bf9f493f25239a00115c1a34ab907c0f89e863fdd9f83b17e4af2e7690139b2a2dc76ea05d782af3a017e2f04bfae810f6ddf71ca702accfac162746d994afc3e1f4a1251da1208c33ce609ed51b545bcbfd43f91136d21fc5a95d3d32ddefba2676c2b7a26d2d393ee9f9e514f153f249450fe3474985a1e902e2acb6a2d14254b90e5e55460a6d91987ad8e7739e897f09cdc0411404764db228651a0d5c085b87272e5777cc59a432114fce4a3f674a6f6ea15a4ba18432d61b8413817c03991138f0253e613b860c3b8e010a4fe85a0f41e265e447d45d0f7c001533cc1288700e07b00515964c67d6049f22f018efd52d7ff2c989f44fd5a45e02106bf3f9c1549659f91b30798e5b995e8ac591a7fe1c155ffa4180e9c09054357d59b86a1ddba3efbeba671de0f40e031f80338c6f02df52917de3f391922da2ac287da7086e969cc24e99943028270d6a68ffaccba6b67d012cb9d2c1530fdb117633c0cd7ae9534cb1c1c9ec6babb78da0cfed5c43c5af05b2d9606b684061e3c7fc7874942268e6ccf5323961bfed6d1f637e0001514d826309ac5f02faac233601ac835983f499b2eab2f8d82b041eb5ea9e06fdddf9721a53f2a88ad250ab949b10d8f3209384a56bf04666aa
1235791507cefd7978ac1622a8f9b8c50e3eae2665bd41a955a16b20d25246220bf5fb2cd31918db9238ee9fab930b2427805585159d512abc667064ec0cd9bacffea2e23c9477b3c9c3511c0de418b72aaacd8cf9f9c6bcb5d0e3a6307cabfead4edb720183dad38a51c5c599db43fb6639384f53ed854337062052571471ca3e4e2ba1d8722352a39232dab112899787f5e7ed363b8450520baf943710438c27e31bc33a87e7a3db1f76d06486c0c87f864074c4b83316d13f62a694d760ab616460d9012c271506ddd0762fab9623d6dfe4024bd88d41f80b90345a2eec9b6d50895d7f128d9de35e0c132a45ed42cad9fbce7c71afd66cab1781c90f4139c542ad296371533abe14bafbe4694d6f45b9654b409474fb167d28bb676c5911b5fcebad9919ca7b3d345081a07563ae8a58a82e585789475f332c65bb970b01671b016a61a21a345d884923471a77c04096d6359e1de1cd45e2038b72d79aa7596b4a1eac548e202ec527bd43082f88b08a1810b2df05387dd7027b770527ac794d28fedd3cfd084a2fcd58738e2c44aabe1af9292be0daa3584b6155e6a45154f99a33fc815780d1789692233d98ec268b59513a11862e0455a6f6141c2da8b82ed2cb144ac90caf4a2e66ec0782c2b072ab107ef9253ab44cc54a8724b39b051f21e6e07cb3471ba6cba7ce6e5f7ac70131ccf99e69fe87269bbf3503d9b87670c1b84becca6f69899a51acdc5fea508a54d67c20fecd8b52ae2f1d172497821f99854fc53359c2180245e5b2a1f948930141227601ba49db0b13064a8ec7f87bc1c9a9dcd3e3f4713af14d0f1856e60f190b48049879daa8cacb3b598d2b9ff37825e46b03360bc3fdd0e45a3e3decc0ed92d755906df8dcf49000505754909c34bb37ee7cdadfc9949efe2ba489b4941eaa23f32aafb38b7db8bd43d26473324155bd7e0c8c704927e4d62f794f2e9474118fab712851d8754738d2699ad1da4c651c3b4084fb86735a1357c6c49f93302445ec55c75b892fbc579da1d4e41132a37c98acf8df95c11fb9529f6387603824953569557397e4a0f9cdf0a67f0d5c114d26abdf4d59d914b0d8a3655e9727c4ca42d5ac8d0cecb5c0f343845858db62ae2917980c5bfaefbc5e6f99bc554fe31301b614889a1ab2d3805c184f0838e25bdd1f557209c60673ec49162452f4550197d43a4384edbc3d6817283e3cf63ce221d6653b890ce7ba88145b18b731ac97cfff1eeb8221ef0932c412bec27d7236e974a78c4e50ee4126b6e0a6849370cd4025eaa1bb9343ddeba273c85130b4d7573297576cba392d55397656169169fe8d1a7132bdba15b3004d4166f5c7e76da9c7af9d7ac00533dfb1b6ac39f597eb722c0b9906de6633dc4c415c947e02ea3e05ed
23c99ac6ed2f18a4b3587d5ac5c66928d4f266af6959f15ba087462b6960c6d9a8095ec65a3ee35902cc2bc27aea5876f4ee140c7d39999a6ed27ca79b24a70c00fff275f44e5f6d2ebb6ba547003e785c0d3889a090997f7812e1a5b45d0478f87d87672896800d00f23992c1dc13f96fbc50f011c3e1b329d3397d2a2377d73bf44842c8ca4f582937e0d5f76b9ec5794c236b840abb2a53772cc2477f6d91ba89d02e9000ff54ff4042e3343fc4107df140997e241bbbc64126562f95ebcf3ee933466f8375106c812b0eebf7f510da2bfb53f09623728d00618f707190a41c0ef40948d9551a76444a1d093443b2facf213c7ed15e63c0fad10be9cc71ca6ff72c74fbc1e3e5f123d8bff62e9c09449bfd37bbbca1786e892e7030dc060d329db00a119b64a4343b965a1bf60bb749e9a366e0b3f951b3a7b8148e5fc8efe784c933904d0f70bce1b812dca103b02c7024fc94cac77cff0bcf095c03624ec5346af83d5fea35bb234fc9a5578e0ce672a6af23148efe9aab45c7acabe04df82f66bc88a7e235e771b521400cd64bd40e049d1d0e47ad52a3feaebd473684448f416dc0bc1a63652ca6e0c9c577bb34eacf5b73a1e95ac46dedaffaf73cf0587ff42784471a049933897795f5f4af335f398f50950c257e7c5d7e65855e4b0710768fc1da48fe3f254936059068df679f7623218af4c903bd7b95cee8e9eb9d86cad6898f4f353d77581d19dc3e0422c475a23ae9d3a6f379dbdc1900127c384149d07378650bdda561ed3d88a334bfdae82adbb9786f3edd81a2ae9bae124a31b4d1d5a79d57cba98ac0a42ff1fe3dc20e2325779f8d89ffda50e331115d8a7133f09d2ec93f46449e4b3316229a0a6b84d66a8b33af7f98657e2d7e5476399c79e52b9957488c9f183453232797d80d6881f85e48e304a88719ba8760e8b3551fd0ccd9ff0e8085fa8f16353349be1bc71b146ba184d8e20ed007a371a4015e70430d6aacb59a1de5d9612e1549f878e5aacb9ff33a6f6f6e11e68d09d662bccd12ca4ff71e8ce8c7dd3e42deeaf0146de8985bcd985b6d76ac19b6c690dcd612d1c0c04b65590e0afb6b6024de1af1d22dd5134945d1622bde6aaba68ff26a2c327ea5d662148be7f3772cc00f2b052b9999adda2577e255ad5b61963eb28a1bee447d7d83e15ff621851348b4cbd3e1693c8354aa78c6557ce4d7da2159e1db4542659d5d230509a09f418b4f26bb06d9a839344328fa44700bae7e7edf71f94cdbb58e5d969394a2a709042962b030666f8a2051c04dc9843028d1c9d3ae3fdd9cd877af9bff9c3e11faeb124632f4825417f1afee8c0cd6cd591f83b29148ca965bcded8c88087e90cd580a1873ec6fd78c1cb7e3fde67f1917f6f66bd87998f56b9c99
b9b5ac3c89932d4dde07b7bce0b35323de030002f16f19cef5f33c829739d4c037fee7f8135dc795dca1fe4d2b7dd80fcd84676f601b7ff6f9a029380c892e5b1bc71ba3b8b266cd384dfb687f6491317189c60ddb3ce1616361a0d01a93742bfee11e3a53ceeed3b41c5b9ef6e9faa8e6fc8eb49b3bbcf2d5bebccdb9ccbd21ffde6c8ddbdec8c598d3d5306a9fe72bec49f8da4e2e00001418b0225698faa59a0c33f2bff7b0060b2ac67507c00365312b83e5e3c17abeb9821455d869fd1e9649a696fb5affd680dd1ee9219004c3908b663d1a25b8123359d78b4f530e343957bb50a762ed75a11e0f85d5b798549e9ba2f3cb8568acd4d7aa7cd41e92c1d8ab0f7391949d6c1138ac0f6fa084d3ca6d6d3efc76173dc6a9ea884bf9ba8a9f58518d671e2dfb7d272343d1f424c08ce484ae112aeb54731cc9bf37ad81238fae65922386807fd71919e9ad8f156a4cc917bf93d5d17cd697a5c7f20eba62d18bb3725d3a78262baad6d3635be00942f04662fdb2f5caa46cc2c63d7c8188def0df68658a1f9fa4b2d4c35ca003a682deffbd802d4d4a421d9e87476b98692af946dad3087a7173a2476526f3d2bac655a4ed9dd9ee4e9b21b5a58874acb4e28a19de395c196e9149046d7f571ff59448216c355c005e98c1a083b819dc26bc7bb630ec29010b33bc32cfeb8bbb411dd8c399636f1ccd758294a7de602d83b47a23f92f18a46b1a06cefdd21cfddec92aa46e85abef5f99596993e08681f75ead1c9159386b3e98d8af44566dcd08c634acfcdc47b3b8fd6d65d78103b88720c866732ea916f2ee83ed25f1d5ad083c36ecea9d43e1e52b7f21f4ff1eb51173485d6b60cfcc9e0d53fc0c6990425a7211632f1b997776c202a301f16c2c6c00d6f329c73e3c1fb2f3c206e8e06c13356fe161fc54b4a19c308d5292c21328138e6877d28c5652ad43b10456caa69823bba7e6d72a4340dcd82e3f605e50ddd3ed39d828daa2c808ceb276e2eed7f86f708f020ae62ce597a2a191e7cfcc6ed934441dea1019aaa6527b14f72da237a7c0d17342e54d2c1a94e3606faeb6d428ec729ed112b62c12cf2d4ee576f83425781c11fc19818a022672beb3cf32852d8b107997c993db11d9c7ed99a50c79f6a7c0c0c56c0c1fe5e7dce7f56c90c6c3721e53860d2da4e3110f33e288b7d65664877eca55d95645315ec831b2ac1146e4c144eb0cb64861d63e3d183e9c75d2c17778a8eab679f98d1f2184d31e6b34d4f9fb5abbaa7a0cb96ec1bc4284c224e667978d9be0a1e3f444229a90bfa653d885524a5d653a63983d8a41ddea8ce4756de97db3e9b89a1a200b6720a41516b7f7bc4c81fb994dee908799b86bbdd1f52d8127d62a9d67a3ff5c4cb0b849bb44cd860f053ba3
d581c12ff54a912e275c3b4b95dadd506bbdd2c4b460cb44f73531fd21122f882566cd2873d8d2154039a8ccb39ee73204bc855e18cbed9f91d913d6bac092924539d8f3d78fc8bb88a037684f9ec49e83b1d9ee5070f4f367b0bc9adab1a83dbb060af5bf469752d7604b7bb6d5b3adad15fdd281648d6181ebd3d02442268a317ff3b48b9bb414a7d1964808d2023fdbaaa842d830dce3fb5087da77b2db4b1061bdda05cdc7e5592fd48163f705dae1fb6d8b79f925ce06901cbcd7d92395644846cef6572a201e8ab540a9c7a4e285c4f73908c72f7880fa0d4a247598d1c1c4b79e098eed07727c4115ccee535eba382a79b1ae8e440fd2260e38ec1338e94df2b9b5158257207b2a825ab199609e06ea6e272ed621fd6d3c34f4ad4ba855976b404c28c8ca170773505e474f5a01ade2890774e44a592c3afe252dfc1d0cf54752cd14703e30e9878eef3aee5d7138bf30c8f04a8654a053c7de5e64c9d0d37ebe38df002b15db9aaf1a85340acb9fcaef19e6510927623c89dfdc4b45f1a438d3bbbfa5113f9f3fc1bd61a38ffd30bf32af71b63ce82bcff6b7e4353118ba358dafe42dd7439cee69c6f029c1dac0388583567a8f6459ea32a72aded45741b643489e868e42c6acf58b07881f7fb82a763284a4b7552fa23c7c93cbdbb318012ac35a8ca782ef784c0009f537319635ee1f2ff9cada2f8e38017a78b61bcca2eb1c128f5c38ac8a40cfcd3e2d369c3cc279cfded5ef8adbecb6f9458f0947a1f8cf04c55878d52dc53345698efb98b928b920908b152ff7b4d0c56f41c725d06369d66cca0b73bd0ef5b401d054628a73865bc25c630e4dfca4678689190702fab597f981d4e41f83a7a61df76c286453449ff5d6566f9e804ccf205d115dff050d30d0d54a70d7243bd019a1e54d6db8595c92e25ee1625e980a9df4bd183b8bc949cc3f2deb9b0930d04925ad7dee7ff7eedd363b616c07d67cabf8c596ea0e87408fef5d8bd0f1ec031c992966fb50c6ecb23b805bd46d947602f66ac1c13536326bdd585b9fb7210b4b02d0602734840295122e2db29cb21e450d3316bc349cbc1e96489c11f255c8eca9b365dca86fb570e3fa982ffd2b4637e66f4f6d570b53e584c33c29b6f00ba78025b8906db63b05f7d85f57073363ec742c9d0f5283a1066a9ee20550ed69caaaa750fc32e5c1df95eded27b1e2fdce005bddf1b6bbdc6302ecfc56f6a157acb3a69ec6ee826d525ccd564a2bcacd25e984a4824441d7e54c3dd82b4c1530b9ca2a5c9d695af553021c7fd8f262f60efcbe07b943450dbabf3386c2dbd10eab51c7f9c246b56a756301e88eb922a89db3546f876e81a65045c74683cfba5a120e73647f7699331c9e4b33f76bc5a64c863bdddd33dbaa0fcf
d56c0a5d054a1ef1219d40cf3f844058b01384b11a61fcefab070dbb563d66a5da42762d6965bd006868c7d74c25c5e8288636f8a9d022a779dbf41265581ef802e51964c6e6188de8dbe7fc33c72538de455e7cb9dd81ec1a8cf6e0404d2a2c7a94945fe629987c12e3f0398eaf97010c66265268506c589519f212fc0febe98c1baddbb540629b81a16d2c36de6d9a0eb63ff8ef5dd6bc4bd24aad4c353f5957649a95059e7e4e468c99fc44fe966ddd522fa2c4227c068a38623c415e2f0068caeb9e5ebadd2b0b67a42a2a9bf9d157cc386cd4da29f2c1698f1ee4a4eb1ede4f071aa1ee081c72ab67013f65419de6c43e1099895662f03bc8c15eaca9bd50e6cdf30012c5e26b87557c9a29c0d0444f677480d49d4643baf27088d53fc313154bb780042b43534938c4e067b5e7c488f31b5aab01991a39efafe2070f989114a8bc53d13120549b2edbbd1822c2a0b21a1d14e99d673b2662a558d5e4f1a954c1d73cf157c5122258539ab68b6938136edc951d2d793b9184677302f973a21e610cded2b8a66a9de83c63806d61b670f62bccb44394cd57dee9a23ecb20c7e68aea59807ea9d7d049509771067d2aedf418480f108dc6219040d81088e94e839c29b1f59e1b46b016166ed750e16a4247c16383addfcbda3fa87323ee873ce0b3a08bfb9f30b262d916c50b4917bc7e3a0cf7618de81fb94e3199764eaff04c3a080d9c28807b01b8b75cc5c6ab7416529ec9b7d2ac6541401246db5f33a37b036e5785c269f6dfccad1e67aa84519cd68059b703cbcde74bb8f2af195886ae467b088e871aa9b7486020a649c183c9873b1ebda04de7034fa426400857ee1b63b154ab60dfeaa50ffcebfb559dac1a044657efa946709a45fcd78146139b1455088ac305b30c15aed2add40c6727b8b02c01a34ced0c15592b700b0bc734f4ebc3a403a834ba7db450bfa60c0926624fb674fe75de7dca62dca1588f5410c6a592ecfac4d2be38b03491506ac6b67ea471cac089f89d6681eb7a92914d0ce29336cf20a2b43bce82b07b1ead842c05b8d58a9e9ca951b5d59100d0533841cbe421e0befe0aace6f19d74a1bd8fe59edf655a1766245f59a200d9324698727a11f632b39e2d38cd3a607f3c9ee795a4f817b7e5c94bdeb321be6c43f00e282aeef9e90ea396ba1cdb10f4b8dd6a00d35055bc59b0220fc83e7a55cef3c7b3ddec30e9e919e164e37eb18ebe3435994ad88675686bbb8e37c0abdfd646c5dce3c6fef2bc331558d77b23b518ead438598e61d643674b8043972aee7d23495802104dccb8a783c0b7e44c2327a1ee66649e846799a9b57a811f4e04197693dd4dae55cf5307723b81a99fe71faf70fd39685fb456b952c66659806ab24a76e8fe9b4c4321bb56
6262d08a43c8bdb388bb563a5e730779d4070f7a318cb3cb80db17d90b4c47af7fc208cf5370b9171a7c12587bd3b4de761921967a92856974b93648a4a5c92da0b57b5a9b7b543d279eee412ce67d190b595b7ace300ade25d18f92e8fb9db8aabf40d32ccc1a9c2e8a883240a0e1bd6d36cfb2f1883f11e7a2732c055d491562314a7e7ff6e4313ad693059114e0a43a7f791b078d3e6623e363ceccb0763e46a9088117c6806f14d7621c002e60175804cfbdc308c2242419100102c5bd6b98cab5f8699a86d7365dca69c107ffaaae57b56aa97b4f24a7997171b77d88983c4422088ff205688ee88b017d3103eb262533ac93cc853874f49a9db030b843187ce2cb57f3df3d3a01bd589050ee7204505338652c56c02a7f52f206f90924da327a6fc5832f78f67165dccd7b90577c055ae98a7643134d80ec4945ac1d482d96294baea701f9c91282ebff242f95c3511a0539e6626f5f3d73b9cbad6f04628e871986d0fe450086757da94201d77dbe6779efe8aaf35050428dbb0fd84886cde2cae9320f01a176de4e71ce70e172289739126eac13d75ffc28b5a89587e72b65fc7f27893a06b9643d7983944f79c9523161b345cfbf217c130ffc86e99b83953ed45b1155b71a03af7708a8443b096e9102c24ca9074406b33e806a13cad06c702373bcd40d75751b13570b6594bb86734a739617c0730d0ebe1712177e665a8a2348fb67614aed518b7161d35e7a5c0826f320884235fe69d2132e68b5a5e5f7d8121a29ab29a3456603f7ab8566dbe060ed4b4f43289fe4ab580b9cff2417310be881704893ed130369b7e54e660d5a068ebb68aa03c67fb7308733d850116063779f2dfe5ecee91828afef331731806d7fd12266c0875fb9d6cb8f70975c5dd57d4b6100a2d410ffc7fba3411fb824656ac4eb7f12dea1343f935202fe336f4e3ab3dcc6ff480d8efe9b83d25c9380447ff7f1df445ab9b501bc985b1623b989f2d7ff03d75a7002ea5c04230c33cad5e536e761834575992861719f0a4ae8cca2923fb0fc8bb412645f769b09a56d9d6043b8edec294434abd5d4a5201a1404a5e75e267f0defb4cbc1c23e9e8d0a76257c3ec9e63dc0164f4a95839d5ea22f7669eec0ec873201338425df09387e6fc740016e9aeefd9baad9b87977a6445406285a666ffdd59ea2e345bda5b3f18a250d6617ef84e5ef5d7a54a348f442876563cc44950b3ac890c32f45e9e5c7ac3387299d6a6ffe2c846acf79da468b28cb829fa48a6c4a43f6b9576644fbfd7f9bbf9085c70dce0860123cd6f3c9553405db40a958d8d0a73dcfedf666d92751434e52315c124dbbb485c5df34ea18406ec93805186ad58cb1fdf41e313f37f4e46807fb7c4d55823930421ce2dccb3bbfd5ae
e1b613380df6ae14ff256add3a6668c7cdf0df058668ab1632e1ab56b1cc1ed7b0452e4f4cae5350597a821f88de83f49fb44e7dc22b16b4894b285354a7d022cf5e31d3fdf5369dee9a2a0330bcf5200c3268baed727f05ac0583d55a5714ef80c5324c809287cfe159da275053d5d4c5b70f1befcb0147a586165aefbd7fa8dbc2e4d132a663bae39485afb5b67b5d813928531a84c406d6699cba11dcef4d8e7241d90756523bfd4daea5637", + "nonce": "0x15da2", + "value": "0x0", + "gas": "0x141115", + "gasPrice": "0xe569ebfa3", + "maxPriorityFeePerGas": "0x312d317", + "maxFeePerGas": "0x1c5d98b551", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0xa5d4546e1ed26c74e189d69f3c3e522141c8ffbdd1deae5b3f4c3993514b1a95", + "s": "0x60761519c3f36cc3dcc400a278693fbbbd23c39fa0db438ea6b7d64383da66ab", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x9b", + "hash": "0xa7acb2c4d14f48b99ce4aac8f519b5281a2feecb8e44a570db71aa7df2f4e09c", + "from": "0xe64fc30cb9803d72a337cf61ad5e1abdd4a0d156", + "to": "0xdac17f958d2ee523a2206206994597c13d831ec7", + "input": "0xa9059cbb0000000000000000000000002c5d7f489ab5695d7830aef46cd03db7257610820000000000000000000000000000000000000000000000000000000002faf080", + "nonce": "0x94", + "value": "0x0", + "gas": "0x10e2b", + "gasPrice": "0xe569ebfa3", + "maxPriorityFeePerGas": "0x312d317", + "maxFeePerGas": "0x11062ff46d", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0xcf2c303e179c74bec4113a0bf6488c0583da352dbf4f2d84adb44eaa2b6a9ff1", + "s": "0x5e2a76ffd855471887ff3b1cb4b8be9d2eef33ff02b470fcefb4988cb6a41518", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x9c", + "hash": "0xd09ab01d2df2ba4d0e9714f79733bae5d560712123df29df514c8bec99f4a846", + "from": "0x09066399acb9e40125559eba1959952517b2ce96", + "to": 
"0xef1c6e67703c7bd7107eed8303fbe6ec2554bf6b", + "input": "0x3593564c000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000064075a6300000000000000000000000000000000000000000000000000000000000000020b080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000429d069189e0000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000429d069189e000000000000000000000000000000000000000000004a2024e1c7920d44814574bd00000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2000000000000000000000000796a4503b444a71b331c9556bef0815237ddeabc", + "nonce": "0x12c", + "value": "0x429d069189e0000", + "gas": "0x32414", + "gasPrice": "0xe569ebfa3", + "maxPriorityFeePerGas": "0x312d317", + "maxFeePerGas": "0x11062ff46d", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x37466445d6cb71cb8c878513eafdda0fb2ac60736b64c06754824f819a9ca6f6", + "s": "0x33102c815e60041fbe10334b22fa2a85eafa79ff0de992b90cd33ad14ed8fb90", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x9d", + "hash": 
"0xeb3e892e14ea97f4c18803173da433c7465f99b8c229be46d18ee755114c5c22", + "from": "0x4ac674ab495b86b67dc4fe1572322752bda7a3b4", + "to": "0x1111111254fb6c44bac0bed2854e76f90643097d", + "input": "0x2e95b6c80000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000361f955640060000000000000000000000000000000000000000000000000000000000015ee223a30000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000100000000000000003b6d034006da0fd433c1a5d7a4faa01111c044910a1845530bd34b36", + "nonce": "0x6", + "value": "0x361f955640060000", + "gas": "0x37486", + "gasPrice": "0xe569ebfa3", + "maxPriorityFeePerGas": "0x312d317", + "maxFeePerGas": "0x12b9b2de28", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x676f25722c90cfcba7c7e1825c60dce4e8ab0843fd17a847745e8b8285806342", + "s": "0x6e9c8833a2729798c283d0547aa49139e2b71aa15b6f3270b7e9258897c0b231", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x9e", + "hash": "0x708f5653bee417468ef9efef5b6a1263355ddfa14f86cc830f6b386ab80cf257", + "from": "0xec1ecf856ed23ec51a076f3abc59246ac84ce3d3", + "to": "0xef1c6e67703c7bd7107eed8303fbe6ec2554bf6b", + "input": 
"0x3593564c000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000064075a5700000000000000000000000000000000000000000000000000000000000000020b080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000a94f1b5c93c4000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000a94f1b5c93c40000000000000000000000000000000000000000000000000000000000045a05698100000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2000000000000000000000000dac17f958d2ee523a2206206994597c13d831ec7", + "nonce": "0x2b", + "value": "0xa94f1b5c93c40000", + "gas": "0x34ee5", + "gasPrice": "0xe569ebfa3", + "maxPriorityFeePerGas": "0x312d317", + "maxFeePerGas": "0x11062ff46d", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0xa851db5aa8cf70a2e1023891464dffd77da82362c766894d441f13e6b66b66d6", + "s": "0xb511168bb970ea7926d202c225d1997d242842b64eca4ecebb4848ee31540d9", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0x9f", + "hash": "0xf9573187dadf381da0c74b3dd18a31aab68b5fc58f87648a296cabe108e5f194", + "from": 
"0x6887246668a3b87f54deb3b94ba47a6f63f32985", + "to": "0x5e4e65926ba27467555eb562121fac00d24e9dd2", + "input": "0xd0f893440004b68abb00008c0000040000000000000000000000000000000000004500000000640753510000ffffca00001200000000640753600000ffffca00003500000000640753600000ffffcb789cdcfd755c554dd73f8eef437797220dd20d92d22d1d02d2dd1dd2dd0a48888ac4060149090549414a520c1024a41b04a443f8bd44f4d1fbd173aeebf2beefe7f7f9ae7fced9f3de33b3f6e49a356bd60018c4fb180441a1d81262a1980d96c91f368aa659e095b56e8e696827980beb6409f6e04422561a8928573fac41cf17d7b228037ea08fbfa01ff1407d4fc57a827dcbfb51a17e59f2185dc42d44c8c0efe82c9bdfe25f88b620162a0e00200c1cf1cfe243c4a0e30841d0711418f15161e000020c1c56fc9f7058f5f713fd95fa0100ac3fe30f6e02467c4a18f84f24f1bf83907e7c18c601d21a0c90c73056da4d4c8e92063b01cc2ce829c2aa7f3818ed07266efc5be8af953f0c82953f008181ff6bf9935db5b07732777572b08095f5bf857f0046fffab97d65710d57e18de8d71ca5b65e047ba8aef1d0ec77fe61feb0cae74fe90ffb0ffcefdbcf57fa5bfd0756fff85be3fb29c1ea3ff030f88385c34d9cc020e8f16111acfcff76ffc1d372b471bb6ee27c95fb2ce08ff8a743b682913facfed3f4e3c36ca2840fdaca22ebf4ea470a9209423fc408427318f161cd3f12bf6c5390ad5f0ed5f4387fbf85c1a0ff74ff458381c32a1f58e3f3df6a7f7fbff40c467e7ce2673f9a627055dfe798ec184d62186e6b646086313f02e8307018df0fb37a607d3facf26d8281c3fff8f0a7f2c3df92afce28e27658ef8fcf7fa30ee6a25c9255cc913554f840597aaddb917e689996f4476fb787ebd89c17d69a9acdaa364acfed14eb1f1e6f826a77d022089675d796e50de553d2a5095acaf0325125e7ea98080a89d9ac787a01c066cf8af474610057809a7c21f4cdcbeca3facf77b72ffa2ab55d7e92f8c1fd415098b2cb7b567e506d36e2737c6c754351e1c99cf5053f7ea7b50c0339bfa8d097fab5c9e0606639884d4093449316a6c3d6e5a8b7dd8fc75b58eb7180791b108f2f481b23b3917200801b7bd1212231a1382c60e88cfe7ab27d167ad55d43457b53db75f5497601ae07b56d5941390876e6f6bf2d36ac1b748031bc341fe8e985af1f9b6032b777c1eeb5275327f2c9409a4a4724ae9f40b2c58310cad4c7a0f9ac337e6913f784772960d3b685641c3e6dcff4d17bc5666350ab183f7bf40d00d8edd98490f59c7ebbe215b1e4b6ec190cc1c57a0ef7d4763895fb4eedb9064e074161799f1e09f283d6
af760356897287afa82a610806e198ad261f5168480f63b4963f3ad26b29c702d98fa453740e500d91a4f03269e92f3158e55fbb59e26d4988c8b8ac2df44e32108060ec435083df86e2a90485c2c3a726b7984b3cc951678bc4bfd7ebe2d330245b847b1c15549d8321dff30c6abb4124fc1d02299f9adb82cfbbb306abe9fd0999236bd4f281f5a3cdf3ac5bcc5b4d5ca661c43e117ce7d3ab5fe8ba1c062d1ef420b91fdd920231a614f36fe72f055de61bbe0e2740ae298bf26afeb55d84fb8d0cc436ed41023e00017b1f0123142156e5b4f891b32893c37362576ff20eb1c7338629d38da93d44a3e9bc1c54039fd319e42100832918931d1461fb944ec7823f9830203f0e36524a97c8aa157254096c7617bb3edf480610dd18e1102088f0487000320aea3fca0096b0077532a2f3bc730815f7e6869a3e4cdcff7135543cdc1a7afc68d4f7d0f10c3ce8f8674da8f82d73a88b4dba24da1868f897c578c0f07bcbbb578ace079442b8908a2ec63f6fb9a84770a1605c0449f7068d889b0ae666d00cf256d1fc1a65c47df8fc3b995d41d4a5278864d30951254b8bef25158feb843d3ae6310e580d5fb133ac3e5e129ca69873b5e67daf36694b37dbec5f6a6e2fbbf93404d752bd8de59124e5e3fc11a245f1d7a578c22f3b89594bf6930761f107e03d637345762d1fdd23559e62d021344df5cc0af5e5b8f759b47f61b320bc9055deb1dab6ee968ac102ff3b093b5c7c5151f9723d8c4eaef9a816c8a8763d3d022e09e791bb6ad083a369b534429dc3ce8268c68dc3c88b1b9ca551b20bcc5d44a2179c73ba026fd709470e7546a7e3e02419a549ec31598e6c0d845ca65076e607cda48a6998b5432f1c95115591d0e6ce5d48bcd1ebf4fcd1ae7b77bace80071b1f88a3449c7d684cc8a36bf25edd9aef4d64f50b994771b81fcb12e897ee6f9aa44c7f9d1ce05d4ec703f8b8b2646f0513de586b695d95e451b232263a663d398292a09cdd87fcbf9dfa5149945d410040e503231a732eea108fe80f78f67f46e6b7377070bed1b93fbeee2a313da318a0e7d50a6a640901a28445243ef20f7d6d22d8da690b068deebd6b481472931528ade7180500ab3d8b901aafd0a7a440e8cc6453b212c1bde0154c96b96bbc7b029af7eac7e3535e510705f183c4dea4eae7964cddba3a0d5b0f0b5818d57a6cd898ec9b96f91436638b51e99041e36b9d049fee51ef6687e292e32719bc69030f375aa8d4502c232894104eb8334fa7a150f84485d30fbf9b12944cad872cdb8ac47a776a51db83a96d50d8bff8e456506870a4191f1877e285b423ae95852a1d8e419d685e66f241909d50fe6afb707fcd2742d46729205ba78b8dea835b4735e1d7f829f197cbd8eeefb94a6d966b
d345a52c4c9e3fb207809ebdae10b792d3ccf05482925d1395771122a254854d31ac247d31880c7b1b2282aacd5fc7e775fe6e6572dc24ee83c7f7fbd688b4aea7fbb60200ae684b2b6b8afffe3d3e9054308c5e324d64342f7f4aa06f90a556eaa65d7e7afa72462997e74d9cd8c279908a63d52060d19fc348e0a5a3a84c33329756357aee9bbed8e6970cf7042eb73d07905fee23778760d87dfd9ea1e26466a127eda18d6d34f7e112383f6e96bf21908d4d82d4209b8f3fa4e9f931734e7e451274469eecd65ca7f017ef10455bebead261f4325822e7df5d12fe0b213dfe92003c2660381fc5b7851538b0586aa18a041e470d7df66543cc9eb0c52a4c695f69120ebcf081b0b665f4fad3d2b48087f25a45af8bdd230797ef9b4c56f21c9a52c15f7702e056bae9193a6cb5f7a614434f945af8d0f8b1fb74121c575614df0feccedb255f520ab01636e99bd7cb6fd45b8a431a950f0a1bb4d7f75895520184cd9308d24b20081c4f57679b15ed5e5348315fdbb6dd63ba38999823dafd628d29a8595bd1fb4a8f023246c7cebaa684557f35328a64e593d5f1acc8a7d6577ac45ef3faa57644c742508809a204d00af33484b0796ee4966369fa0cf4175c9a7cd5604c7aede50943492573067f33ee6a94a360f5fa0ca7a1aec21bd5d6650be7958f841f2a3b044803f200548cf7f7a88c9bd267e7ee1917cb62e7ec1b194216d83983e12628438bfc2bb3b0ce6313cad8c92e7ce48c52ca30b1438a202aa8710c0ec43d08b375193e11782b0ba091cb5bf1f4a05ea4bf6a2c34bfd1866e115159a828aeadaf33e3d74a4f5a6a50a12bbfdc8c5f53bd75ad5d932bc887b17859b53b5897eeadb5f5166281ddc4801b04bd31e879ee15b61d83b2e5e8799c854265093f616fc7c14fcfae107ff2507fe629230e76caef12d537db8a3837ab8605f03c636f40f711476465d22442a5008bb2008c01ce861b75cfe8cfc549919b8cb1628cf09566564aacdbbcc6eb98e1f2cfb108c02e9fe2612b79dec1e04c7a879c679de8421c8be6f65ede05323cf2211b1b82740013b9457b00ad11bb5c8eb084221f232683ab3b5e143b3ce491b4dca08353b37c09caa0129e9e84addd9e1413248be99121c1766625db788c30c10172430ad9340b048b25f7d5307ada07861c4f29969a456979b5a5b777fb25b1478482db8742148de438ba157a9d2ae380e0fbb3caafc2e7eaef499675dae2f9d8b2967d5660999fbd06c1965e96575e7ef4c1b5c3b0703f9da710590a27317cc6769bcb7ca3f401bbef011ea2e38d10d308c5a78d2af1b8acdbe3799bcea437c66b479cc29348c8aa77b216005ce570be2e2a0d421d1501ca680685ed46dde995d96a12f3ee218f938d76074d67dfd225d4d73d26
57ed1f49f83bb82623ce47b9f659fb3c721ca6cabd24c61d320cc183f8bdd3c3c1ad17a067ba500001229ce06316ea7a1d7c8daf8b0e717bd35f7eceaaf61197c1ac30220167c96662ed4557ef3b0b857ea9eb0677d27873a46e106f7f915db54ebbdaf9bd5e9ae6a2049aeb1cd7d19f81b12680fe187857526e73e763b7560a7f53dde9e21c8f95a603a2caa52d07c003100002072002e90870f1b5f282bc01bd737203f81e39d22f2fd45893652de1ce9a56f252e7be8d0cbc4b71cbc1ef3869872448856c2103726fe92dd26a13f55b1a749150c4cd84320000683cc2164ee511c3e54cca939393bd2f611bbe74a73294d82e6f1ae44c224501be8adb5a67bf9a3ffcfe94c859b8c65c13e5f32c0dcba7def91dce13167097b43b6e8654b3ea141c875ba7e4c05f41205c197be3203f99bff121c97379b949806875c14dca0f4febcd403469e24b6d620000684d261f9c0e73a87a596267ccd1aee43ee8f81286209291fbe517eedbf6ca2f98d3f8d744be85b77b8b3d139be8093cc82a2e8b9b3e788caa95756798bcf94e11fe6525c921c7b91313f9f349b878631b0dc1b2bda4e6f81ebb993a74780e969a4191323c9fd15e7ef9e80f591da79236626285f137e6cc5cb4bdbe84c1c3a76141cef412e7ff85b91f99fc29916fe1b17ea82e787131b2e6f3da1f46b8684506ef691323950a5a17d97ac866e4d29e84d94e365471a007fb7d34fa3095377c8ede031eb3d2442f39c7625ba81f45b5120000d21deec08c2f854394a44c7ec61ce9d8c5aed3bc72733e9cd616c2efaad52cbba7e85f13f916ae29e492d39ce8ae24da17acc0d29bb7db84cd70593a30b4d0ad953c7ded11fc15904e8de78ea8ca387bbc170aaf84fb83f2e5f68f923716eebe925ea8b4cb3ac6ffd779e68cb95fcf41bf62ee2f131f18815730cb7529f5b8be77149bc57d0cfb568aec67498d1a17e6b16452b4bebdf720010e8bf184ae9c69aba5155fa486f8ad319d8866af3ec43dfeeaf66a24ce273d5fe5aebcb5506c098953b92b6c9d9c0dd956d7accc9fdf6ae76a76abf34d6cce2f72577fd32385a22b1fea39f06315a73d2baa3b85fc556cf4baf5bc735896f34e9a7c40c1098409769d0a5211b277569ae9f7b47ba39efb3e7d5479d3b3ec5cdd9d41217600d2bb0fe90a2e38951c10030992fbcfa56e7d2a985d95a2f75e29d34a7846f0982a3fa806629e56ca7c07c687ffb4be6822084c580b4dcdac98d8f75ca91626964a7da1032d3264921bc6fa15f293e4f0b000dc55d410a4ebb872732ceb9ed8ecbde9ead93fc9ff2f10aa99b58da3c9752757585ae1df109c9333f417f84077ab01ce26859283fce1d104adcb946fca626ef49ad1dad433a05a5c7fa474f0124416d11d9abd68281fac4a
d02da636464100a906976a30985197cf6fbbb5362c7c5d3f60acfe53e512bc665ead0f0900001291fca0cf6e65747c5feff5c3b74145e1e18d9a9161efa931df644d4a5cd6e41e4bc6e2072f38d3ab382e77908669c44b45eef426d6b2484f618a3dff5c60f246667d9ff6cd576e302f7ee7465f71dff366b80743195dde87dbf141c2e1ecb51b30545da46c7000bc73243fd8c39c3676f4e195aa3fc6b55563b8fa20b5d84b636a92b1e0b0a22b154a553413883acffb52377fb5cdcdbaa8a8678f279aa466377f75fdea71118fed92dc0bd3170044691fa210c2c070dad8c9c38292ad1fcbe8701fba3ec4eadd0a9f9879d1106171241d543d833073adf4471660299661294eff3741b67e7cba74c4e97ed1733beb7d5c34f602cb6250fda18e1cb4c8348fc97edb17597097513a6fe7f741cf1f00f8c1e5f98675e304be0d4bc92e8a974f4d95df1e484cdec52651d6dd95a8cfee9eea07157adb6bad08b2af15b96458a7eb2c18f95cd3a7db5aaf2fb66818d43689cfd4feaebd52fdaebd32c021a16d4f7a81667dd2d25bd623a022a852b3f2dfd25e41fe68b7e327edd590cc41f360b7beeed6b0609f8764a03880887211010102878a88840cc0a3fca30cfe487b45ff79341a1ace00b0404d1f268ef406aabe94018317ea560d6cfc5905541cd33b102a8eb30d75ddc9403de2000dff2275584f792474496c78a96e57d13339d2f1a64bde777e2ab259c0ae949fdd2d432135c5f998f3558cc4cc54f055db947246df289c121ee9d4ed12caf50fe2be775e9529c8860a9d9f68d4f0681a156d4b2bc915ad7b1d4b3e6a977cb283100a1199e03d6ece9d791a6dda1c20a2c5fb9a9425a4e27eb5ae71972676e636a9665741a73c2cfe1ce447f626e8de87b6e11aaffb20a779d79ae9743fd408a94d32d3baea369a8829b36bdacd47b25c1795056abfbd27e58307c941546fb96dd9b16b6751aa23816f5d4acf2d5c84632ddf1cf38274cfe41c26fd66daf24bf28aab9d9fcc1321616e6e9df8a6e5fcd2dc242d65da700a15dd8c4cf5d3109d5de69b476ccfa313ba53f941574abd770fe492b0e81697350dfa354f7234a3ec5b0eca1bc29c6f3b2745dc6500e1a49234131e5fd608afbdbf18127410a563b0a6427b3fd97166d8725255968bf86c8425303f1d1030fc1e2673362de38886520da22110f3afe97a933669a6d60555cf7c020c1e7cfb701491a6265e93d4a1ff796e4463423882515c30f51067a643c3ee23c4707caaaf98439fd1e9e62c81f855fa5fc43934c8990efaba00d7b19280ee5c9a16b50eab8eefa337c973774f3bcc196ece057f5da0e715e5b948d98b0821c6772a75f017bf8cdf90b3f4c30912656dc876a89736f21b304afbd15bd5340bbee27c6040e25d
33fb71d6e4adba8d62a2170ea253a1b1a24901281f4bcd918555de050582b88d154f3fb9f8e4cad4fb5a5d40508d12aa2e78770f819bb13b915b338dc7f4d6991cb80e530e6c21dea3e17f72f1e676f4433597a9371a6b152d4df8c7277af469933bc3c994de203a37b7d7aca9cc5c9a4cb0a5b954c9cc79816991e00137dda00850936d7d6bf94cd3e70d53d3378397e5ba5d31bfb91ce87a9024f0406cfd39835684d34df4c427aff94c68692d4132b4c41d5ccf928ce93c4840fad3ee95cd748485965089388eb1edb45b769f22cfb680824f9b0884193df957925c50750e95c4dd573fd6e76ef63bffb8edeb17b2b9fa5aafbdc8279198987c0ba3091857d4d17de2bcc58aefadf609c5b9d03b44d430b4bef681836db7dfa6667bedc13518f1b11cad4ddc9dad3d3805f879d97e25bcf1831cc4844a34e98327fd854f2c1a9fcb124cbe6e4291b2478ab61b50ae9352094e00c568c9d78ce92cee3c050dc0da6094388af54472b381ec488d47533ba877e458014ca77d4cfb5014fcdad33d31c2a9c5e4e0f9c4d90240f87947a3960d88cd5365e051461c54838940c7ee0b6b8673fcf1210c9daec3d7d4962e2db7980b4fe5a1fa3a0d9a2bd4d808491310192226da0697d28fc053dc94aad016518f7e64036a3d413de5b1385bee041f0a93b8ceed70539ffab4db42ece90f6896a40dace146c7426f1e511c3f7e55910b833f18fbd118b0f6bb614ca2e8b0ec09606d3cc1ff59fe000c7b3e3818d1018d3d35d04bd844405be4a3ecb800df52286b7d933b5597270dd2837e2b2dc41ee492fb8a415f092cdb269bbf95a271f061c45859425f7092d640c68cae83d1d2b439fa802be0e3d2d97bb04c607ea2cdbd8d20905f7eebf2b146c94ee1b56a70d46b94df0b7c8f4fc60a3f89a382d653ad056f031e76ce94b22e95eba1bbb46a245f9b537e20cc1afff4783e15f173f412ce5b845950e5fcebac4bb8be9466255d9abb51ae823b88d9a376d38a7a5d29eb08542f0982c10165e66b7d1568156defac3fc87533ca083aaf865f0f66ca2c1b1fc9cc7d424b010ac4883d8a8b63c80b1a57c916625b91db7864a672eb538335cfb4ecb5ce87f3ea206385d60e7d4125b7cfd166cb118d6db611bb51977bdfcdd237c7c111919a4cb7c0d68638ff375a5e6d2d959239812309bacb7b0d95e30229cfcbdfbcc40a13b82418f40fca07657d1f6595b20665116e1f651612aa33a057f33f3d52ecbbf25b8c12c0f91e8cf393ce9b00f8f6f8f50d4a000812fb1e762a59fe2f8610ce1a4ed0972c507e9cfa70ce663984b3a8df89e187ff70a7fd0eee4b2c84b37c20a7a16733180401070e0048bebffb1351520238383f738402f95987bf8f3cbc773ff9c070c09692c7b2bb9e962347d8b2
66c89597617ecf0cbca93739bbbce2aff75ac545c29f3c6c730c9ea069b782915c8f12ade3454b0ddd4f1d365ddb47c37435bf483c0597d0a58a86a15fd5f1d24f2f10f5f297572afb3472f855d804b026512d480e14feb6cb43cca4bd0c6200e0f7cc8d7d522f219deab86fd35c667dc4175b789bf9e186596b1611c3f635a56d9ac76bffa7ccfd1f96dc37fa3795dc3fcafbaf94dcaf12febbccfdea03ff2dd5faef60ee3f54723ac9c4fe9746a6e8cf2b8d300d71288598c6d5500dde9a04c7a95a099fc6bdd162e9bc83273fcd8428583f87e2833671b8b91b5b49acc093f4fbac119028542a9ebfdb7b9face456f294b965ec52b4e5eaabf50a82304896d7d42458d6ceb56d444e2ac47239648b7cc6c5ee491336b641586fe973711dd26a0f1f956a585ff5a7bb8ed430de4048823175efbd4f067bb8c79ce4cef5bb519b6a7b331dd360b5b4074e82cae21ff127840726a4cc232b693fb7c6ec147fb247562a1e19afaa747fc2122c03f3d39c60e0b03e8de0c7875fb598dfc6842b530fbc68eab40f11fc4db52d36ac1ad1e610c5a9086d29038c37ef0951f3d996dcd4793156e012a3d9bd1452f3c7068c4fbefea0df041c7fc9bad3df1662a1b4956f1d000611fc6a34fdddcb7b63bfeee961209cec9d9eeb2f09475d5cf00654cac7f18cc6873af0a50e11ec0d91a5e85f124afebd01fc17e404aad662d9e67dd0f970cf934c4489f2e184d22d74e573f373a5b81c23f57c4b3a19d57f5814e83761bdf12f044b7e8645b0ac9d217ce0fa95851ab138835d260d438ad4684eed08eddbd5b632bd1370e5d5f72baeb7e882d6336f684942d925245346eb86c9edf2d88e48d6038e92c23fa2495b1c9513730140f15e6108c2b5d30528ba4b59b277fd8d012ea9fa98b49bf4aaa9acda9754f3b9dd82aaa5929dcb1a7eccdb5af3e4c025e791fcb833268a3f9f0fa39fae460454662fa302fce0b155bcf43c7fc29b7d86c5e1ba5ac977ba1f84d5e7b810bdae90bfda656fe3ac020986676338dff34de6f7cb46cfb55d17b6d18dd097af18f3179875db2e51d946f8ae6555fbae658d39bad04efdecc26b7369efce172ba275790d706ffe6b5a56c8bf4dcbaacfc19db98436919c72a564c9c032b1054010ae47444245049051e0e011fe59367fa465059cc499a0e3d26dd071d978e8b8de2be8b8730b743cc01a3a1ea20b1d8f29838ea7c0483f4d0a2a0e0008a4568ee9f4ebc78d3603db84f444e138c5f7f108366767acf43f589a91c56a751f6b32026bab7964799b2cda489bf39f4c3a99afb3958c2ac2254dba752fa8bad329992ae3613cd0c2a8c1da5995bcfdfc7992dfacfdeb21206ee726563b31f7cae7399c1b9152ea776eb1a1aaaedfa81012d2b50079ecd57522af89
d3f561794ec2e2cfef5d6fcfb9adcbfb668556cbef49e8f6165e788d94904df6baa3f5738b590f29129a17ee5d91beb0353a26e3f4aaae96b266720e82825ad13e36603836a23b9ec75133bdcfc2184f4f3b6b59be585d9a43bd8ef5024dc1d84bfef133d122c67bc2a44d9431966b5d22629b42bee8af27223f694de1e92052acdccb24d629663eeae703650430845954096ef1c6c6950ddff46ae7747a4caec947296d3cad4eacb023c3029e93876c6421d27a8f6ee02a5fdf2b792bf2f821e05e9f46ce90e95480d2a0fe08001cf6ec42213ca9a703c2cc6453f2bd2ecf24c7163b2c0bad801b7a09d5d4cdb79a3cb1c2e1102000253f98d99f2c391cb6a63871ff509e9446977f309c987df2892c7edba54965b588a10690791523ff3d31f9a5a1408926597973fa9dfd0e870a7aa10a5a0932be3a82d0e233ad6ede06743564f5cc1c5fe3b91f0b5b8ad926c907b3345d4632d5e56a60d15cb7cd61298cfaf95b0742e12937d8f4fd2a9d190bfa272d0c893fddb59cf1fa111fa969e7338fa8decdcfdfca1e28d1db8d41cdcbf9112f34edf32b5c7f318bb95430f0aa0659961da8f8698ff8ddbae24879f66e1cd3dc9ac4a3e1609f410817131fa8acfd69c395e625fb6d0f7686fcee609d1af7c92a5fc36ee6b0ba37c96dbe1388a023629db5ca8385cf6396bac3a148b15613800bc763f36d7a024f713a8f6e03ceaf13d085c3efd5172400a7365aed16fcd8b7d9cf5b3262eab2da907050b5545d93c5afcfefc1255aeaa93de98331d64198103f017ce0a4f37573488cf989591b664e183f412e870e4db4e5b684245deb27c3e7b41881e0c5e30191a7f747e987106871e01135aab08b373b4d1747ede98d7543bdc5ec3201c07ecf3614e233fcfd8880ade10022d5d2a4c3eeb5f7e3953eea0d2efb04ba5f8f08f081f3aa9c019fc62f1f8ac55dd15971b2caab93f7ba5f327599d5ae54fa06c675df42506d6985cd156269719598865be489c7707b3cdd83c293dbafb6b1c5f6a5c319700088e33ec42ec457e37b7621c52c3322dcd6c23acf1447c5ee18ba15aa6ee906d54010aacaab7e1a2ed9cf0587ac68ca24dcce014d4d0b24cf4bc8d0b8c1684eff94905b14ef5d115a9748ff93f36be6c81ab7ff94113e30b811635ea6199ecf27979de96a345cb91399e0a043898e14ef0e736e0dfd722b8888f2d64c149dc921f342027ecf53b2073e6ebca06879d2fc1871ca1d72a4cfbc00c4691f621f0a797c2914675ff56f95783acfc66b7dea99a6443984a907379f258d575f69fed32ffa0d613e5146b85ff17bfdb12864aa2fac769407ea32ce1c59c30c5646ce16aece962666bf3de0cb073a95cdeebb1ef2b94d8a3c9512988aae346b0a6749d7ddb08c3eced99974917c00
2a65ea2d2b150870ccfaad70ddc033879b6ec3d51bd4f1510c353e22ec7d695d0c00317b5121f098a72d1ca1e865f228e290aacda842adc56d3194937b9f0645372e9204e5802eacbf9d8e11185fd990649c9a8ca8b15b9b71b617703421e70c53694210f84b03afae8f75d77570cd05ea90af675619cea75630e56a96545d986e0b22f529fd2c80dc5878cef1e5b3b25575db4f00dcc77db8e510a24fdf87a3a49ae1e6019f927396e119b1a4fa7d7ab73fa46e05d5c08525932868c12a3de84472e3c7a78b35f6174493de6a1093aec5052e1cddcba72eb6f9113772b9a221bb5ef921f3f6dde9bb62939e2fa3eea7fd59febfa7b552efb59de4a2dff95b401b8cabbfd60e63316b8e12fdc7fdf9ff9660ee4fc022f1047c4b624ef9159b13c55b2190472fd73344a5b38c47d6d263abdf2f570560266b7e1215786a6a46d69897cafcac5fc8657d96ff3296d3929c9225965ac415ff0a521819f0834496d113e756eb7344df43fabb9e8dd1270617761099f68b342544918f6ad092830c2fd97a13020f2d76fda5303466e972b2f2df69362c56063fac38cfd82ebe6779b6a9fa09c6a62a1ff8866041b4a2db14a1aee69db75bed354b53b89d20dd54eaed4c7585730c04645ca0f8a51eeea5f1ab4cc4879be47372468ecac7e7786ae10280a91e5f2951affa6c0028da2b084e83b5d03c297cf7d3bed94bb13bcc2481ede2390e39b5dae75fd6a4b04df5432d14f2a25b7c608b3ad33c9a225f96c60ca6428b77c346bdbe73542077bf1d753be57ba467c76120350b233631885fa92ce72d5f3f182620b8ccc23d6d8a058e5009094ab4b9199d2d34e1d762be2f346704f09328f54c2d74d455e7c18b388298e444b1ffb5c3687f64cff3d3421399d32f3ac28a60e541a722952cdded1500f2400e11011115091e0e19f2cfac79fe6ca189c0c50f353202972f547318042eff4da838f733a8ca23041e0ca83bb108bcd250057104de24a8df8fc07b00e5c40b00205cdac8868a0bd8b442c3bfbcd2548453f056947ffc846df7c9dbb5cb54f821540b9e77ada9d6fd3b43bbb18ccc5da7f6881de2c472656aa756d904963296e4f273eb837a2edd150bffb8764f29c3722a2e1e9344535060ffc66b71ab81364b51ade90540ba97806f9ef3262b0f29a35635ff6d673734a11b6378ad44928c9b0abd24cf16c84a522708d481d6c73cb0f813ddd29d7a11b0ec0d8f1aba3c693925a685c272844af352a5790071608986c096ffb97bb67235654ed8391ae22e4b21487b78c9a1fc3b93cf779a5e549b4d25225021ab52e7f1f5d03c997f44126572ac5dcdb5f0a4907872f645282ab28506d567bdc699e29b75826543a8ab87cbadf0b43c43892a1632085b45341c23ebaaaea5fce0f51ebc
ac9472b682abfdd7c99ed65e7fa9c8334a3fda113501a98bd0bbf5e14607e88978a3cb9ac2b27b36d88aa7c2e3f012f1f5732f34a915c33209c355fd2c03320160656f29c4ac2814ff1d652842d244f2aa3c15d34e1a39a33139cdabc3cdf0369c993ac2a0ea30e5885a6118c5018d206b9b58bfb5c556e8d20f157c4f0b350173640d477eb089ab29032f30d4570acffa0ea7d6558bcf31089bc32695419ec83e0e1e0736da203cde745258b8abdd85352f9e017394a368cc26b8dd8ff9aa62d712b4cdef644603c0d1de4108ceecd763e02369c9e53b9c8563961ead3db1ec9d231517d60507c29282aa939976df38f6a8a3c99267330ecd9b7294e35c96ef47f2247d3bc9c49edc12583b69436bfc86b553fa05424e657f44484755da79ee935d25c789c85c01e1b64574115273640d6f0075af85f85786a5de9d78c7db94cf00166647ed1549da29e3cd385534fa148fb05f599972410080eb0bc8076e961aabeabcaaea8cad154f6e9ceaec7b4d8b985f93424356c4e0364e6271f906e8391bf61057a7144950ead83d0c639663d3a1c2ea4889587a6bd75a6c0ffe0472362f6ec234367a5adeab1251e2cad657e5c6dd2c597fa215e125738e8ff1724199d15247801108e2dbd871ce7309b3161255897a7ba4333153e8d8f7396d87f8c265747578445d0680aebd8ef35f0d808644923b1a89e07c78a691c7530cdd42e35092f598a33c83aacd0549c9b47ffc585db216052ac7b2b07599b5b9d616f9c62ba3aab0dc7fc0d226007ce08e0cab6667107784779893f1b8e1dda0e5bc24cb62e514667d71d72905c40f16a0d27925975bd71302459077efc429d5475019690c0e82e8aad3284d8e8fa9c0ba6fd3a6fee1f76953c5e1f36725eb9d1b1713e7333392bd9d3a20daceff0fea67010fa7b1077cb17c97cba66bbac8b46c01f82c4304000582800887840affcf1c93fcd1b4898511085504c7c2820c41c571623ca1e32d17a0e39d895071dcff75cce4df8ccb43d5df629d778171c20040b8c340266db9beef85f76e8d763f94c9428e6661fdd3bd4a02d7d495d795a48427ad241601259f9989ee6f1eb28206b34069cb3a7ca8a7d1f211267531cd4e03db527e9d10e641e6f1d4bdf3ce6f212b6fd9a5a85e8bf43e9b4a112b3d5085c3674bb806d4bf18984d794654425420ffec55b0b25eb72da78963941b7348ccce1d3f58fcb1763feb8995bb1941b1d8901f6ff832bcb359776a09b8aecdc3c26bc12134fc52adf9b6143b590e6a67ccb6b7a7d6c64125bb01f8da9e9dde6e7ecab86df6f08a75fe7ba1327bcc0e920d31d28fb1f9017326d7318cb48a2979322775e2c778a9e5780e65f1f5ba88684f4806925137097d2117f8ed7dda4fde15a0fb78c3df46e007ed63f6
fcbb5acaeafccdfc0e2277d483d028f22adbb1de63be05b15e18e980582049a12f5578d4b4a5fd87d738d657c3ee088d569817e2a9dd61a8172641618c7a7aa6c11229fbdb1a2ca3427ca7b61b26974e86978ab900cfd6087d646c180506abb7ff0e8764d3451d3eed80d51fa19339b2064c0d9b958d93a789a3a38d958d93ab958d13f7bfe2fce038eeb3c1742396d9b96df83b04e39685e74056df965af320f6c1cb9249d3980da02ecf8bab56764f3cef07b81a653ce1bb17e612b97d597939312da5206967b4d9efdb108c51f57d080eeec1a0f6dd6fac46b941e061709467e4954829f4df73a3f12763f04f43b0c7f57569d93efff83a09910263bb97fb00a2db794478042454446414b87fb83efa93211852a30a7588facfe35a92ab50f1abb770a0e29efcd05646909adbf0b550f13443684205a426f32e1114fc0b21f8f7775c63ba430350154766ba5cf16ad47531171149d091c9eb2eb932114dbf71df2fdeb2072f59b03fb6e686f3def9dcd24b6eaf2b8c501b1e6ebeb25a9f7b7072d731938048fb317995864deb443659e3f9164a992851402732d6d5c0eed63cc95b340926ad4f2551b772e2af1408af3c89f4cbe726ca2aa66f254a4c8aca61a783c51f9f861f5ceaa1bb8a7a897b51752cbc0fe54b3569e343b9596477712347ee37b5ccbeedd70732695c87f2902a059b77062fd53145824f1f1c01f81dcca9b15156d771b60ac8266aaede62afc49df65c4e88d0fa34976c1458f6a26da4819c4446c8f076645313f6c3a514b9f755dbd7f4f1512ca446031e203ab08f9b1c536ff86bf383f6ea4774912e0af64aee7b4fe03bcdb2051f05661204978ae0be57734a9b31d4066de12758cdab660374bdb9e93b3a47584534d542ad1b6b0d06a276d144563cc8cf86601fffd3018168daf82f0fc1dc42e4398cf9b198fd37e79bec898cd3069d24f36114182cfa5d37a5616fa065bd8a27f3bb0625c4102a5d99668045f99f73f2a7202eab25aece0fa2e1564c2eed7a795e43f2dbef22dfa3eec1e64fbb4a8660ea626c6880f512f32d680661c0959fdfc6693081c89925c5d2647a15a36569010339c23d197a2b570d0048dc3ee44648fcd7c51342fc9be470fbca96ee67196c548c82e60a59fb97843fe22e04d54068394d421efcc8c2dff11a06d970259d9220ffe928cd2ce23a1385eec9de94ecede68101a7c6970184b00ae4977b04942a2f0d45e8e04ca00d1f5f29a7fcf758588266ca244cafaea3eaefbaaf3f6bdb65357ee9a7791d8190acfcdab95b8d83239c4388d61f63d9b8f840ac9c8be34bc40718b5bc6e75f62fcb9e754d5d4adf38548c9a5f6f57511bea8a05ed791132462d43441eb68c55c7a086e83fa20b7f7aaca55de9db9a5a6b37e23270ea50
05f2a53642d45192d748b81e7a058899a6d22c06195105628b6ad43a4742fcb18e9732f083f8c155eb238efb26e82f663d7c1db3c3b6e6426eb8b0248d8fa2af356c8035d5055ea04113d147f32b22060b59cdb5f5c25d6efaafab5deda6ec5e471ed1c3cd173f4139ab7efc2ba7d58f4adf94fc24c28ee6585f3ec13de182c7c0d11c5ce4abb0d02fd5af2c67f9e2c7b280e5b4f4df7df6b2ed71d46f6d38c8c6072d8c2e0b42ad3b73640dbb1f9f7fb55cff7d6cf895e6d8db50951942a99d437ce06d15b7f9f9483f8c8b57a2528ddedf8dfe94a84f65e4128ac3c492d13e3ea41504bab635cbeb3151b7b861daddc0d8f83cfeee5da0633eca87cd848c60377c92b7490084751fc21442a8f8f560778058f216b5b78c7f0869ef16baea62ee9bae9ed63e3d8ea0ea564a99571e3fe6ff8bf2fc45d04ff45b8189f4b9839572078cd8f0987fcbb613d5a6719f345592f1fde88298d5389b6e3cef5c081f78b496f3295f43667de5c2ae8d3d4ef2a3779b1428b40483b641859a3948646adca051c2703eda689d69294ab2fc5ea24aa8e638ee6bc888c0b86a6cd7cbc798e1ac67ca8f2d989b02efa7893716d70fe4c2f5d5a9f83008e779383cf4151dde0e13513ebc7437f6fd0a689dec122b22bb6a84d1c86f436baa4c2294a7dcdb6baf6b91ca757f6c5d30dafdebeeffa4cc57f587d1ade4e4329442403b0d1fb96c8d453ef57dcccdd9389da06aa902c4b4ba1f3f9c28e1aa7e868ecc63b9462a670faa994413a54da88b590804fb153f98a08bf97ee965af5e80dacc7196b73797079f76c6bb39e3b4ab2643922ed62be420dbfe70198377a1e2118ef2f8a4badf21d353866d220d0fd5951b7eb27e818d07df64ebadb5ff39e41b81355db0e5ff39a21f6d857c776cc893d7c9e5ff41f586760adfc764f3be2067d9f29137e7094a0038030e24440009051e150e01f27fa0de40a2c0873a7320512a424d1f89ca00aa7919129df56fd5aca7f8c539a8fee291e8b97eeb75f12b5eca0c1567d183ea820f89a34d0f2ace6bd60b0d3fdd162713168e2bec8dd6407b95cbe26d92a20ac9c86d0e7e3b499af700df5bc5d2008f5fa797c78bdf442eb3d7377bc9abdb8eea3540664a2c2218a6cc6d824fad4d276708d7c7718988cd4e9d48d2677376dfd21d736f5ec0d66ba2699a58e78eb51d8ee9c5aa839631cf2bcd1142ccabcc18e8cb1d969917090c457a5fb03afb84c0e24f4c2cb08b2099b6f6c2ebf1bbf49fd9061cde873d73baa0e5374db64fdc32651b892cd77c6112ddddaec6fde98cc28d4bcef5a257313b967916abd49ee9ebaa1fe42a91ba9a7d7e91569468017c1af0bdbebc52d224195041bea093745e6988083b486d22545710bddaf0caf0a5c3e73ae8a87055689f
c71b4ab5fc0d639bb1ee5a2cf283e499765534094a89b3ece7d375150e88f3422a2eb32a8da70f48be78b92c17650e3a92b35126e18c6c381c3f5fc9b358f8d018fbc0e3cd12cef245a207136f1e4a77039047fb90a21008f9d7112cff63329bc427029940f3c0b9c8cfa9cc9bb563076eee1641351029a5b86a6ae8e521f5538369e7bc3f114e7353d74aeffd964566dee7226f57a8030ad28a340c07e752501be45f30be87e54057fc59cca7be93891092103cef87ea47878113ac0b5c29c0d3759f0bc8d1bdccb122ee452b3b14cc90c89746bed53dcd52b60af56de711c5ca5337969fa099ca61bf80b5e1cd074e22a787edaf5db135b91c3f786198dc457b53f5ed5093e2d8099a8800c128ad09e8374881153119e671dcf9705cb337206f7edbb0c39adb3f543f99e7fc10fb483d0044ef45527c1dbe8d62929be1b3d8196fce8b48edf65f53d1cdb9dc784ca9149413f5a043eed7ab8b83c76824e4d1126200c00fba34b74978a25ddc38b9fe4a34f8d6131f44a68f04030b4fc74ca4a4a44de8a7a741217b791ab14f15baf128ba71288d11b5dcda771946fc35463ddec2d76b21d0289d6e898740704e05d33eede45f897741d552a8860f1fff580a0bc8ae4b3b5393372c2888d03b096f30e9977641f1c0ffd503223f3825dde18b984d3ca52d9ea29962ddcbc3122d9eb01beb5c9a6eababf6f4e3da13d0e885ade3acffccb1b29ebf83cbd0fa72d7ab7503d6fcd1781555e75a8d2579a76f931fdcc0f7c9ef02d927154c2471a396ed1b172886c78c3a0c1855ff7b5be2ffb6c9ef16f509e31e31e8751eadb555f1b6b51980e8ba8404c023c2a140909051615dbbf16bfab3c92fdf1aaa6e1f297f6a1d3abe3c05152f9025838edf92858e17419f9c61e18517cf43c54b4aa07a3c462add7d040dff32b928d9b30563bae1df21307f63d27af98573dda3d48d18c04beebe9ca96c6e523cf2ada819d692c33039ba5df6fb556fde2566ae9488e018b395bddd1b60bfc704316117d5dd3667142569392c7a4ef896faf592fa08dffe183205c2aaeafa10e4d17cef73bd24c9837b24731156591e20aafdbd9368dff849ff3725abe7a712d13d6ec1e2cf332b8c3bac9c30013993262026c560e503648d78e1e4a1bf4b3b6908c28ddb996acea69b86ce32712a0ccdc20e9882c148fd9a53ab8d4cb7bda4efc625e1bb5d27a9fae4b25a7e9deef94cd88602c62b60ffe928d9752785c9b0f022a10f937d62f284807aa08c41e7ce307144a6d1f921f6470cefcf1b7c303b087a37af6d7b339d1f6ce139b96db4de0b2f82c469720f5dc80cc89500ee2abdafb47c9a8bd0325bcd09525aa4641478b333d705bebb8bcb716d77426ad326f5e3b842cdbe93b845fae3b5b375c236cc4dd2
0b9df5211dc382f389965bb89e287422e151f794260bfada92c4700821c1919d20caf20573ca8fb7af10e1dc77d1ce756fb88feea5d437559554d02a42fd62c0d2facca92732f557a79ea43cc9ba65c88b4bf1d16536ada2f1d2a14a1ce12297eefdcaa96775590f5f525de3c8bdc015e9907862364d9ab750bbc67fd1a9e7c7782589c5159d4806afeb62720969edd3706fbaee47a369f766546bd304a81a315ec30a587bee3543ea65a7aa60172e3824f262d063f86194e8db95d41d35e21108a446942496e7968883015f5db0edf15c374dcfd676b61aee8e144569769b474834a29994290f7175bc8a429a4b7787f1fd19380d2cb46c179ad9a496cac5d6494c08fcf445069be9c5d665794b0115210756f7698ea8c170b71b9b045d7ad28762815457a98af61cea273ce4a9e1633d2cab49737bab7c2d167949e42d3a86eff35ae94210ae61a97ecc0e1443f2460ee67592c63a64d699997390efc3b956c091119a48b908d7965bd29462d8e5ba44b8199ab973a9c6267bc22cbaf8d593a3bcf71682298f0054e25865ad67800143095b4b599cc318cf0597e127633565e64efc4b487b1403cf698c6d4830f505a7b3cf05092ed727ea1100c7013d949d84528befdeaee36fc5006882e914daeef986f56b6b872704848b9279111126e8ac3b364d7aafeb1e97dc038d17c6d8cf5b1a50350c2c0753e1debcbae316bdef1682486e66ace36a66f95810403fd63e618fe1bb4b7eb07cb436f748789c265b6289e1dc808ba90b3e2aabd5e139430ce27832dfcfda1fe6e979f71609c42ef7d4644acd16dd4f388fa4107934c3066074ba113cdf73f5ba3339814f1ebedfd9ec7e7fccc07fda2e3d52e12df5d238edee45175ef4074121b1fa3171dbd4b8351625620f0b54ea7657b7c2df9b71f53deb83603a22d63ecfd6e661cf9534e915171b24e16889e820bc129ca8129a9840ce06b7cbdfe9a1a44b744c9640e744c642db27f44289b2a369966fc82be21ccba60ee518045b4d2cfcb8a314271265faba37de5614dc0dd3e1310a2f5ab3b0d474daa4d7ba97b08ca417abd5e5bb94cb3c2b398adf1f1f228b4b5db6224b68bb67b79eff429f17c069ba9fc362f7d8d057ea28114b9d3aff629675522e87267d70189fd8cb66bc317f428adeb000819b0802e7527c2f9000e6b216ee63d74657cb823b83d1fa885d6b00dcdaac4b07647d7e55f04849fe6ec8b7e08543f25ffb3d699a57ffec7721c064a1defd59a30dd3798298d4b847076aa070f9bdb9e4a46eb22cff5054839aebb823e600de4706e16254245fdf71d6a03b795deae4450feeddea905f2995866bfa6c34e5bd211b164bbab9cc8b314e9571b8fde4da39ab669218373eb3678ce4367d07a9fac78039b286fe6957fb8b
4e3d6945ffd5a9a716eb6298f6b19c469074e88563ac83c094269d38a3737db30f060c5fb009a7e13d9dbea58832f6bee9398603a5bf00f6eb8cc865dba2140e156df2f78a8f1abe9e5be6ea43fd2ae4e0c6507f73b0c8ed2a377c1a8621eb0039d3ce9dbae1f8c6f20fbf7a5f13f1febacb8e436c71167e8ded008fb728a22949ff096d39dc3315aa00f7d738e9fbf30607a1fc47d8a8285eecb7239811ae3af7523f9bc65514f014544079b66c294460c020cc39f81ae782ca977c33ded2f29e8e65b9914e67cc2115b2909edaff41341f8a7f610ef3db79dadf308794f16928ee3442721ee6b7705ec2c711e4f18b22469ae839c87e54e6085ecc1103f341d71f777fa6e7147c5c3fc7bad3f124df6832cfff89d273d6d9116e6c9f763bd7518b98e3716c05713d490080836026a59e969c5eefbdd2afccc1212f87af9d4ea98c37bbfabf30870b0000e9ef99fb9f44e46df62abe95dc82e7d16b8e92a1bb0adc85954c9b76cf6bf6a83b97a73d370b5ddf71dc51265c1f5d531d43537c33fa3c407a9cc873f4f1a5acf326ede9bb20e37d886abbeb9baf8dca49e36ce14d72e7bbdf4ce79eb9af61b82b90b39b9cce43ab56c049e76c314698f28d39015421eca9e41947b872aa075136025638a1fef1e35e65a3a8fc087eadeffa2dcda383d0abef88db941cf326f9eeb85920a3fae33fbf3568dca68d73332029ffb43030d5f2bf1e4a43b005bf31879984fed55002be51097276ac146a9bc35457baf73542db959f98fe46fce0a894c60549826527e767fae63aca48cf5f67a331bd726888a89c5292b734ba6101c27547d27d54a402f98fac235b2f9b970a60d131505fd1af845bdd804b331f303eb57a3a5df88462317825473e326d73c52c0a661eeeceb36f96139002e45e07559b87af5fabfcf53aec306ed951eafc65499cdf1edbfdeb2ec787b56f6914a4a8d3331d8f89fb483897e2853fe80815f24b7bdf77b3fbee10b61a4831d54e819d76c897448cfe7e2da52875d507b95face098501e637742fb69dd630070da7308853b1afdaa04880093d1c64df61e0dcc2743e22e2f94b43fd21688e7968cc4ad355e7498400ee2035196faba5cf468d3373ddf468c5a896ce87d887d9c531da744dedd7eb2d8c8c106b2a68972b4b0885793a22295df0b9afbd4e545af51c629ca4cb94ff6625c5345ea9bf746c6ffc7bd37021fab4ab77e87d1a26cae3dca4381b2c1f495f8405dce43a171a5c8871f94550d119f34daec73f21ae4d37c306f7b85de276eb54a0caaf16a5f490fd65ba25f8748f2775d958de5e38b1b6f72b6793ebcb48c8af5f8f3e9b1ae90fb28df8d54cecf363adf258d2761cc08e1a2a305d93b1ceeb40785c3d1fa02287c60d1a3908bc52fd2d6b73a7387fc
8ae3da5acbb055f5b9aa276c0d729645f7c885409b3d748f0ed7428f7923faa2da4dda4c7572b6c1cdcf6eb23ebe163722d35f997d3b4536f2174e91f18301aa4ae8b8e621048cf6b58dd4f63c6b7e95cfd07aa20764a9874eace6e165e040414bf5dd6757f1918e4ff0d32465faf6903ecd1b9019a53f925abf24fa3e3255f2ebb536c3a3df73db7acc21527c7f2c383e45817e66286db53f3352f25b6e3aa1c4af97e2e3c439d6d7f9f7b86733a94a998835b9af0ee3043f6f2742dcc900f55732d8bb5821fb0f39efad6e2f204e13e1593a00a1461094bbb1fce566e66d6727faaafee0445f7ed0721f74a3eaef27fad4d55a03db8d05593e3cf8a01fc0cb40d06da59dd9d0b12fbb6858d85b6a6048013afb3929ec63699db3b2e9ad7ee894bcd3b7d2a467f696c2b465a84e66b48c5d0f8073da87b30f85448f85126aa49d5af6cac8eb96c8cc89c954276c9bf67d82cbb118a0de0caa8143f0b67a1b00a32dc272a004633714d61d7910586e107e3ab402ab33ff83ed2d6887626866bce9a0f28f45280dddd41f5afa284b36af44177e1f13efceb60b2a0b8ce4a1f27fdaa2a0c5d5449f01f840197482cc6b1c8eec3115a69e2f71fbd0b5d77d0144343e0a79b7a717b324025141ca7621bd7c5447752a14bb18559efe30f5c02517a3b663c59a58f82c22653bd1337b0cda0fa104b4ff7193b87f4af0e7faf83780df7b89e6ac9816de15e78487aa133647d6b0819511ac2386fc20f5f84df5cfea9f1874bcb8a3a672abf0268f946fe7431a6fa70a2a0e0b0d2a5783d818a8c4a1ecc82d367efbd72e72aba73c39d7e399c2f2c97de709aee7db6756f367a6fbc5ff69d3fd108c6b777f070a32a74b70b040f7d6628eace1ca0762168ebdd90c8b09385657998c246d7ca0a46dfbbeca5d5c70c45f3a9018775309a49a35af89bc9f324ff3b6299058cec8cbf83dc9a445bc90fead87c733a4c176986703f0873f1880abb6b94b6960e831ce066021f8b4ab0555e9cb74ad584d01b89f1f641b60967b08e3704db83638dd47410e0455173b4db86284ef2c8e5bd5587204282b9b3f747ff090ed8ab93edc3c622faac6370bd1f2bf6d21ca5afd6624a24b50d1596cde8b564aedba8533ef7de83cff630b5189b275f7e28cbf7583eaff2673640d2f58efc0b6102de58b341b41d83492a437ec05a35b2d7b2bdfc0bb95e2229f132aaedfb23bf40229a5632090d81ba11a1d8f4850d277c446af8abdaa08dbad2ea945e1be5e6bbefbd51139e4e48bdc78da3e688a8cf4f50bd9c53d79df555a4ec7a5be0bbdeb109483f0d1e2a7cefd38ee46aab2fdeafcc8faa09ff9318d895239f9243f48814e15d710f290a637d3c1ce79c5f7d65c6cf30ed62c82317ecbb99dc050bddb20b35ff2
bc0ea9298b034159690bfb7944b804f7320a49799d8debb8a357bb2adf7de586b0efdfc20d1e5791fd208547657166f8e813f12afc2717de354b5a0d87f8be45a27e1278ed2d88c1aebd64417684b1b478ab7e352a408bc2a5fcc5f54b577d185f2c87c5c73c840780c2bdfce0a5ffca8e8c15667a8e7f2a707565e0b6ad7bc5e3ecab4af8a2b51d126ac38aa8a35a213e7a20f19891f8b5cc92ab0b2af76a8bd56d327a2a04cfd3a54de9a1a1d9df9d682266fcb6f988f6d5ae051b3959ddf49ec9ee850b74e66d85d47c2ce5350719b522413510294fce0c587658c6441b26e7f3e3f03cb9268b2f74f04bbf4ff59d3b7e54b7ecf698a9c7755a8ff4228cf8b0360769f9f9cd382c3805b8594d4dcc78597938f9cc594d2d4c4c582d792c05f8cccd3804cc79604907d048fc15c145aa3ccdf3b8d526ee94deadd38e557d98792118ca03774212431ff3f54a1b219606e2578a3e6691be878dd1fe49a89eecae55274eb4fdea856c13f59a9d54d8a76d87b2f2d0ea1a11fbb279452f6e75bebc3985b4e926574dc0c66f80fd41010e3e175409da6b10615abb49858bac90c59e37ef21e043f881079f2dd78828a5cabe80e66ca86397fddb33ff338399e03b47776480c484149a8c8bcaddce225064a13fa24bf1c76a5a19bf779476993a8aeecd0e0c637973640d5b5819c19ef99166546c93c4aa1f238af4247dc2942e9ae5b2bff972e32982c460848b9c65be078891e3e5655ba6c5d1726ca4f239c10273bee023f0416141e255ada0caa38e45a1b37532a4f26b0f09764dae367cc6a69ced71718ff884fb50eb9016d06e120daa9eb1accc83759105ac0e006bc7ef0faf5fa7f88753979113667c4c1dc2bee31a3fe81d9d5ab98bab19c0219c563fa5583cbad2f79ad0bf1c331e4739d0e796c75401a8c7451a75a93f16abc563d9a8bbc21fc57c99bddf8b47bb6e6ba54e8ed096eef3d98ecf0e4ccbb0cb3a1f1b9406b5078222a750b3632e4975f13d887fa37a28621b091f525c4d7a1f34324e6fb4b931cb62797b25e282247a65931815b953b82ec73b7792a19bacdd22df767cb8bfeef84cc6ff7fe11ab777e284ec0ac328578dc3aff7747a3bd45f75e162a195ed37dc62639b2ad61bd327cf1dc6856747a9dff06651b3cf4bedabd660c3c5e5236db9e454c258b8adbf0dc053f9d0727409a25e7c8718d8ce5ad52fed7557fe39989fc6a6d2ce6a357165d41810563f8abb51e260e4866fa54eb9ed84e09b41268fa5564a4095f8593d6bf42e0441cb650403cdbc2dfe4a62996b17db760c6baec0b385f0c23ed19c2a16a5eb6afad47b94ef54753e355a2c875fe6e1e7ce7ea872e9b3e953999b969ed12cb4c7ca7510a406d7c5cf4648eea521a1a61fabbade4a88ed38749b
702aec8f38ab23c19b7f1ea12dc07765f5f375becb1cb166fc99654b5befe1b962bb5a41cb6ce3abec0fb3d701944bafb8f16c31add4ba1486c8cf5fbc86597cc5acf461beae0a515c0e52e22b174dec30db16b6fc31fb5d481f2736aa65c898797e6e5560f431dfcd70795b41454e002d3e71c9d8554ef8e6cd5a37e102d5e0f91801eea7f5443d21901d7fb82ef75b36566ad7ea8b0cb9d508cce49c258b6b75a43608595ccd322858d93c832988ed1c21e88b943d3d4e1334028c4fdb3f620ca04822e7b9f6152f09cf0673bb204a3d4b35a4271d5d8fa62f6c4a82ab6fd2380e23a4a0abb9a84d8db4b7803dcc74644b810cc13040eaec1e1988cd2d97ac6f55606d1f6f992243a0db879ba6e59ba58bb7a69934933574962b27f48293452e1644569672149d0e7e88bdd226d4de9622a8276301c10cbd4c654a40778c13fbf660e3897bc21afc2c9fe3bc82faa52e25bb0f4a2ef3c9cc9e855dc28998cb0570bb2dfa2c13f50463329f8b7604d06e55aa9d4f672bda06b06c8f4d49d25357039e633870c95fd6e9c7dd7766d36c21c85e5322d754afa11ad05547eed1103c3af188c232536c929e58add81d3ce750ee214ccab861a12f0a40b08d3a064cf590a81e4a493050e1a117235c5c7ce3cccc1298f580402cd8557bc05eb8f422d33dd7f994b0c29e51d1b0d760cc5b2654d6b8e8373a1f1a8559450a2e0138d47acbef0fc6a3fdd54ffcc37c5d361c42a9a6504684ac0af6f489ecafe2f7e59b25e0a51f9989b9bbdeae14bc9c5e2a4c20df719f34c5c526ac0b351d9e45cc0182673516d04fa773293f11c7e0557a37e6f32c1cfed057c7c6bd53e9d688018c86bbbe023df725637408d5f80b72586e1ced97bdc9556cf9e827255b8789aad3ed3a023147d6f83afcfed51d9f88ffc4356efe5bc8a723097f2f28f6fd1a3792c55305a8d88ed22bc8d92d69dfeefb32fe975fa31f1391d8159d380b37fc775ce366317e705a42c85994c0f76bdcdcf24ea50804d1ce38c89943314428ccd15a1e7cf5d4f925916fccfd3bae719b8c054f4fe2203ec7e5f97e8d1ba7e2a9910b3c4236dfb71d9ff3d04a6e822df4b4e4100b6e377e0bff6f5de386f4bb6afdff876bdc203f5ce3f66389fd75e2072f6f9d1fd591d5e585ef4536f1dcf21157f3b57f6118de8f0a7f8224bc45717211e44cbd9d1d297762d791fba6a821c5745a28fd05bbb787fc67f3c8b44b4ea9e85701c075cf3914c2731faaa33ea41808023c0a2280468b0ee1078f36d35bb3595902aeb6a428458e3a5edc736aba1c4c59d9bf7ab5abe4e4e0dd362824ac1246601e1ae893ea104f205fd1ef40307e61d1d89f75b5c0bd0e8f042fefccbda96228cedafae9c927217aaee7b3d9727563f0848b0b61fae6db79
07db41d552c4a106bf3eec0c7f8f11200eb481a87278432b1ff3ff1f7b5f1d56e5f6fcbbf7a6bb114142babba5bb531a042405414240444240425a52245e4224051190941011444a404252a4bb4390fb1cc2af9c0bece3f19cefeff7dce7ce3f3cfb1dd67ae75d396bd6cc6710d4357900cfc29487d716904921834e3413c9b2fab5a19c576823007aa5a9af2cbdf6bc1b00d2447b6826cbfaac9802952c3fde95cfe22f57e728c45f0b97733aa3dec5e1bf0802cd6e4d7ba7e0feb084ec07dbde61f8dc5a1ed4bee091aa61fb6c581be58957a9cf392a89c47c2dbd62f5c4b333452582a3598ac0a8390d4141ec548331ca712aef8b3e7b3f8f0811d6a76d9aa40601652878a16fef232005c8a5ddd0291b40cb7c6bac94832e4207b0d3ea776a6d3fd1135dd71a7ae30cbfef84fee43b43f3458e8bab79d6841ff6288e94c94da8cae430a7f688dce44d32676d69845778771b488423b850f3f1f3eb629bb884aa2c9b819b18a3994f46f8882d16c1d91f8daa59f050dd20decba5af2f88e73c7f12e9e873686340a138bc9fab358d025e2b2001c286110d6104336fe3f0b4b13189c6bcd2ef53f14283ccf4e20196a2a9dde77b40cbb281f27cef9fcf947deca49f27d0c10ed7d5fa44faaa404f09906d51205bb0a5ed95cae1ce85c197fcd26045d32335925d6dd0bea3598f936c0542b4dd46b4f62670f931134ecd2f5386081b9ba63c0d452028aa2b2c349315c9cf3fb44704afd30714819947b21cb81a5e602278969e40955881d3ae91df6ff052ad90ab28c530c9ccea533ae1272849f12a828e29333e22aa8428bd25316e25dff284d9c11b8592e6aedf4eadd23a1eaaf68bfbfadb696d523ff3a16158729b4c5d12b7a7a3b9b97db785cb8a04c1fc06c909672e49a11b0c33f7c718b3adc61eb4b60527941a4808fdcc87868159c48d8be9b8385147d8f03a5eafa0a3471975ea448c7e5bb98dc99c3725958b81804a92d1be6502e1e8e4cffc0ea0c492ab97753a2f9d7951fc167f26488facf8677ec0d5426665d05ab9212e6996cfb46e30c4d0e3847c8f118ddee9823517df0850712687e3a65b70d34cff4afb76270f7af339845bcda98750e13dd5c36df65319fc992fe01933bfae6866d14734aafa1596e259b9df54decf7c3bb4eb1266e32f82a49e2333adf9f2dfbd86045aff99ef42f379a0483f7f46c31d5f3ae2e3e26bd3c01ae59ff91710e0bfbb6bbc6e844b584cdb2a1022751ce160fa995fde3a83ab08a36b571e1ccaa214c3f9bded1b9efacffc815994976c386263ad49b2eb77f75b1367ea51527ee68f7c9f20ee5c27497b6ace93a67b658835de4ae404f4d0ea57e71a008c4ff79ee25e5db9833a4de368f2090c0c573786aea139d2d743619d775d5522e7
e4d5cbb17fe67fa14af20e82e98e6e13bba8ebd0770f09c78ed2e157beaf79f74ecf979b2ccb6c7e1e3dca191a0efd9f764ff881f61abf8f29d6f5751564bee5d341883920f8fdc5cfc949404698d61a3641569f89ad29615da54016d30adfe47fe643ebff5388040a9f060a1f1a883934ab19b4facf0733829ea408fb37eb87561e9afcd0cad342e143fbbe73fda4ff42f9cb50f8d0e487f67e687c68f543db9fa095ff5dfef918e8bf5fffeff61f360fb0f7f18d411b5b84b95bbdce7ab63cbf66c314c1b8c9807c05fc3d5fe1a682540b809fe7ddfbfd49c680206732fd26c42f42b6e485f3a1e000de748c3cd09b8019bf232b285bd4f9376ca55f5b46914a7e7eb9475ea6105e6e7b0879de83948169665daabbcde762fe80402028175c90d383f2c08c3c1fe333cc060dada1c48d3cb286f2fe73c91441fd2e0f60ae66bd9fd4309c69375dad8fbbed0cd2d5198bbd78a5b447ae32e6fa4482e90aa0c9c8b1bbc8ab40126f1a5385c93f25affabae271c38ea95d43098adabb944b02478aeb1674c5f5de55d9c4b2dd8d1d1fad44b67c380699a92457ab9791dc9d1d5ad7aeb7f74b0174f909ae5329ae89621ef06673de95bcee18ea720bb633976003d9475cd2032c4110db6d88b5371874d07330647e51c51af3eb035b8662ad8ba4dcac4fe931433532ae7a954160b9223515a07cfd6f19a84d11aeba4329ff3bf1387fa53cb4fbe8bf5b3f8accb5efd453c8e2a1e71526167c8a06a5fedf34f083184ef5c7412985e6b3f7837881d4771ae198a3de0cbb74eefbb31dec95a821810e3c93132e5a7db3179a15f3040106ff10664fb23b6e4e652be996d965b7272844bf60df215868a9432d6b6117183d3c3083b90ec79b9971549e9b023e0dc24861f1c6dee7ce2df58786e568465ea512d2938b9427e53c45f433882e959270831104e2012a9a1c3c98162ae03ec1752e7a3fd3488c505cfa3c95a926bceb777701dbe4f91c70879a9d4aae507262cde8528b167130cbd4edc7615933716b3cf1750fe69be2b00f3d3b28520fc4c51013897a8a5ff68ec7ffb16b92cdcee7fe6ac9ee625f1143af525fab857b2714217c2e2ed6b8fd12cb11d97a12453ca97c4b8677677a6efc41736f30067efead38ab99ced2c0ad52b8545d9633fa009d1f09c7febcf2a6085a92bc808ebce1bcd9e59e2525fe489d20978fd843212f02cc325cb4f4c707031528bf9a01b013ac4684356a8e666d167ed34ef0ce5727bf35f1284424f9e49bf390edebbc3a72e41b84e2c827690fbacd0334af8ac5524a59de4fbeca6763222a9e37997187ec05b74edd5cc8fe83b26f48cb80a7b3ec44a7048511edda67b3fd363d65ca3c0dcd0e2d59fcf4f1bcea0e5d13da436b0430f51bd608902a4bd78d60
de733369402ef2706a9c66793875dcffe46e0a79d470ec6ec70bf4a7545c4ebd86c1226c1ef5acdcf961e7ac6462782c274181215218bb3dbf410280c80190aef95d0a19ccfefc72aecabc28eeed7d021f6eece1bc2aa580959e4fbe478bfa36d460a6e7779b51e41d58debb2c04a5c4c2ccd7ed362098e4d351462e70c6600cbec21b046c6ca30d2813181597f26fc6fb77a26a6c54ee89665c9d827b3775c5713fc5edf361ca0a08fe04146f53b0400b2c881790efc367f7340c2a97516f4e507844ffc9cb813cd058d640dfef5d67c96677780a70ed93cbf0b2c4ab2e948970525b925b23f760cc3a2378c896dfec111b73caef351f3b0bd3ffaab33034ffc1bfbc30fda0bfef2c4c495e30e784ca732ea4dfe2c557e7a78c397016a6b0b176681f884fe3efdb557975a9468b38b7a669e7de5e6e269992f354c01c1c6018b01bb4a49fb661cf7a8d7bc86fbc7cceb12d2213577eb73baa98884a43b2f2708de47df563d1396df13b05b4a249662f5bf043f6051b4b999d6f04afaf8f9966179dd54e18e5e462afa9ff10f7437cf9870c8c549a50be12299cd0600aac9b956efbb22cfe3a0e9ff92c7ad213013add19917b7b8f89fb374c79acd31972710d5b9a0930a4e9a5647a5a5ec3942b1e1915b17e631a7357ec0c3de81f323bd7bb3564e95ddc692d71ea28f9615404814017db787f740e413e0292caab16ddb5c540d321f5ae77b96b90aedc8e5c3ba45731a49c8dbb2f00ca28d86abce12825fc5d88c49a36614d8fcc6c58cdf76dcfed39dc7684b03eec23170bf4201fa6861a1f448baaa8f147627791e7a699c6e617480946f0eec1f9e1997a95816145e5574fa491ffb747fb82461d262a2fadff3534b38be258a58b300e0d7de7953645501f3c8bc7b147d89c6f7f2e103a581b4b0baa4ac40b683fa2d2ab362592b3ed6a1e12e9e0a9acfb8606f79d8d6e8cd1b38905c341bd10b08c5263274a10f435317c3690efa923ffa1c0190e5b40e8c69a78c34b5b0f23dda306c7973e4c731ef3fc2ffbb404fb18eca429d14f5c709a42ff92c4e8a060f5a5028abc7fd77d0f04da24c3b2a3bf7afb0c2e5c1bdbf8988462e6e92a1cd8c4d04794b542f577b1cf0e46780a8ef95bc6fa68d3e7594cd7de3e63a17a1ce25bcdd2b0fd857d033d2cccaed1179015feb8d31edf69a7dd5dcce2ccaf3a6886daa786ebf17c61097d9a3add6ab7f568a3da81ba518d7579f58c7abdd8b8c8405e5d162b479abc9b132e8793b58e4bb2a29eb2467b1540c8d9e48259e6a8a0fd50f8a1fecb7811b580aac348e17a2e5156f98529abeac063e0388fffcdc071021debccbd22855e6719c2cde9ae1412811cec6afe6de0381ee07b2ae1a7bd242f6c8e56da3da5dc7452ca08b6d54b7ad25948
ddd79a015f0616c0521ba6f17107bc973821caf2ce16a787dc9442c6d3fa9e89a61b6871c151858f40e0e06d70e0c543cced92aaa804ecc8c87038ba8bf96c1a6b446bf8189de656b7bccac0946b17cccfc0f6858055c90aa26ef5d8069e27b0e26414b4dc53276642b58962943272dbc0e5dd34b16e381fbb942bac01e7153645d0a88252ff89ee1408328368b515af386c3e5eff5234a112994278027ce7d797de3f661af32482348d0cfc880f2e6777c22ea2fa9b947b3959b7d0da76db3b71de0436180134712378cbc6b36ce4286c357ef57674c238a39978f4df19e5be7c64848873f71ce38629febf861b964a83dee7b8bed3625a24cd06afbf36322f3f81c4036c84543dc75f6a184feab36318d42eaed489a4e5ad676cdbe1f37c744fec4eb72240910267af90f2d407dfdc6150f9c616db0e0701ae1ba7e1ad250e6ffb80a2f1621042c136429edb416bc17b68470d2de78c31c22869040faa6b45980a68a7f07fc0f42a439011d5302b80d2362776deb7982cd35cc97da961fe772dc6ac31e0d0eea77d38eb3b8fc3fecea0f7280f97182ebc638206bdfc4b7136a71094ee38175a0a741441fa1bf5c342b3f5fc9227fb3fd93f87042dce09dab52534fed9612950c6c75f24a8d7aabf393ec07f5e0eb0356c6f38ba18db69fe5f01017fa617a995cc6727e6c4a5eaffc05d41ad01cd79185a1cdb89f9719a860da53cb4e5ea345b183c8806f3d787da21116b9addbc65ea70cbe61cbffcc397089526a69f8daa5c3c5e40b1a60ff44441a9065afb9df87e68b09f7f837e3b79d56f120a143eb4fe878a1afb9bf5ffa37194bf82da7b4c7ed1be27f0fbfc40576095edfa6b721a911ba31de14a781023cf1a8509dffbe7524d11d48d79006458294fc65216be81f4c97802b9f17ba4fd4d1ef61681bedded45e55fb44cd4014ddedb4438af63fc87edc3e83a92e084d27923bf63b2b9dfd22894bbfa0e2b8feee830f10dea5506d70bf90f2a9ccf7c220d5fe9ed133174dbbcef27b91d3b59d86142911726ed7a0bc04a2f41793d380d87875d8d5d73137f8dde8b82615d5aa03d88f9669fd5c12ccdddcaf626f5fd8da8b45c033bcb73f5cee3a8345ec0b45b68c695fd1dc5f7783bba69e1af53a1fab744b19553962a64b21cf6e0cb98002bf0b005756165547990f82e8b8bc4969c494724638d5f22032a31c20c5918ea412010d9a126ee4219157c8b766539add113233228e6a313599f134963df29269ee5680cbead01fe47fd4f78fa7252696f85092a9d712b03a7b03e39ea5c55f3c7193cba7e444bf41e251f6b68ebc316f94c1c2511a9fc5cca91fab1766b58df603a1dc01c2b800523c2a9c05bbdda26be9db9a8e489778f33a39fab93b238676cabd3c3c3b62546fb8d
b6ad5ff28fe98412c373d4b6ba6e1fc1d5c2fb6ab1f02eece342804aa77271984504d983c098ad27c12dfd0e805d5fb283f6f225882d643cef9d45d9169edffdf00e3b98fa1106dbd98116517610286beb99d7418cd58efa5f8db1c2c2c2c2beba5078d5a524acf45a1fdd267f0c9c37b4231d2fd0076708d6da92671bbd4de9a7589d6193e0155d4e6fa3fdc2bbc1384d36990c1720354cf09db1c385e1551082447f26d87d4a777147593028ab1eb9735c78745fed280e01f5e6611c8210d3a95815a7c5217cd214b404a74557acaaaa3cb78e43daa752a5808686ffdf8a43309b95c413a0655adc52022bde5fe581bd108ef43cbeea49c868a9f776976bbbae59d41c81996389716edab4e1da7c95ff372fe6dc4f1239df16d9fc1f68e9cf9282c022c81f144df8b6d17783544793781dbc7168936ad037f8f2c8f2b94383381e36def2f093445e30a3e4169f6671b47cf7e4fe4d83708a1bee0ab0b8224fcbbdd80bc110df007d8929458330d576e438743e37f76a71a055814e3ad06fd1cffeb10a2793c738e73394265b01971296725d2789728eb2c404b279fa97d61bc524d19537e2c030d36d1d92b83c753efabbece4132a48eadc8959b73221c3b043f0680f4a84bcab98f26626d5762b36eff53ce9d78e6315c840699dcd82bfa2ff9efd7e8d1415841d0c6b436e5d2ea499682a6cd346f3a1e2d9285d7601fa93b1c059274e47ba1b300a3accb57205d50c1cd6c6202f2b22cdcafb37dfcb89d9e656b48ecf22115f68b8bd0e42827f9524a6e9cbfe0a1989d7bcd8f6a35cd926737eacefc7e753585f5ecc277b0aa0d756b906d37a3ded08e8f1ebf4f06dce59c47ac898eea357f12622c64dcbb40f84223daf4016c8565ebad439cf3bf0a0b6b0b8236af27236f7429d5201ff604bdef36baf8391f7989096fd25e1e5762e1bc9bfaa736db4636315ff80adf89cb911f12608d55c69dfcbf19a22dec3058671bb4f3277703251a567dcd49ed79b3debb27c7d7185c655e3a9a57e92dae0161991daac5d617f76b41faeb75cf88ea6ff189809de0f8c163f37f7aa1367017d32bd996293f9f3a5da6c416b5d6f02771e27ee6e5b38d93563755be151e98ac7a573944b5d13ac9ff8851274a6f2fd39e99548e4cadfbc6f07a3d7693dd894f8de6c319252a8dc202ba3deb4c4e92be127d4a3b6435caa182fe168a1b358f9b2cb252963b88e64e9ed7d610b9297d1d965c208fc8f5af01761f0a6c0187106f128f2df3fbb652ab5d27b4a4beb0fb4bec028b89808e9f9da1060bbdd1cc4aa439db29cc7b72b5bfb7e570f8e3e77f90da88e890d6b98a278b1886d49d20584d5e98deb4450205a9b8f6661503edaa1877b6fb9b8f345693ffc74fe58b64f961bdd6a1efffc0b6f745a
12bc816f681358b0254a5538de19f76fccaff02cd6c7dc0363d3a4740fc275d334112566228e84f859df7f3f37272e40e7cc72416553041f944bf2318316d9df9feb8a05a80b7d7e2ec277f08bef52d8e524b02fbadbc210e706c81441fd50c1fcab71083d7f8e43781e4c588b398401be3a54b0fdf8db942c723539cfd8de57cdbb29ed53fb21e67a1619dbacac62a8979284c7ee1196bc7235cff7de8e8e924b5e6ce5ba4bfe40ee50ef04438ea15eb1f7f7f70f7186c130c73a09fc711cc281efcbb1c83ffd35fd732547cf4d9c83cd6e9535c232ef8d89d04176fb333eb53dee9ac5716d5828a371908533806445b7a3bcb43269dddc63a475af85db0d138c7449866741d220425e9d227973a81482135b8feabdf01fe192978f9e211c3baba39e2fdc7f2a397e4e20d59f78fbcde45cbf1911d5743e935d7178d4fcaa5a05c9aa4f4453b763669d6fdde0b489606447659eefe4806853b7d99d57736a3baf0748390c2c04979ed51ed6db7eec70c9f81fe13a7b8e9e611c0b07a5e5fe53c971cbbd0dcdda83ec7ec12bc27f4feb20e3ccda8b103ac262dd2e4ac1cf953d09e94d78b490594e0442081e60ded8504622a5660e7687735e9072d1626153aace4f3fca12b074dc23483f84032d27fcb95b11cf150eb4447f54e0f8234cbbd91e6ea622b54a25aa2627bcc88b87a9c5d714091793138a93b8b036b0e1779d0dbb805fb409735ec2522dbbcd022bf8c5eb0637c6f6b254dcb7c13533a973ef0f6a835c383e5322ff100e82778c9782f8d784fb4f252827853e225e40bd70d27bfca3a494e227e2880f0f670c43b135b05a6f59ac8c37b8f7a292bc7905e0fa920ee1fbd0d0c87d4fd3f81094cf37fa84ce54d5245a2df645ffdb8fe4eb86c7818587cee038bf0025e9f0486913d62f4045e03aaa85b81b2afeb5962a3fd0f9f4dfdad0af58322a113626e88476d556eb57cbbf6d0c5387634e0cf3e073755714e8f2cf34fc5cd718b81445add60ccec9b9346dd263c7234888214e9dd5e548820892e30283d5fab2a4020b9bebe1c477cdbb8a7af9531e71ebf5ad876cee21c4ba8fdc982fe08d1458d2ed67701e52a0f8f4bcfe5def04550db77c0c3cc4108d25797f83d4070409134cad07272a87b2deaea26d281b27849f3066bb2b600ab9a7531b81cc5830c35e4225c0ab323f1884e2dd9d466cf34e7fa79ecea2b8d6f70e85f580659daf300846c70066cce2edc3561c75aaf1cb9609ec57e4aeb4c80d7f26efd7f5e6b24ad0b8c13d2f08f7eab314a9dbf3375e09654b8315dd17d8286a8766efda0bcc451b593d0123968c080d96d4dcaf0f7c4689a780c6eae6f3c94d555f28c949c8e12124df71c50cddd5c82cf0b1fcb8d23d359ab408e9c18f4ee3784e936efb3955
0e9f2ed038a48190e221761eef5d12bbd0c1af3427508a36f079342863ea626eaf978d19045aeb73c01a7c7ce740994fdf33b0dabac920afe40132467f46e3425a9ec4679ab4895c0746a6e0053b30b7b0af5d19c070741292ef6e6cca558aac70bc9c50babe954be839a3607bf90b08cf270ab1d4e295f6f693ab788c725a0441381ea57e9c94bc2a975bc1a8779725641b1c7c894c915782d052614b70602197f3c7846530442c16bc322ad1cce35e5d422d53628edb939fbddcf0cec6c42d1efea58cd507b5a6da090e54447d105ada3213ceae5dc5978459ee8d8b6ee566558bb1213b7770bd294c2104190fd6272f70baddd788b2ae31351351e0d31e174b960913cabd2230a34afd50eb2aa1a13f1863b5304aca693a76ba0eedfa4bf6a7200cf412232beeda9578474bd3b2c806849e1bd3cfaf9089227413dab1addb64235bfa6e80cc936270457bb850552d9adb2bc198327ee53af77622cbfcfdab6f525dcc41ccfbf65dc4fa5a6e4846e933ec54bf382b46b447973e52b8fb5832aa1963e8aec98d2c35ed66b0c2d7a6b319338ebe86910361adb8bd6521657f6b5cf9685c0fcd2ca6068f89575139c4bad946b32b28c8dc2f10d1d193c6aa5c43c2d7b91f5f36e8b66e0992cdf73aa26813f7c86b7c4de1bd5a60ec8ecdb40037441185899ab6623bb75ee2d82a2d669a1c3a2fcff07c37c1601c4d573e97eb9d1e2ffa955ae80b8c726f1b3e5ba683f77aab551c309856c9b4dd8ff2c70a741801f15737f4bd7f074a92e0d04f1fe7eaea0f2849fbe1c39417a872257f8692b4f8d35ff313956047a41df3ff0928c96470fb81b5199c9638f2034a9282f0003a087cb591e5cf5092a709079f54127880e4079601c48e9fff1350929895aa07875a1d177b916328c9c98ac3a003ba27cfc8fe100ef32728c953848380115980ed3f7e3bae23481f3d37fb67a024cd8e6e45494a7e82923c7229c65af93394e4a9dd0aba657a84e54db27e2cdc3f0225a9d179784b0149e7fa0125f9a8ed10971ba69af2cf5092a70a87a63e78a89142b8047ee6ff205ee02e67d8e74be16d5b04b82e8926518682f682b4b11c31d3ad22d71c4b1e93b895000e5a341a6f3895630b472d3eecf4f9187938b7be41174e97446bf0c4878f9bd93bf401bc54f1c3f821c4f40571697521b8b4c43820892faff12d6466c1abd4d7eb5631348c921356bf5f714f4819a17f52e771aad19246e68e059fc5511e3d1e405f3398cda2881dfcd1fd82e7de4b8beab14b8c92fe5d692983727a6ba34416f500ddce9e0b419336fb3c2b65008c00b10a123b0723362531d7ad1dd7dd27962aea2030fb3698e57ee3e19d64444dd4c341b6ca706cdc34cf2b25cecde179d70b95fb9efa8d1966b101a0d2fa870414
177fe5a3ff4cf585e7aa2807906850e89c0a20901c84592839e78d3379017d4f6b7db834161c27b2077dd88c2c903dd58fd931cd8c3c84045e2242bb0d9f01d5ab635b8e2e08d263ac5a5bb7366be49c17d269d893ab28a6e2da634c458c148f0c6488bf6120ebe27d87950bc5ac7f0489a53e43ceb856908da4254d6c853c20fac2d1354641c2a5e4292bb9366d782f381c40560a6cf90a4b3dad651c7c17d97bc5be826512d2bf7dc73a3782a3eb422df01d04fab0f5de1b36ec405c4c3be55355d4237cd4b38549a8ba5c1d617c16fbafe2a3f202a8286c9c6588effc3f3e58f5c1907764339283878da0ac46d5317d4cfe12fb611a80d453293f3c245b25204c63a1aa2492eec1ad078253bfcd90917a616ee1e3a582839c1b875e0db02d1b51ae72c65c2196923aca5103c405f4540c7a32b8795ee9a1170393ce695a057911108407e00ed7d945a6d250953533ed40fdb4ae90a4019a2f5ea9c9f7bdc4d2134afbb013f0bc29c17fbd13766276baa8315853c93a1b5e7dd08160885ed0deba46cfdfc6eac8e0bd0bd5e07d1bc6e26b659406f3fd22c20783afe541fc29f9b1cb25246923c0854cf06dd765e0e6663b771cb192294d896a933db7faa3b50236ed216e25dfaec4cb5c422fccdd8fbc6780c65fc635fb1b61177fd7314a10fb15f71df9cffcd0c284ce255304f5f4df290f3a5819b7f73a6963f1e383548b90605645e5182449aad3d7e33fc968bbe2e12f3db9130398b61716e90823377d9abecc94a27b4342a65d42b3586b6d08932b3a94afa16bf808e62fefdf86f963d15838f35cc75414ead8a3736e728d3f9aeb1e0fd0fbe4a1c2fd428468a942f79a1e3f016449d49ade37ebc136a3acf753394b24b1002449b8cb8e46aeca6b68370c54d2552ed6edcfb954d273990c6412b94999198e82409d5b1dde90f6c37965fd20cab9130681769c693640a6db98bf7c2840e252a58e3f3ad74e242108546a2aadb02ef8b314a7cdc29ff95385c9dfaaf64c7c52d480fc3b789c17cd9278a16206f002c2a3dca99ace4f9f3730a467ccf03efd06a2af1df608412eec52b8ae849c63d3067031295f90eb92f3b5dfcfde08c37e5dedb0d3b6e1398a243ab858bb33f83880e0687eee41f56e638d97f446496aaf495ec99a53b9fdf92dacb844a10edb968d7f1f0a57584fdb0640eda6264c94c0b4c1eb65ad6c8ff2a8d7c902565880500e4b46013923c8b80a1304cadcca80f860c2857b0f83feaa0faff66b77fbae68bae4e48e9b98a065874a9a52ccb3d28ca15cd718365441fca3553ee4a6f0a63a609babfa8263b68db9468a0ad57c4583161570fda21594aca9cc9265320ade7a672c59a30e2f31b537b37f378c9e1d771eb795e26628f2eafc3672b08012fede02aa17
e2f5c7024aa838dd8b17681811723f35e02bc10bcf593b5be610550e84cbdf188a946d795180bbfde15cb9a9b75dae6e5feb5b4a14e46dd2f16eb963d202881a5088c858c32183c068db60646ff0a13f31b2d4eda8d3d0efbc4ad363e85851a18c9713e68d5f77f28281e6c50035a1372f50264b0ba3d3f86ab9657c33b1d1230bc6b7721819f312ed6a887377696b605010e076f986e14cad519f7fa6254a4467c79cfbf7487b152d3f3680ae01fdcb1b14bda3244e52ff15c8c057c825bd41d8df742249bea5a510bf6b99c7bbaedec63cdcd85d8442d773d95a11c0cf7f182249242f6db871ef652d3e56262aebd72b1c3d3b1f57052308194ce6e54120cb2d731fc404f51f9ad02a81f1a292dbb7181521cc8af86b20a30803eed83ff6bff44d36fb5d961e2f5bb8c844fc99b654855ea90ac1694553b5e9194076a3f22de0da76f76a5e180a58cac948144b48112987ef6b2fe2f5921c210821db50a15df0511a54f0a1f71c2c12e3df70673c269ab313d13ab0ee0b42f2a0289127effb292d919f831478a90b57b10a9b285b43b5cbc9ce05393745d0f08152ff89fd15da48262e94b249be86d983846eaf561272dd8e9f5cf2dc744ea083916a86ba937883a74db2efc5346c1ff540e9b3693ac98c8a3717a7c941d2b46911d3c0a5156754e160f50535e22c98bbd7820a2098789d82ef5a9dd092c42836539fbf3dc4b6fc7430505fe99fb5e2fd79a0de4ac46d93831f4cb073b6ee306e0b11f7b7143a33e61b2e046fd499f68fcd9c74843de31a7b1eb1712e75eca07f8cbf70ce7c8788cda31bdf50bd85919da45981eb1eafddc90d491132dd1993b4c9609f6df5da6053fa89db52914ee7caabbceb3b0875f23a4ed9dbd6b2ae94aa6763679eaee51cbc310b1f072946f68709f1486ff00279f1001c61b6930f93e1f2425f86787fb671c6a9f9425439e6239a806b2bfa006619b712d0b89606e4f0bdcb7fd01cff8976712f78cf8998962da19a133eb17cc2553c851c0479be0dc9f1064f1f5ef8776f469563939393938704a54b463f7402ec1435ad571abdca2012419ff9703cdafdf37350a8d51b715c24226c3b72b23c58527f3a469d48795bfe1482082f27eb50b77bb10f5ed6053cffd1a0ea57f8a71034dfa27f19831c2a9d3850f2ed8d3ba4f8ca6ce13c7109404a46461059c7fe5dd7dedff26d7209c83c37cef3afec1aa7137898f8325df8976ce4b3b1778efe9117783b99dea574ff8ef70702c8840784196fb17055d9b9d9054d18bbe9faa32bc04380905f3fff69b2f44b5403eed65b7b911be4cc41cbea57e5b79135f655bd74beca1d9e10e1aa7e2ce3a747749b9a75199f184e7cb1c1f5e84cc41ed8762a55fbdf0a86db9e979fd05ffc9647c450a933b577c9
f8b9250709b9fdc208c6cffa0e491e24f34da3c6655ee0018f348d98a818be115ecfdaf4da46e3edf9dd1e23d9c74b82e6810a3684aa2b80bddcfcca0b185223782ca95b88929dabdba1c6189cb869d31a7545edf924ddea475adf77a85a1f379666d5a8d2fcaac340afaa31bcb7281f4cfecbcebca6a407f6f37d2f37091f00f28b724892b342a0c481677504e809c9cf610db9c6fb99e8e82b9c6adbb1bc190f7c4f64fe0b9b350f906f7b81a1ade0e6232e842048f9ed218a4ced9746e87217b0473f3ab6525f7df808b86e6e84e2c526f28564de7519c8695a9c4ec6d2a71d75208747fac4873f487ce43205193b842dc21c883a6de33845dc01b85e951b0372e566d12288fb8f577a8497a909a0bbca3cba029b23bb81f19870f75b2a7f3b788660685fcd743a4b79e88d13e9852b2036c0496e84cea36a5a2b97486295823e918b861ae5b939ebbed42a6e499705d2dec4d181979fca075bd9eb970ebc2c17ef7bcf5d958a884e07ae5fcf12271493a270843289fe2ea1aebd8f2893d23f93af757944c174f5dedad5f32a3145507782f622e8e8b6edb3a2b382aa9baf33c2ead92f9471c9ef4dd077a8ce97a77e8c67bf537f3df81540af94cb4622f15c85afe45e94cbaa34c4f9916c4298eddeadc064b592671c858a475799f0c3070304ebd9ca5fc648c5b0a2dc43969d2cce0920e17b7f416d54d4060d4a9a97ffda5526a6124b407ca8ae10ad8d4fdbb80fe38b3630e1c48442c927d67b37e8c7dff6093006bb624a7069819c46a8e94a084cc11d7d134b09a81f7c502604b1f8807a0910585712365f32b40f11f3099341b3ba8f2c47866afb1718a7eb4656e25b9bfe739f784cf354878a4d0258903c4be8e9b282af10216f4f560b0abc15dcd05ff255d10443b68aae61d47467a0b0f335f3e2bd2c9447577e25ad7879dd88ae2e801c07eeb10941048ea5ddc6dd3a9aaf00667401035f0e5cc352dc0288ae642724a17ff88b0108f6fe07abe0f94f0b01c4140bc59f93ca21f9ddacb964979a32403df91ea468b13a84a64a31a6c3c6268143bbcc52bb8599c3979cc924f8e4c39fc5c2236745aa83c088040e0c9684a237f5213266dc44655b4191fa8ad4e9eb2f2eece62a5d7baa7283408f22f8a5eb8d7afe2cadb4492c5d2ae3ae7d93fadd1bd8c49746fb80a848b57110729d53c05413a3bc7616c5736767b1f972d437e83a83282f5782942696ddf3b7d18930e6717997f80df7655138b8f77d87d17bc08ef56c21f80adc5bb0a5625cd920146ce328455fac5e839bb66a2fb2d9212bad6d6065dd016a4cbc0793cecbb735708dbf63b03d7667785a8da14cb1fa78c332a95a178269f7b8838ce54e985f06d72208f57de0a32dee865e27313d39bd4915ff173b8d82f8
baed915cd85368092d2df59ace3399e07bdee1eadfc42b70a9a9a391aae9a727df6e92b6d20e320204d5357a20340d6be3186ee6f259e1c4ded5a43cbc9ae532e2c294ad27ae6f22ca32c7098b14999fc4d71aa0836d6a69c4497ae1d056623f078686318f647175b31720a2d74582d0e35be4cd14d3af28387cf6bb14353563663ecfca11ea50b08fa4cd8c7e4731d51885c72f8c72b6ff733a69ff2d4a1c4c05e3bb771a9f7dd88a9c1d09a6e0e37db701c62834f31725bbb9a97785de1afb03fc5c25427bd90becfb65f15fe111de23c7b15e028bf67a24583955c75f959d7328ac0f7fe3f146cd06ac179b3fc42cfa145ec60c8ce51d3744b69332cc7cb720ce8ed78c095c81ee51b1802fae795bf661e85bde06e57b1fa4269f38a5b748a47ef48a5fc6ca4fda294ca9e8bda7e0f2115f9c6793da0f846dc2a9fff2769e956b6ab2e7ea42bbc01c83550fd5c711d3324356dd4f0a002fe6bd6632986b0f38c84715c616d11b0d9ef830588015772e9535831bd2eebc50c0fc0f24f75fbccaa462ff3730523ded3d0ff453be300ad6638cd4a53ca1830027912552d33ffb8958fde9ef8d9f2b11518b6e3a7a6ef94f60a4da74c31f287ff04589643f3052f9f80e1466585177f63f63a49e26dc894a8e9fff1318a9139ccd07e03a7077b3118f85bbfe8dede05d3070657c7fc6483db5e5268c6d0e167278f619d0f1f3ff098cd413c2fd6fc348fdb9c5fe3af100e5ab4446852bfbbe09a1088f356a91acc19fac76c3451fa2ab2f195d164f5025074499532816f5dd8adcd102ef9be95568d9840fb7c964344ce17133d0b7c4d99b1ec1fd40ca8fe07ed4ff3fdccfdf7ffd59ffc0c1eaaa26568a42786efbf9dcde829677ee7f1cee8707b891def8f1e6e25dc24ad8bcc77344535b988ec1e6582b647ba4edf1ae4f840d4200633aaa697ce19bef93f01c381996c71ecf0c9bf4d09ac40ec06cbf991255df2c3b8ef0f6fcdf1ce1ad474cb7b907f7f2c359033a6ef442d39e055598d7bf1de1cd0bcc701830485461e8eda58459cda7d9ab24bd832f2bf421669333559a090e9100013c4553317af3d6fee08c4d3b61f2b95a0ece291e456b8b6b09157e6b5216c5558736b8832018ef7eb8a8d3029f4f39d09d663885f6b13cc01bdbe9f06cbd82d4eec931cfa47cab784de5f689c64f969dc8179ab08b06bceb008ca504333f1ac8cda7b7c45931cd839f24e5950b5ac6b882d3a28d715e7acbc61c67fc1fb6ff91f15f5b592b82c8763e4703e60b646b34d25ce049e7e7ff56c67ff03f97f13fec86e567b0e932abe952ec93fb89bd162018aa751878082c0c080e0111e9efbde6b732fe63f9209e7bc0c2f221b8792edf37f45cb31e966fccc6b97c3fbc73fbe7dfe7af9d758f76c8f7
ef8276450d2bc1708bead9963d326c48c3e77aa6395c1f095efb27b85217463c7d891edc70fda2494f4538fb9c707586f069762c1fd183323392f7d454d92a8b6032ba3a8b28e356fe34a5a606675afc1632b855df62a42b0bf331927321ba9c7e5471dbd2438c8e245eac3799e1d5dc292ae23c6d5d18001fa997306b5f55f2979fce2fe6465140930f2c12408321c2ad9a8b44e2c4b4130b56cdd6aaea56f7b2a277d2d6e19847d044eee094aada54fb7acf1b81aa522468e0fadec4fc3bb684e84b9ca6559679accba4834c3123083cc9cf74eeaf32bccc9c0dbff235b3623ac24e35df7c2e6d936589f5ad29bda5b96aedbb4e89c2da4da2e7b31c5b16b7a5f4bc794b312b6972fbd47881574c4aaf57c912059c954bba956f63b6d244080af95f8348e46927ddb9cfeba50d88bd167b4abf364a9e0f66c1b334e7bb95a56c151c1a911022dd05f78d22a9c3ea68bf7820f6cbfbc5bf60e2396b9acac85d8b2c78565f76d684427ebd9703974aaadbfe6fef173c40c2348ff5fe8e90210a1e6bdb54b6a2c09afc036fc8d6873ef5626e06145dc65700e943705e3442f24a7becf42046414b5b1ec1168129b662b82fb9a0b45386e7e3839b5ff2c3e557ec6954304742ef74c220cb4444d72ee942db1d02776b13af74664277c53304814441e0663fba8576f3006c38d72ade617646d677b5c711f6dc11ec6a134bb521149af76dbf3ee369871a0c1827551359d68926ad3390b27419688636c1c9656916703d1c2d6ee644b5908639eafc04fa5f7668f92f66afa28edba5c80a253b50f4ff76e79a22a867fdae203cc0b6edd59e60add89b38ee1564bd4aac7c65b96371174ba9bbbe92dad5ce3d36f00074eca3ad05f1d3ac91c570634561452cba1f2a23053fa8bd1eb3671bf319cb1dfeff040000ffff382274ee", + "nonce": "0x97ebf", + "value": "0x0", + "gas": "0xa83d0", + "gasPrice": "0xe569ebfa3", + "maxPriorityFeePerGas": "0x312d317", + "maxFeePerGas": "0x1c692251bf", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0xe17f0dabdf7d36d1733f5f7cc9f64262b7559198448c602a9cdd0d18643708e8", + "s": "0x591e7d2fdd7671e9f42aaf90ebedff405d75f02b572a4baf1b023e8849869c47", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0xa0", + "hash": "0x02f2f5ca1921826ca3e1512a91a6171960c3dee229a18d15e5496001ba1dc846", + "from": 
"0x77300c71071eca35cb673a0b7571b2907deb77c7", + "to": "0xdac17f958d2ee523a2206206994597c13d831ec7", + "input": "0xa9059cbb00000000000000000000000092309be32bdee43d48e9199c907d6b338391d1ec00000000000000000000000000000000000000000000000000000000050f78a0", + "nonce": "0x2d3", + "value": "0x0", + "gas": "0x249f0", + "gasPrice": "0xe569ebfa3", + "maxPriorityFeePerGas": "0x312d317", + "maxFeePerGas": "0x1c692251bf", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0xc1791806c092dad1dfeef35475e96d06e4e4d53a0aafddee57d0ed68efbb7ef2", + "s": "0x588feb91783eae8e8094a9fb206c8c2e8ff8a5f1df2bd658ee084f8f74d43305", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0xa1", + "hash": "0xf340522352c1a9276b498dacf4a0c403a9c6ed81c2d6fc8b7e1782a6d6348125", + "from": "0xef59219a1f67be77fc9ca2294c4e6795582ebe50", + "to": "0x1c036473a058a83e2eedfc0b46bcf6c5a6838240", + "input": "0x095ea7b3000000000000000000000000000000000022d473030f116ddee9f6b43ac78ba3ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + "nonce": "0x29f", + "value": "0x0", + "gas": "0xb7b3", + "gasPrice": "0xe569ebfa3", + "maxPriorityFeePerGas": "0x312d317", + "maxFeePerGas": "0x110d1c1f15", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0xb9eb1034b80368ffacc42cd23d40e38b3feed8985650e68047c252dbc95d0b8", + "s": "0x6210a827c079d27d56fdfc0f5b90194c298695e61468c16ea473727fe2d03f6f", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0xa2", + "hash": "0xc970162f0ca7d41a654536a62b1ba1280126725a1bb3f9022ac0486d45ef4502", + "from": "0x36412cd7965a427d810f7945fe708912c1aea63e", + "to": "0xe6ea7f120f9903a41b127348eea0b01241c8a3b9", + "input": "0x", + "nonce": "0x6e", + "value": "0x1affaf179cf8c00", + "gas": "0x5208", + 
"gasPrice": "0xe569ebfa3", + "maxPriorityFeePerGas": "0x312d317", + "maxFeePerGas": "0x110d1c1f15", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x0", + "r": "0x83c4d761fd9fd542034c2f432dff9e83816dd81b3eb85eb2b301ef2034c09331", + "s": "0x5c448508a3b51b3b5219abb0868cefcb36c48c1bd48f19f325ce52033f26f40", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0xa3", + "hash": "0x94d8bf71e68377cca8da436818c359ebacb8a40396aaf02a65fcf9194322926f", + "from": "0xe3717b5399b2cbe01d295f7b1c1e72618b7b26f8", + "to": "0xdac17f958d2ee523a2206206994597c13d831ec7", + "input": "0x23b872dd0000000000000000000000007ae3e4d1a6ce0e4866a6574260c2e040c5c4035e000000000000000000000000c75368c5054d883a1923fc2d07cd2033e05a524b00000000000000000000000000000000000000000000000000000000ab4dc280", + "nonce": "0xd", + "value": "0x0", + "gas": "0x11c37", + "gasPrice": "0xe569ebfa3", + "maxPriorityFeePerGas": "0x312d317", + "maxFeePerGas": "0x197e8d9db3", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0x36d13fa6f44d8bc47692ff7cc8330d0d5aee70a06660622b13546c84f09572eb", + "s": "0x7ab51ab6d18ef2265a9dc9b4e5d6ea0b68460685e861d7b3b174d404bebc9ba9", + "yParity": "0x1" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0xa4", + "hash": "0xd06a0521f5ca7b20b96b0f1431b7c2e6179ce21d85ade015f6c74f3d2e009076", + "from": "0x6cc8dcbca746a6e4fdefb98e1d0df903b107fd21", + "to": "0x4cff49d0a19ed6ff845a9122fa912abcfb1f68a6", + "input": "0xa9059cbb000000000000000000000000951ea683692db53016b8fb7e27e760dbbd5457a800000000000000000000000000000000000000000000087a6db57b14d2160000", + "nonce": "0x5e58b", + "value": "0x0", + "gas": "0x13379", + "gasPrice": "0xe569ebfa3", + "maxPriorityFeePerGas": "0x312d317", + "maxFeePerGas": "0x197e8d9db3", + "accessList": [], + "chainId": 
"0x1", + "type": "0x2", + "v": "0x0", + "r": "0xfd8a504d405a0a9f71fe8132e7a87f353a396e28fd094df4bf79fc5585a02f10", + "s": "0x21408e03607be3fc584ea641af24ed8619255990db897880ed18e81ccd25b868", + "yParity": "0x0" + }, + { + "blockHash": "0xb1214baed59ee19bce48b3a2df4d9c485848ac91ac3cb286298f93a274eecd3c", + "blockNumber": "0x1000000", + "transactionIndex": "0xa5", + "hash": "0xe49cfdfb8defe32015a95e421f6ed62490c2678b482627649dd125ef03a76df5", + "from": "0x1f9090aae28b8a3dceadf281b0f12828e676c326", + "to": "0x4675c7e5baafbffbca748158becba61ef3b0a263", + "input": "0x", + "nonce": "0x2e4d", + "value": "0xd485f4daf7fe77", + "gas": "0x5208", + "gasPrice": "0xe538bec8c", + "maxPriorityFeePerGas": "0x0", + "maxFeePerGas": "0xe538bec8c", + "accessList": [], + "chainId": "0x1", + "type": "0x2", + "v": "0x1", + "r": "0xddc93b78d8835468e4407ae62f5fe4c6daad2375af1a10af43e320a5cdc5592d", + "s": "0x1ce52acf8553ef1a7f9964ccb9b7038d2b285f40f51b43b2fcc8c31f08ce243e", + "yParity": "0x1" + } + ] +} diff --git a/nimbus_verified_proxy/tests/test_transactions.nim b/nimbus_verified_proxy/tests/test_transactions.nim new file mode 100644 index 0000000000..9d8d427e44 --- /dev/null +++ b/nimbus_verified_proxy/tests/test_transactions.nim @@ -0,0 +1,34 @@ +# Nimbus +# Copyright (c) 2025 Status Research & Development GmbH +# Licensed and distributed under either of +# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). +# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). +# at your option. This file may not be copied, modified, or distributed except according to those terms. 
+ +{.used.} + +import + unittest2, + web3/eth_api_types, + stew/io2, + json_rpc/jsonmarshal, + eth/common/eth_types_rlp, + ../rpc/transactions + +proc getBlockFromJson(filepath: string): BlockObject = + var blkBytes = readAllBytes(filepath) + let blk = JrpcConv.decode(blkBytes.get, BlockObject) + return blk + +let blk = getBlockFromJson("nimbus_verified_proxy/tests/block.json") + +suite "test transaction hashing": + test "check tx hash": + for tx in blk.transactions: + if tx.kind == TxOrHashKind.tohTx: + check checkTxHash(tx.tx, tx.tx.hash) + + test "check tx trie root": + let res = verifyTransactions(blk.transactionsRoot, blk.transactions) + + check res.isOk() diff --git a/nimbus_verified_proxy/types.nim b/nimbus_verified_proxy/types.nim index b9eea082b6..70e693ce22 100644 --- a/nimbus_verified_proxy/types.nim +++ b/nimbus_verified_proxy/types.nim @@ -12,6 +12,7 @@ type proxy*: RpcProxy headerStore*: HeaderStore chainId*: UInt256 + maxBlockWalk*: uint64 BlockTag* = eth_api_types.RtBlockIdentifier @@ -23,5 +24,8 @@ proc init*( proxy: RpcProxy, headerStore: HeaderStore, chainId: UInt256, + maxBlockWalk: uint64, ): T = - VerifiedRpcProxy(proxy: proxy, headerStore: headerStore, chainId: chainId) + VerifiedRpcProxy( + proxy: proxy, headerStore: headerStore, chainId: chainId, maxBlockWalk: maxBlockWalk + ) From 1b1f07e23bd97ba5d2cab5cf2cec24099012a7c9 Mon Sep 17 00:00:00 2001 From: bhartnett <51288821+bhartnett@users.noreply.github.com> Date: Thu, 3 Jul 2025 22:41:03 +0800 Subject: [PATCH 119/138] Stateless: Simplify the witness table data structure (#3446) --- execution_chain/db/ledger.nim | 47 +++++++++++------------------ portal/evm/async_evm.nim | 28 ++++++++--------- tests/test_ledger.nim | 57 ++++++++++++++++++----------------- 3 files changed, 59 insertions(+), 73 deletions(-) diff --git a/execution_chain/db/ledger.nim b/execution_chain/db/ledger.nim index a97c8bd3fd..2470c2f121 100644 --- a/execution_chain/db/ledger.nim +++ b/execution_chain/db/ledger.nim @@ 
-42,13 +42,13 @@ const when statelessEnabled: type - WitnessKey* = object - storageMode*: bool - address*: Address - codeTouched*: bool - storageSlot*: UInt256 + WitnessKey* = tuple[ + address: Address, + slot: Opt[UInt256] + ] - WitnessTable* = OrderedTable[(Address, Hash32), WitnessKey] + # Maps witness keys to the codeTouched flag + WitnessTable* = OrderedTable[WitnessKey, bool] type AccountFlag = enum @@ -98,8 +98,7 @@ type when statelessEnabled: witnessKeys: WitnessTable ## Used to collect the keys of all read accounts, code and storage slots. - ## Maps a tuple of address and hash of the key (address or slot) to the - ## witness key which can be either a storage key or an account key + ## Maps a tuple of address and slot (optional) to the codeTouched flag. ReadOnlyLedger* = distinct LedgerRef @@ -171,12 +170,9 @@ proc getAccount( shouldCreate = true; ): AccountRef = when statelessEnabled: - let lookupKey = (address, address.toAccountKey) + let lookupKey = (address, Opt.none(UInt256)) if not ac.witnessKeys.contains(lookupKey): - ac.witnessKeys[lookupKey] = WitnessKey( - storageMode: false, - address: address, - codeTouched: false) + ac.witnessKeys[lookupKey] = false # search account from layers of cache var sp = ac.savePoint @@ -465,13 +461,10 @@ proc getCode*(ac: LedgerRef, address: Address, returnHash: static[bool] = false): auto = when statelessEnabled: - let lookupKey = (address, address.toAccountKey) + let lookupKey = (address, Opt.none(UInt256)) # We overwrite any existing record here so that codeTouched is always set to # true even if an account was previously accessed without touching the code - ac.witnessKeys[lookupKey] = WitnessKey( - storageMode: false, - address: address, - codeTouched: true) + ac.witnessKeys[lookupKey] = true let acc = ac.getAccount(address, false) if acc.isNil: @@ -532,11 +525,9 @@ proc getCommittedStorage*(ac: LedgerRef, address: Address, slot: UInt256): UInt2 let acc = ac.getAccount(address, false) when statelessEnabled: - let 
lookupKey = (address, slot.toSlotKey) + let lookupKey = (address, Opt.some(slot)) if not ac.witnessKeys.contains(lookupKey): - ac.witnessKeys[lookupKey] = WitnessKey( - storageMode: true, - storageSlot: slot) + ac.witnessKeys[lookupKey] = false if acc.isNil: return @@ -546,11 +537,9 @@ proc getStorage*(ac: LedgerRef, address: Address, slot: UInt256): UInt256 = let acc = ac.getAccount(address, false) when statelessEnabled: - let lookupKey = (address, slot.toSlotKey) + let lookupKey = (address, Opt.some(slot)) if not ac.witnessKeys.contains(lookupKey): - ac.witnessKeys[lookupKey] = WitnessKey( - storageMode: true, - storageSlot: slot) + ac.witnessKeys[lookupKey] = false if acc.isNil: return @@ -635,11 +624,9 @@ proc setStorage*(ac: LedgerRef, address: Address, slot, value: UInt256) = acc.flags.incl {Alive} when statelessEnabled: - let lookupKey = (address, slot.toSlotKey) + let lookupKey = (address, Opt.some(slot)) if not ac.witnessKeys.contains(lookupKey): - ac.witnessKeys[lookupKey] = WitnessKey( - storageMode: true, - storageSlot: slot) + ac.witnessKeys[lookupKey] = false let oldValue = acc.storageValue(slot, ac) if oldValue != value: diff --git a/portal/evm/async_evm.nim b/portal/evm/async_evm.nim index 25d26b09fc..e850d156de 100644 --- a/portal/evm/async_evm.nim +++ b/portal/evm/async_evm.nim @@ -180,23 +180,21 @@ proc callFetchingState( # state queries are still issued in the background just incase the state is # needed in the next iteration. 
var stateFetchDone = false - for k, v in witnessKeys: - let (adr, _) = k + for k, codeTouched in witnessKeys: + let (adr, maybeSlot) = k if adr == default(Address): continue - if v.storageMode: - let slotIdx = (adr, v.storageSlot) - if slotIdx notin fetchedStorage: - debug "Fetching storage slot", address = adr, slotKey = v.storageSlot - let storageFut = evm.backend.getStorage(header, adr, v.storageSlot) + if maybeSlot.isSome(): + let slot = maybeSlot.get() + if (adr, slot) notin fetchedStorage: + debug "Fetching storage slot", address = adr, slot + let storageFut = evm.backend.getStorage(header, adr, slot) if not stateFetchDone: - storageQueries.add(StorageQuery.init(adr, v.storageSlot, storageFut)) + storageQueries.add(StorageQuery.init(adr, slot, storageFut)) if not optimisticStateFetch: stateFetchDone = true else: - doAssert(adr == v.address) - if adr notin fetchedAccounts: debug "Fetching account", address = adr let accFut = evm.backend.getAccount(header, adr) @@ -205,7 +203,7 @@ proc callFetchingState( if not optimisticStateFetch: stateFetchDone = true - if v.codeTouched and adr notin fetchedCode: + if codeTouched and adr notin fetchedCode: debug "Fetching code", address = adr let codeFut = evm.backend.getCode(header, adr) if not stateFetchDone: @@ -336,13 +334,13 @@ proc createAccessList*( # returned in the callResult. 
var al = access_list.AccessList.init() - for lookupKey, witnessKey in witnessKeys: - let (adr, _) = lookupKey + for k, codeTouched in witnessKeys: + let (adr, maybeSlot) = k if adr == fromAdr: continue - if witnessKey.storageMode: - al.add(adr, witnessKey.storageSlot) + if maybeSlot.isSome(): + al.add(adr, maybeSlot.get()) else: al.add(adr) diff --git a/tests/test_ledger.nim b/tests/test_ledger.nim index 0fe7709fa6..d52a9fb481 100644 --- a/tests/test_ledger.nim +++ b/tests/test_ledger.nim @@ -657,11 +657,11 @@ proc runLedgerBasicOperationsTests() = let witnessKeys = ac.getWitnessKeys() - keyData = witnessKeys.getOrDefault((addr1, addr1.toAccountKey)) + key = (addr1, Opt.none(UInt256)) check: witnessKeys.len() == 1 - keyData.address == addr1 - keyData.codeTouched == false + witnessKeys.contains(key) + witnessKeys.getOrDefault(key) == false test "Witness keys - Get code": var @@ -672,11 +672,11 @@ proc runLedgerBasicOperationsTests() = let witnessKeys = ac.getWitnessKeys() - keyData = witnessKeys.getOrDefault((addr1, addr1.toAccountKey)) + key = (addr1, Opt.none(UInt256)) check: witnessKeys.len() == 1 - keyData.address == addr1 - keyData.codeTouched == true + witnessKeys.contains(key) + witnessKeys.getOrDefault(key) == true test "Witness keys - Get storage": var @@ -688,10 +688,11 @@ proc runLedgerBasicOperationsTests() = let witnessKeys = ac.getWitnessKeys() - keyData = witnessKeys.getOrDefault((addr1, slot1.toSlotKey)) + key = (addr1, Opt.some(slot1)) check: witnessKeys.len() == 2 - keyData.storageSlot == slot1 + witnessKeys.contains(key) + witnessKeys.getOrDefault(key) == false test "Witness keys - Set storage": var @@ -703,10 +704,11 @@ proc runLedgerBasicOperationsTests() = let witnessKeys = ac.getWitnessKeys() - keyData = witnessKeys.getOrDefault((addr1, slot1.toSlotKey)) + key = (addr1, Opt.some(slot1)) check: witnessKeys.len() == 2 - keyData.storageSlot == slot1 + witnessKeys.contains(key) + witnessKeys.getOrDefault(key) == false test "Witness keys - Get 
account, code and storage": var @@ -728,31 +730,30 @@ proc runLedgerBasicOperationsTests() = let witnessKeys = ac.getWitnessKeys() check witnessKeys.len() == 5 - var keysList = newSeq[(Address, WitnessKey)]() + var keysList = newSeq[(WitnessKey, bool)]() for k, v in witnessKeys: - let (adr, _) = k - keysList.add((adr, v)) + keysList.add((k, v)) check: - keysList[0][0] == addr1 - keysList[0][1].address == addr1 - keysList[0][1].codeTouched == true + keysList[0][0].address == addr1 + keysList[0][0].slot == Opt.none(UInt256) + keysList[0][1] == true - keysList[1][0] == addr2 - keysList[1][1].address == addr2 - keysList[1][1].codeTouched == true + keysList[1][0].address == addr2 + keysList[1][0].slot == Opt.none(UInt256) + keysList[1][1] == true - keysList[2][0] == addr2 - keysList[2][1].storageSlot == slot1 - - keysList[3][0] == addr1 - keysList[3][1].storageSlot == slot1 - - keysList[4][0] == addr3 - keysList[4][1].address == addr3 - keysList[4][1].codeTouched == false + keysList[2][0].address == addr2 + keysList[2][0].slot == Opt.some(slot1) + keysList[2][1] == false + keysList[3][0].address == addr1 + keysList[3][0].slot == Opt.some(slot1) + keysList[3][1] == false + keysList[4][0].address == addr3 + keysList[4][0].slot == Opt.none(UInt256) + keysList[4][1] == false # ------------------------------------------------------------------------------ From 65af60139b69dee4b87eef12e079cebea02fb88a Mon Sep 17 00:00:00 2001 From: andri lim Date: Fri, 4 Jul 2025 08:26:08 +0700 Subject: [PATCH 120/138] Add txFrame id to FC.validateBlock log (#3443) --- execution_chain/core/chain/forked_chain.nim | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/execution_chain/core/chain/forked_chain.nim b/execution_chain/core/chain/forked_chain.nim index af3229ab5f..8d6291a449 100644 --- a/execution_chain/core/chain/forked_chain.nim +++ b/execution_chain/core/chain/forked_chain.nim @@ -396,7 +396,9 @@ proc validateBlock(c: ForkedChainRef, excessBlobGas: 
blk.header.excessBlobGas, parentBeaconBlockRoot: blk.header.parentBeaconBlockRoot, requestsHash: blk.header.requestsHash, - ) + ), + parentTxFrame=cast[uint](parentFrame), + txFrame=cast[uint](txFrame) var receipts = c.processBlock(parent.header, txFrame, blk, blkHash, finalized).valueOr: txFrame.dispose() From 2f02b67971b96f03cef7b4a637e58d60fe7059ab Mon Sep 17 00:00:00 2001 From: Jacek Sieka Date: Fri, 4 Jul 2025 10:37:07 +0200 Subject: [PATCH 121/138] Revert "defer gc during block processing (#3384)" (#3445) Deferred GC seemed like a good idea to reduce the amount of work done during block processing, but a side effect of this is that more memory ends up being allocated in certain workloads which in turn causes an overall slowdown, with a long test showing a net performance effect that hovers around 0% and more memory usage. In particular, the troublesome range around 2M sees a 10-15% slowdown and an ugly memory usage spike. Reverting for now - it might be worth revisiting in the future under different memory allocation patters, but as usual, it's better to not do work at all (like in #3444) than to do work faster. This reverts commit 3a009158d3e381562cd4ff855033533dbeadd404. --- .../core/executor/process_block.nim | 18 ++++++------------ execution_chain/utils/utils.nim | 15 --------------- 2 files changed, 6 insertions(+), 27 deletions(-) diff --git a/execution_chain/core/executor/process_block.nim b/execution_chain/core/executor/process_block.nim index a4cfa9a390..644eb112f4 100644 --- a/execution_chain/core/executor/process_block.nim +++ b/execution_chain/core/executor/process_block.nim @@ -280,21 +280,15 @@ proc processBlock*( taskpool: Taskpool = nil, ): Result[void, string] = ## Generalised function to processes `blk` for any network. 
+ ?vmState.procBlkPreamble(blk, skipValidation, skipReceipts, skipUncles, taskpool) - # Processing a block involves making lots and lots of small memory allocations - # meaning that GC overhead can make up for 15% of processing time in extreme - # cases - since each block is bounded in the amount of memory needed, we can - # run collection once per block instead. - deferGc: - ?vmState.procBlkPreamble(blk, skipValidation, skipReceipts, skipUncles, taskpool) + # EIP-3675: no reward for miner in POA/POS + if not vmState.com.proofOfStake(blk.header, vmState.ledger.txFrame): + vmState.calculateReward(blk.header, blk.uncles) - # EIP-3675: no reward for miner in POA/POS - if not vmState.com.proofOfStake(blk.header, vmState.ledger.txFrame): - vmState.calculateReward(blk.header, blk.uncles) + ?vmState.procBlkEpilogue(blk, skipValidation, skipReceipts, skipStateRootCheck) - ?vmState.procBlkEpilogue(blk, skipValidation, skipReceipts, skipStateRootCheck) - - ok() + ok() # ------------------------------------------------------------------------------ # End diff --git a/execution_chain/utils/utils.nim b/execution_chain/utils/utils.nim index 3d818e9c2c..50fc3f3350 100644 --- a/execution_chain/utils/utils.nim +++ b/execution_chain/utils/utils.nim @@ -173,18 +173,3 @@ func weiAmount*(w: Withdrawal): UInt256 = func isGenesis*(header: Header): bool = header.number == 0'u64 and header.parentHash == GENESIS_PARENT_HASH - -template deferGc*(body: untyped): untyped = - when declared(GC_disable): - GC_disable() - - when declared(GC_enable): - defer: - GC_enable() - # Perform a small allocation which indirectly runs the garbage collector - - # unlike GC_fullCollect, this will use the usual nim heuristic for running - # the cycle colllector (which would be extremely expensive to run on each - # collection) - discard newSeq[int](1) - - body From 0eea2fa99451c9353dec7fca5546124e0a20c6be Mon Sep 17 00:00:00 2001 From: Jacek Sieka Date: Fri, 4 Jul 2025 10:39:37 +0200 Subject: [PATCH 122/138] 
aristo: switch to vector memtable (#3447) Every time we persist, we collect all changes into a batch and write that batch to a memtable which rocksdb lazily will write to disk using a background thread. The default implementation of the memtable in rocksdb is a skip list which can handle concurrent writes while still allowing lookups. We're not using concurrent inserts and the skip list comes with significant overhead both when writing and when reading. Here, we switch to a vector memtable which is faster to write but terrible to read. To compensate, we then proceed to flush the memtable eagerly to disk which is a blocking operation. One would think that blocking the main thread would be bad but it turns out that creating the skip list, also a blocking operation, is even slower, resulting in a net win. Coupled with this change, we also make the "lower" levels bigger effectively reducing the average number of levels that must be looked at to find recently written data. This could lead to some write amplification which is offset by making each file smaller and therefore making compactions more targeted. Taken together, this results in an overall import speed boost of about 3-4%, but above all, it reduces the main thread blocking time during persist.
pre (for 8k blocks persisted around block 11M): ``` DBG 2025-07-03 15:58:14.053+02:00 Core DB persisted kvtDur=8ms182us947ns mptDur=4s640ms879us492ns endDur=10s50ms862us669ns stateRoot=none() ``` post: ``` DBG 2025-07-03 14:48:59.426+02:00 Core DB persisted kvtDur=12ms476us833ns mptDur=4s273ms629us840ns endDur=3s331ms171us989ns stateRoot=none() ``` --- .../aristo/aristo_init/rocks_db/rdb_put.nim | 2 +- .../db/core_db/backend/aristo_rocksdb.nim | 65 +++++++++---------- .../db/core_db/backend/rocksdb_desc.nim | 14 +++- execution_chain/db/core_db/base.nim | 10 +++ .../db/kvt/kvt_init/rocks_db/rdb_put.nim | 2 +- 5 files changed, 55 insertions(+), 38 deletions(-) diff --git a/execution_chain/db/aristo/aristo_init/rocks_db/rdb_put.nim b/execution_chain/db/aristo/aristo_init/rocks_db/rdb_put.nim index 1187e4c1b5..baeb23f8ff 100644 --- a/execution_chain/db/aristo/aristo_init/rocks_db/rdb_put.nim +++ b/execution_chain/db/aristo/aristo_init/rocks_db/rdb_put.nim @@ -46,7 +46,7 @@ proc rollback*(rdb: var RdbInst, session: SharedWriteBatchRef) = proc commit*(rdb: var RdbInst, session: SharedWriteBatchRef): Result[void,(AristoError,string)] = if not session.isClosed(): defer: session.close() - rdb.baseDb.commit(session).isOkOr: + rdb.baseDb.commit(session, rdb.vtxCol).isOkOr: const errSym = RdbBeDriverWriteError when extraTraceMessages: trace logTxt "commit", error=errSym, info=error diff --git a/execution_chain/db/core_db/backend/aristo_rocksdb.nim b/execution_chain/db/core_db/backend/aristo_rocksdb.nim index ec8c0f3769..0fba6eeac6 100644 --- a/execution_chain/db/core_db/backend/aristo_rocksdb.nim +++ b/execution_chain/db/core_db/backend/aristo_rocksdb.nim @@ -73,17 +73,22 @@ proc toRocksDb*( if opts.writeBufferSize > 0: cfOpts.writeBufferSize = opts.writeBufferSize - # When data is written to rocksdb, it is first put in an in-memory table - # whose index is a skip list. 
Since the mem table holds the most recent data, - # all reads must go through this skiplist which results in slow lookups for - # already-written data. - # We enable a bloom filter on the mem table to avoid this lookup in the cases - # where the data is actually on disk already (ie wasn't updated recently). - # TODO there's also a hashskiplist that has both a hash index and a skip list - # which maybe could be used - uses more memory, requires a key prefix - # extractor - cfOpts.memtableWholeKeyFiltering = true - cfOpts.memtablePrefixBloomSizeRatio = 0.1 + # When data is written to rocksdb, it is first put in an in-memory table. The + # default implementation is a skip list whose overhead is quite significant + # both when inserting and during lookups - up to 10% CPU time has been + # observed in it. + # Instead of using a skip list, we'll bulk-load changes into a vector which + # immediately is flushed to L0/1 thus avoiding memtables completely (our own + # in-memory caches perform a similar task with less serialization). + # A downside of this approach is that the memtable *has* to be flushed in the + # main thread instead of this operation happening in the background - however, + # the time it takes to flush is less than it takes to build the skip list, so + # this ends up being a net win regardless. 
+ cfOpts.setMemtableVectorRep() + + # L0 files may overlap, so we want to push them down to L1 quickly so as to + # not have to read/examine too many files to find data + cfOpts.level0FileNumCompactionTrigger = 2 # ZSTD seems to cut database size to 2/3 roughly, at the time of writing # Using it for the bottom-most level means it applies to 90% of data but @@ -96,20 +101,17 @@ proc toRocksDb*( # https://github.com/facebook/rocksdb/wiki/Dictionary-Compression cfOpts.bottommostCompression = Compression.zstdCompression - # TODO In the AriVtx table, we don't do lookups that are expected to result - # in misses thus we could avoid the filter cost - this does not apply to - # other tables since their API admit queries that might result in - # not-found - specially the KVT which is exposed to external queries and - # the `HashKey` cache (AriKey) - # https://github.com/EighteenZi/rocksdb_wiki/blob/master/Memory-usage-in-RocksDB.md#indexes-and-filter-blocks - # https://github.com/facebook/rocksdb/blob/af50823069818fc127438e39fef91d2486d6e76c/include/rocksdb/advanced_options.h#L696 - # cfOpts.optimizeFiltersForHits = true - - cfOpts.maxBytesForLevelBase = cfOpts.writeBufferSize + # With the default options, we end up with 512MB at the base level - a + # multiplier of 16 means that we can fit 128GB in the next two levels - the + # more levels, the greater the read amplification at an expense of write + # amplification - given that we _mostly_ read, this feels like a reasonable + # tradeoff. 
+ cfOpts.maxBytesForLevelBase = cfOpts.writeBufferSize * 8 + cfOpts.maxBytesForLevelMultiplier = 16 # Reduce number of files when the database grows cfOpts.targetFileSizeBase = cfOpts.writeBufferSize - cfOpts.targetFileSizeMultiplier = 6 + cfOpts.targetFileSizeMultiplier = 4 # We certainly don't want to re-compact historical data over and over cfOpts.ttl = 0 @@ -118,6 +120,9 @@ proc toRocksDb*( let dbOpts = defaultDbOptions(autoClose = true) dbOpts.maxOpenFiles = opts.maxOpenFiles + # Needed for vector memtable + dbOpts.allowConcurrentMemtableWrite = false + if opts.rowCacheSize > 0: # Good for GET queries, which is what we do most of the time - however, # because we have other similar caches at different abstraction levels in @@ -126,20 +131,12 @@ proc toRocksDb*( # https://github.com/facebook/rocksdb/blob/af50823069818fc127438e39fef91d2486d6e76c/include/rocksdb/options.h#L1276 dbOpts.rowCache = cacheCreateLRU(opts.rowCacheSize, autoClose = true) - # Without this option, WAL files might never get removed since a small column - # family (like the admin CF) with only tiny writes might keep it open - this - # negatively affects startup times since the WAL is replayed on every startup. - # https://github.com/facebook/rocksdb/blob/af50823069818fc127438e39fef91d2486d6e76c/include/rocksdb/options.h#L719 - # Flushing the oldest - let writeBufferSize = - if opts.writeBufferSize > 0: opts.writeBufferSize else: cfOpts.writeBufferSize - - # The larger the value, the fewer files will be created but the longer the - # startup time (because this much data must be replayed) - dbOpts.maxTotalWalSize = 3 * writeBufferSize + 1024 * 1024 - dbOpts.keepLogFileNum = 16 # No point keeping 1000 log files around... 
+ # Parallelize L0 -> Ln compaction + # https://github.com/facebook/rocksdb/wiki/Subcompaction + dbOpts.maxSubcompactions = dbOpts.maxBackgroundJobs + (dbOpts, cfOpts) # ------------------------------------------------------------------------------ diff --git a/execution_chain/db/core_db/backend/rocksdb_desc.nim b/execution_chain/db/core_db/backend/rocksdb_desc.nim index ec3c1dc675..caf119711e 100644 --- a/execution_chain/db/core_db/backend/rocksdb_desc.nim +++ b/execution_chain/db/core_db/backend/rocksdb_desc.nim @@ -10,7 +10,7 @@ {.push raises: [].} -import std/[os, sequtils, sets], rocksdb +import std/[os, sequtils, sets], rocksdb, chronicles export rocksdb, sets @@ -30,6 +30,7 @@ type refs*: int commits*: int closes*: int + families*: seq[ColFamilyReadWrite] func dataDir*(baseDir: string): string = baseDir / BaseFolder / DataFolder @@ -56,13 +57,22 @@ proc close*(session: SharedWriteBatchRef) = session.commits = 0 session.closes = 0 + proc commit*( - rdb: RocksDbInstanceRef, session: SharedWriteBatchRef + rdb: RocksDbInstanceRef, session: SharedWriteBatchRef, cf: ColFamilyReadWrite ): Result[void, string] = session.commits += 1 + session.families.add cf if session.commits == session.refs: # Write to disk if everyone that opened a session also committed it ?rdb.db.write(session.batch) + # This flush forces memtables to be written to disk, which is necessary given + # the use of vector memtables which have very bad lookup performance. 
+ rdb.db.flush(session.families.mapIt(it.handle())).isOkOr: + # Not sure what to do here - the commit above worked so it would be strange + # to have an error here + warn "Could not flush database", error + ok() proc open*( diff --git a/execution_chain/db/core_db/base.nim b/execution_chain/db/core_db/base.nim index 6fed32dfc6..69a25bd396 100644 --- a/execution_chain/db/core_db/base.nim +++ b/execution_chain/db/core_db/base.nim @@ -12,6 +12,8 @@ import std/typetraits, + chronicles, + chronos/timer, eth/common/[accounts, base, hashes], ../../constants, ../[kvt, aristo], @@ -98,15 +100,23 @@ proc persist*( # kvt changes written to memory but not to disk because of an aristo # error), we have to panic instead. + let kvtTick = Moment.now() db.kvt.persist(kvtBatch[], txFrame.kTx) + let mptTick = Moment.now() db.mpt.persist(mptBatch[], txFrame.aTx, stateRoot) + let endTick = Moment.now() db.kvt.putEndFn(kvtBatch[]).isOkOr: raiseAssert $error db.mpt.putEndFn(mptBatch[]).isOkOr: raiseAssert $error + debug "Core DB persisted", + kvtDur = mptTick - kvtTick, + mptDur = endTick - mptTick, + endDur = Moment.now() - endTick, + stateRoot else: discard kvtBatch.expect("should always be able to create batch") discard mptBatch.expect("should always be able to create batch") diff --git a/execution_chain/db/kvt/kvt_init/rocks_db/rdb_put.nim b/execution_chain/db/kvt/kvt_init/rocks_db/rdb_put.nim index 61fa3ed669..39aaaea850 100644 --- a/execution_chain/db/kvt/kvt_init/rocks_db/rdb_put.nim +++ b/execution_chain/db/kvt/kvt_init/rocks_db/rdb_put.nim @@ -52,7 +52,7 @@ proc rollback*(rdb: var RdbInst, session: SharedWriteBatchRef) = proc commit*(rdb: var RdbInst, session: SharedWriteBatchRef): Result[void,(KvtError,string)] = if not session.isClosed(): defer: session.close() - rdb.baseDb.commit(session).isOkOr: + rdb.baseDb.commit(session, rdb.store[KvtGeneric]).isOkOr: const errSym = RdbBeDriverWriteError when extraTraceMessages: trace logTxt "commit", error=errSym, info=error From 
00d2ad4e7a772ed27b2656ea4fd96fdc5564c049 Mon Sep 17 00:00:00 2001 From: Jacek Sieka Date: Fri, 4 Jul 2025 12:28:18 +0200 Subject: [PATCH 123/138] reuse VertexRef instance on update (#3444) When updates to the MPT happen, a new VertexRef is allocated every time - this keeps the code simple but has the significant downside that updates cause unnecessary allocations. Instead of allocating a new `VertexRef` on every update, we can update the existing one provided that it is not shared. We can prevent it from being shared by copying it eagerly when it's added to the layer. A downside of this approach is that we also have to make a copy when invalidating hash keys, which affects branch and account nodes mainly. The tradeoff seems well worth it though, specially for imports that clock a nice perf boost, like in this little test: ``` (21005462, 21008193] 14.46 15.50 2,479.35 2,656.98 9m26s 8m48s 7.16% 7.16% -6.69% (21013654, 21016385] 15.28 16.14 2,523.74 2,665.83 8m56s 8m27s 5.63% 5.63% -5.33% (21021846, 21024577] 15.52 17.66 2,539.25 2,889.61 8m47s 7m43s 13.80% 13.80% -12.12% blocks: 16384, baseline: 27m10s, contender: 24m59s Time (total): -2m10s, -8.00% ``` --- execution_chain/db/aristo/aristo_compute.nim | 6 +-- execution_chain/db/aristo/aristo_delete.nim | 18 +++---- .../db/aristo/aristo_desc/desc_error.nim | 54 ------------------- execution_chain/db/aristo/aristo_hike.nim | 10 ---- execution_chain/db/aristo/aristo_layers.nim | 49 +++++++++++++---- execution_chain/db/aristo/aristo_merge.nim | 31 +++++------ 6 files changed, 67 insertions(+), 101 deletions(-) diff --git a/execution_chain/db/aristo/aristo_compute.nim b/execution_chain/db/aristo/aristo_compute.nim index 16828e33b3..9e0cacb30c 100644 --- a/execution_chain/db/aristo/aristo_compute.nim +++ b/execution_chain/db/aristo/aristo_compute.nim @@ -64,7 +64,7 @@ func leave(batch: var WriteBatch, nibble: uint8) = proc putKeyAtLevel( db: AristoTxRef, rvid: RootedVertexID, - vtx: VertexRef, + vtx: BranchRef, key: 
HashKey, level: int, batch: var WriteBatch, @@ -268,8 +268,8 @@ proc computeKeyImpl( # root key also changing while leaves that have never been hashed will see # their hash being saved directly to the backend. - if vtx.vType notin Leaves: - ?db.putKeyAtLevel(rvid, vtx, key, level, batch) + if vtx.vType in Branches: + ?db.putKeyAtLevel(rvid, BranchRef(vtx), key, level, batch) ok (key, level) proc computeKeyImpl( diff --git a/execution_chain/db/aristo/aristo_delete.nim b/execution_chain/db/aristo/aristo_delete.nim index 06eaa2a3ea..28f9b0e10e 100644 --- a/execution_chain/db/aristo/aristo_delete.nim +++ b/execution_chain/db/aristo/aristo_delete.nim @@ -119,9 +119,8 @@ proc deleteImpl( ok(nil) else: # Clear the removed leaf from the branch (that still contains other children) - let brDup = brVtx.dup + let brDup = db.layersUpdate((hike.root, br.vid), brVtx) discard brDup.setUsed(uint8 hike.legs[^2].nibble, false) - db.layersPutVtx((hike.root, br.vid), brDup) ok(nil) @@ -186,7 +185,8 @@ proc deleteStorageData*( let wpAcc = accHike.legs[^1].wp - stoID = AccLeafRef(wpAcc.vtx).stoID + accVtx = AccLeafRef(wpAcc.vtx) + stoID = accVtx.stoID if not stoID.isValid: return ok() # Trying to delete something that doesn't exist is ok @@ -198,8 +198,9 @@ proc deleteStorageData*( return ok() return err(error[1]) - # Mark account path Merkle keys for update, except for the vtx we update below - db.layersResKeys(accHike, skip = if stoHike.legs.len == 1: 1 else: 0) + # Mark account path Merkle keys for update - the leaf key is not stored so no + # need to mark it + db.layersResKeys(accHike, skip = 1) let otherLeaf = ?db.deleteImpl(stoHike) db.layersPutStoLeaf(mixPath, nil) @@ -212,10 +213,9 @@ proc deleteStorageData*( # If there was only one item (that got deleted), update the account as well if stoHike.legs.len == 1: # De-register the deleted storage tree from the account record - let leaf = AccLeafRef(wpAcc.vtx).dup # Dup on modify + let leaf = db.layersUpdate((accHike.root, wpAcc.vid), 
accVtx) # Dup on modify leaf.stoID.isValid = false db.layersPutAccLeaf(accPath, leaf) - db.layersPutVtx((accHike.root, wpAcc.vid), leaf) ok() @@ -246,10 +246,10 @@ proc deleteStorageTree*( ?db.delStoTreeImpl(stoID.vid, accPath) # De-register the deleted storage tree from the accounts record - let leaf = accVtx.dup # Dup on modify + let leaf = db.layersUpdate((accHike.root, wpAcc.vid), accVtx) # Dup on modify leaf.stoID.isValid = false db.layersPutAccLeaf(accPath, leaf) - db.layersPutVtx((accHike.root, wpAcc.vid), leaf) + ok() # ------------------------------------------------------------------------------ diff --git a/execution_chain/db/aristo/aristo_desc/desc_error.nim b/execution_chain/db/aristo/aristo_desc/desc_error.nim index d8cc16be54..4da47c31f7 100644 --- a/execution_chain/db/aristo/aristo_desc/desc_error.nim +++ b/execution_chain/db/aristo/aristo_desc/desc_error.nim @@ -12,7 +12,6 @@ type AristoError* = enum NothingSerious = 0 - # Cache checker `checkCache()` CheckAnyVidDeadStorageRoot CheckAnyVidSharedStorageRoot @@ -20,8 +19,6 @@ type CheckAnyVtxEmptyKeyExpected CheckAnyVtxEmptyKeyMismatch CheckAnyVtxBranchLinksMissing - CheckAnyVtxLockWithoutKey - CheckAnyVTopUnset CheckBeCacheGarbledVTop CheckBeCacheKeyDangling @@ -34,11 +31,8 @@ type CheckStkKeyStrayZeroEntry CheckStkVtxKeyMismatch - CheckRlxVtxIncomplete - CheckRlxVtxKeyMissing CheckRlxVtxKeyMismatch - # De-serialiser from `blobify.nim` Deblob256LenUnsupported Deblob64LenUnsupported @@ -54,39 +48,24 @@ type DeblobWrongSize DeblobWrongType - # Deletion of vertex paths, `deleteXxx()` - DelAccRootNotAccepted DelBranchExpexted DelBranchWithoutRefs - DelDanglingStoTrie DelLeafExpexted - DelRootVidMissing - DelStoRootNotAccepted DelVidStaleVtx # Fetch functions from `aristo_fetch.nim` FetchAccInaccessible FetchAccPathWithoutLeaf - FetchAccRootNotAccepted - FetchLeafKeyInvalid - FetchPathInvalid FetchPathNotFound FetchPathStoRootMissing - FetchRootVidMissing - FetchStoRootNotAccepted - # Get functions from 
`aristo_get.nim` - GetFilNotFound - GetFqsNotFound GetKeyNotFound GetKeyUpdateNeeded - GetLstNotFound GetTuvNotFound GetVtxNotFound - # Path function `hikeUp()` HikeBranchMissingEdge HikeBranchTailEmpty @@ -96,35 +75,11 @@ type HikeNoLegs HikeRootMissing - # Merge leaf `merge()` MergeHikeFailed # Ooops, internal error - MergeAccRootNotAccepted - MergeStoRootNotAccepted MergeNoAction - MergeRootVidMissing MergeStoAccMissing - - # Neighbour vertex, tree traversal `nearbyRight()` and `nearbyLeft()` - NearbyBeyondRange - NearbyBranchError - NearbyDanglingLink - NearbyEmptyHike - NearbyFailed - NearbyLeafExpected - NearbyNestingTooDeep - NearbyPathTailUnexpected - NearbyUnexpectedVtx - NearbyVidInvalid - - - # Path/nibble/key conversions in `aisto_path.nim` - PathExpected64Nibbles - PathAtMost64Nibbles - PathExpectedLeaf - - # Part/proof node errors PartChnBranchPathExhausted PartChnBranchVoidEdge @@ -141,22 +96,13 @@ type # RocksDB backend RdbBeCantCreateTmpDir RdbBeDriverDelAdmError - RdbBeDriverDelKeyError RdbBeDriverDelVtxError RdbBeDriverGetAdmError RdbBeDriverGetKeyError RdbBeDriverGetVtxError - RdbBeDriverGuestError RdbBeDriverPutAdmError - RdbBeDriverPutKeyError RdbBeDriverPutVtxError RdbBeDriverWriteError - RdbBeTypeUnsupported - RdbBeWrSessionUnfinished - RdbBeWrTriggerActiveAlready - RdbBeWrTriggerNilFn - RdbGuestInstanceAborted - RdbHashKeyExpected # End diff --git a/execution_chain/db/aristo/aristo_hike.nim b/execution_chain/db/aristo/aristo_hike.nim index aba286367d..0f6ed2f69f 100644 --- a/execution_chain/db/aristo/aristo_hike.nim +++ b/execution_chain/db/aristo/aristo_hike.nim @@ -195,16 +195,6 @@ proc hikeUp*[LeafType]( ok() -# proc hikeUp*[LeafType]( -# path: openArray[byte]; -# root: VertexID; -# db: AristoTxRef; -# leaf: Opt[LeafType]; -# hike: var Hike -# ): Result[void,(VertexID,AristoError)] = -# ## Variant of `hike()` -# NibblesBuf.fromBytes(path).hikeUp(root, db, leaf, hike) - proc hikeUp*[LeafType]( path: Hash32; root: VertexID; diff --git 
a/execution_chain/db/aristo/aristo_layers.nim b/execution_chain/db/aristo/aristo_layers.nim index c5f01af5be..e65f9dcd36 100644 --- a/execution_chain/db/aristo/aristo_layers.nim +++ b/execution_chain/db/aristo/aristo_layers.nim @@ -91,13 +91,39 @@ func layersPutVtx*( rvid: RootedVertexID; vtx: VertexRef; ) = - ## Store a (potentally empty) vertex on the top layer + ## Store a fresh instance (or nil) of a vertex on the top layer db.sTab[rvid] = vtx db.kMap.del(rvid) if db.snapshot.level.isSome(): db.snapshot.vtx[rvid] = (vtx, VOID_HASH_KEY, db.level) +func layersPrepareUpdate[T: VertexRef](db: AristoTxRef, rvid: RootedVertexID, vtx: T): T = + if rvid in db.sTab: + vtx + else: + let dup = vtx.dup() + db.sTab[rvid] = dup + dup + +func layersUpdate*[T: BranchRef | LeafRef]( + db: AristoTxRef; + rvid: RootedVertexID; + vtx: T; + ): T = + ## Prepare the given vertex for updates, allocating a new one or updating the + ## existing one depending whether it belongs to this layer already or resides + ## in a different layer and therefore should not be mutated. 
+ let vtx = db.layersPrepareUpdate(rvid, vtx) + + when T is BranchRef: + # Only branches have keys stored and we're not changing vertex type + db.kMap.del(rvid) + + if db.snapshot.level.isSome(): + db.snapshot.vtx[rvid] = (VertexRef(vtx), VOID_HASH_KEY, db.level) + vtx + func layersResVtx*( db: AristoTxRef; rvid: RootedVertexID; @@ -109,25 +135,28 @@ func layersResVtx*( func layersPutKey*( db: AristoTxRef; rvid: RootedVertexID; - vtx: VertexRef, + vtx: BranchRef, key: HashKey; ) = - ## Store a (potentally void) hash key on the top layer - db.sTab[rvid] = vtx + ## Store a (potentally void) hash key on the top layer - we don't store keys + ## for leaves since these are trivial to compute + let vtx = db.layersPrepareUpdate(rvid, vtx) + db.kMap[rvid] = key if db.snapshot.level.isSome(): - db.snapshot.vtx[rvid] = (vtx, key, db.level) + db.snapshot.vtx[rvid] = (VertexRef(vtx), key, db.level) -func layersResKey*(db: AristoTxRef; rvid: RootedVertexID, vtx: VertexRef) = - ## Shortcut for `db.layersPutKey(vid, VOID_HASH_KEY)`. It is sort of the - ## equivalent of a delete function. 
- db.layersPutVtx(rvid, vtx) +func layersResKey*(db: AristoTxRef; rvid: RootedVertexID, vtx: BranchRef) = + ## Shortcut for `db.layersPutKey(vid, VOID_HASH_KEY)` which resets the hash + ## key cache for the given rvid / vtx + discard db.layersUpdate(rvid, vtx) func layersResKeys*(db: AristoTxRef; hike: Hike, skip: int) = ## Reset all cached keys along the given hike for i in (skip + 1)..hike.legs.len: - db.layersResKey((hike.root, hike.legs[^i].wp.vid), hike.legs[^i].wp.vtx) + if hike.legs[^i].wp.vtx.vType in Branches: + db.layersResKey((hike.root, hike.legs[^i].wp.vid), BranchRef(hike.legs[^i].wp.vtx)) func layersPutAccLeaf*(db: AristoTxRef; accPath: Hash32; leafVtx: AccLeafRef) = db.accLeaves[accPath] = leafVtx diff --git a/execution_chain/db/aristo/aristo_merge.nim b/execution_chain/db/aristo/aristo_merge.nim index 42b42cc32b..109e5ab5e4 100644 --- a/execution_chain/db/aristo/aristo_merge.nim +++ b/execution_chain/db/aristo/aristo_merge.nim @@ -65,7 +65,7 @@ proc mergePayloadImpl[LeafType, T]( # VertexID! 
return ok (db.layersPutLeaf((root, cur), path, payload), nil, nil) vids: ArrayBuf[NibblesBuf.high + 1, VertexID] - vtxs: ArrayBuf[NibblesBuf.high + 1, VertexRef] + vtxs: ArrayBuf[NibblesBuf.high + 1, BranchRef] template resetKeys() = # Reset cached hashes of touched verticies @@ -73,7 +73,6 @@ proc mergePayloadImpl[LeafType, T]( db.layersResKey((root, vids[^i]), vtxs[^i]) while pos < path.len: - # Clear existing merkle keys along the traversal path var psuffix = path.slice(pos) let n = psuffix.sharedPrefixLen(vtx.pfx) case vtx.vType @@ -85,13 +84,15 @@ proc mergePayloadImpl[LeafType, T]( when payload is AristoAccount: if AccLeafRef(vtx).account == payload: return err(MergeNoAction) - let leafVtx = db.layersPutLeaf((root, cur), psuffix, payload) + let leafVtx = db.layersUpdate((root, cur), AccLeafRef(vtx)) + leafVtx.account = payload leafVtx.stoID = AccLeafRef(vtx).stoID else: if StoLeafRef(vtx).stoData == payload: return err(MergeNoAction) - let leafVtx = db.layersPutLeaf((root, cur), psuffix, payload) + let leafVtx = db.layersUpdate((root, cur), StoLeafRef(vtx)) + leafVtx.stoData = payload (leafVtx, nil, nil) else: # Turn leaf into a branch (or extension) then insert the two leaves @@ -140,7 +141,7 @@ proc mergePayloadImpl[LeafType, T]( if next.isValid: vids.add cur - vtxs.add vtx + vtxs.add BranchRef(vtx) cur = next psuffix = psuffix.slice(n + 1) pos += n + 1 @@ -153,11 +154,10 @@ proc mergePayloadImpl[LeafType, T]( # There's no vertex at the branch point - insert the payload as a new # leaf and update the existing branch - let brDup = vtx.dup() - let local = BranchRef(brDup).setUsed(nibble, true) - db.layersPutVtx((root, cur), brDup) - - let leafVtx = db.layersPutLeaf((root, local), psuffix.slice(n + 1), payload) + let + brDup = db.layersUpdate((root, cur), BranchRef(vtx)) + local = brDup.setUsed(nibble, true) + leafVtx = db.layersPutLeaf((root, local), psuffix.slice(n + 1), payload) resetKeys() return ok((leafVtx, nil, nil)) @@ -248,7 +248,8 @@ proc 
mergeStorageData*( return err(MergeStoAccMissing) let - stoID = AccLeafRef(accHike.legs[^1].wp.vtx).stoID + accVtx = AccLeafRef(accHike.legs[^1].wp.vtx) + stoID = accVtx.stoID # Provide new storage ID when needed useID = @@ -266,8 +267,9 @@ proc mergeStorageData*( return err(error) - # Mark account path Merkle keys for update, except for the vtx we update below - db.layersResKeys(accHike, skip = if not stoID.isValid: 1 else: 0) + # Mark account path Merkle keys for update - the leaf key is not stored so no + # need to mark it + db.layersResKeys(accHike, skip = 1) # Update leaf cache both of the merged value and potentially the displaced # leaf resulting from splitting a leaf into a branch with two leaves @@ -280,10 +282,9 @@ proc mergeStorageData*( if not stoID.isValid: # Make sure that there is an account that refers to that storage trie - let leaf = AccLeafRef(accHike.legs[^1].wp.vtx).dup # Dup on modify + let leaf = db.layersUpdate((STATE_ROOT_VID, accHike.legs[^1].wp.vid), accVtx) # Dup on modify leaf.stoID = useID db.layersPutAccLeaf(accPath, leaf) - db.layersPutVtx((STATE_ROOT_VID, accHike.legs[^1].wp.vid), leaf) ok() From 82d827c817783b9b257e9108722aaf29931f3015 Mon Sep 17 00:00:00 2001 From: Jacek Sieka Date: Fri, 4 Jul 2025 14:35:28 +0200 Subject: [PATCH 124/138] nimbus-eth2: bump (#3450) fixes eth2 pointing to branch commit instead of unstable --- vendor/nimbus-eth2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vendor/nimbus-eth2 b/vendor/nimbus-eth2 index 9823087a13..8eb4f78569 160000 --- a/vendor/nimbus-eth2 +++ b/vendor/nimbus-eth2 @@ -1 +1 @@ -Subproject commit 9823087a131444e4c030de9defc62d019233019d +Subproject commit 8eb4f785690b4a7b7e203a158632e68d048f4ee8 From 9f37bd5420d589e26404fde6b2279c3145100d31 Mon Sep 17 00:00:00 2001 From: Jacek Sieka Date: Fri, 4 Jul 2025 20:07:53 +0200 Subject: [PATCH 125/138] vendor: maintenance bumps (#3451) * small bugfixes and cleanups across the board --- vendor/nim-confutils | 2 +- 
vendor/nim-http-utils | 2 +- vendor/nim-libbacktrace | 2 +- vendor/nim-nat-traversal | 2 +- vendor/nim-presto | 2 +- vendor/nim-results | 2 +- vendor/nim-rocksdb | 2 +- vendor/nim-serialization | 2 +- vendor/nim-snappy | 2 +- vendor/nim-sqlite3-abi | 2 +- vendor/nim-ssz-serialization | 2 +- vendor/nim-taskpools | 2 +- vendor/nim-toml-serialization | 2 +- vendor/nim-unicodedb | 2 +- vendor/nim-unittest2 | 2 +- vendor/nim-web3 | 2 +- vendor/nim-websock | 2 +- vendor/nim-zlib | 2 +- vendor/nim-zxcvbn | 2 +- vendor/nimcrypto | 2 +- 20 files changed, 20 insertions(+), 20 deletions(-) diff --git a/vendor/nim-confutils b/vendor/nim-confutils index a9c690e4b7..e214b3992a 160000 --- a/vendor/nim-confutils +++ b/vendor/nim-confutils @@ -1 +1 @@ -Subproject commit a9c690e4b7b22cc3334f91a04fe472ce62c18c40 +Subproject commit e214b3992a31acece6a9aada7d0a1ad37c928f3b diff --git a/vendor/nim-http-utils b/vendor/nim-http-utils index e8fc71aee1..79cbab1460 160000 --- a/vendor/nim-http-utils +++ b/vendor/nim-http-utils @@ -1 +1 @@ -Subproject commit e8fc71aee15203a852f6321e4dd8d87517502847 +Subproject commit 79cbab1460f4c0cdde2084589d017c43a3d7b4f1 diff --git a/vendor/nim-libbacktrace b/vendor/nim-libbacktrace index 99cd1a3f15..5535cfc4f0 160000 --- a/vendor/nim-libbacktrace +++ b/vendor/nim-libbacktrace @@ -1 +1 @@ -Subproject commit 99cd1a3f1568e7cfbbb6d886c93e4452dc65e4ef +Subproject commit 5535cfc4f0e0912abcbaacadcc7115066a5c0901 diff --git a/vendor/nim-nat-traversal b/vendor/nim-nat-traversal index 05e76accbc..981821197e 160000 --- a/vendor/nim-nat-traversal +++ b/vendor/nim-nat-traversal @@ -1 +1 @@ -Subproject commit 05e76accbc7811273fadc23d8886ac1a2f83bb81 +Subproject commit 981821197e839dc0544fe49c58e0da3f8a6104bf diff --git a/vendor/nim-presto b/vendor/nim-presto index 5d5dc51bac..3ccb356220 160000 --- a/vendor/nim-presto +++ b/vendor/nim-presto @@ -1 +1 @@ -Subproject commit 5d5dc51bac4aafb26c03d2d813a47e80093bd0c7 +Subproject commit 
3ccb356220b70f7d9eb0fbd58b674c4080f78014 diff --git a/vendor/nim-results b/vendor/nim-results index 71d404b314..df8113dda4 160000 --- a/vendor/nim-results +++ b/vendor/nim-results @@ -1 +1 @@ -Subproject commit 71d404b314479a6205bfd050f4fe5fe49cdafc69 +Subproject commit df8113dda4c2d74d460a8fa98252b0b771bf1f27 diff --git a/vendor/nim-rocksdb b/vendor/nim-rocksdb index c8cc5506c8..f420a09cac 160000 --- a/vendor/nim-rocksdb +++ b/vendor/nim-rocksdb @@ -1 +1 @@ -Subproject commit c8cc5506c8dd28e78591701ace81dc113bf9ceb7 +Subproject commit f420a09caca12cb173f39f8107d20600c420b9cb diff --git a/vendor/nim-serialization b/vendor/nim-serialization index 2086c99608..959077711a 160000 --- a/vendor/nim-serialization +++ b/vendor/nim-serialization @@ -1 +1 @@ -Subproject commit 2086c99608b4bf472e1ef5fe063710f280243396 +Subproject commit 959077711ad8bf460a5fd7c3b5b4ac78894d7b46 diff --git a/vendor/nim-snappy b/vendor/nim-snappy index 590edb1520..8291337351 160000 --- a/vendor/nim-snappy +++ b/vendor/nim-snappy @@ -1 +1 @@ -Subproject commit 590edb152071bca4901bcbe689fc0856efd8c4e7 +Subproject commit 829133735113951b219e3b108a6bd2146209300b diff --git a/vendor/nim-sqlite3-abi b/vendor/nim-sqlite3-abi index acd3c32743..bdf01cf423 160000 --- a/vendor/nim-sqlite3-abi +++ b/vendor/nim-sqlite3-abi @@ -1 +1 @@ -Subproject commit acd3c327433784226b412757bdb5455b5be04c55 +Subproject commit bdf01cf4236fb40788f0733466cdf6708783cbac diff --git a/vendor/nim-ssz-serialization b/vendor/nim-ssz-serialization index 55ac17ca1f..0f7515524e 160000 --- a/vendor/nim-ssz-serialization +++ b/vendor/nim-ssz-serialization @@ -1 +1 @@ -Subproject commit 55ac17ca1f42afa35db9a06dd50c4d79a17c5d28 +Subproject commit 0f7515524e23ede6510d156fd7b34766083990eb diff --git a/vendor/nim-taskpools b/vendor/nim-taskpools index f0b386933f..9e8ccc7546 160000 --- a/vendor/nim-taskpools +++ b/vendor/nim-taskpools @@ -1 +1 @@ -Subproject commit f0b386933ffea89098bc232359f8524393c2c1ef +Subproject commit 
9e8ccc754631ac55ac2fd495e167e74e86293edb diff --git a/vendor/nim-toml-serialization b/vendor/nim-toml-serialization index 53ae081cc9..fea85b27f0 160000 --- a/vendor/nim-toml-serialization +++ b/vendor/nim-toml-serialization @@ -1 +1 @@ -Subproject commit 53ae081cc9a3e61095cab49862658a46b00eacf7 +Subproject commit fea85b27f0badcf617033ca1bc05444b5fd8aa7a diff --git a/vendor/nim-unicodedb b/vendor/nim-unicodedb index 15c5e25e2a..66f2458710 160000 --- a/vendor/nim-unicodedb +++ b/vendor/nim-unicodedb @@ -1 +1 @@ -Subproject commit 15c5e25e2a49a924bc97647481ff50125bba2c76 +Subproject commit 66f2458710dc641dd4640368f9483c8a0ec70561 diff --git a/vendor/nim-unittest2 b/vendor/nim-unittest2 index 9c716f162a..8b51e99b4a 160000 --- a/vendor/nim-unittest2 +++ b/vendor/nim-unittest2 @@ -1 +1 @@ -Subproject commit 9c716f162ad4f44def506dcdf9f6ebd0563991dc +Subproject commit 8b51e99b4a57fcfb31689230e75595f024543024 diff --git a/vendor/nim-web3 b/vendor/nim-web3 index 7de20af8e4..6231ca3305 160000 --- a/vendor/nim-web3 +++ b/vendor/nim-web3 @@ -1 +1 @@ -Subproject commit 7de20af8e4d3ae61fb67028ff0295f790268f706 +Subproject commit 6231ca330534c88805ded82810b4dd40f9d43323 diff --git a/vendor/nim-websock b/vendor/nim-websock index 179f81deda..d5cd89062c 160000 --- a/vendor/nim-websock +++ b/vendor/nim-websock @@ -1 +1 @@ -Subproject commit 179f81dedaddb5ba8d02534ccc8b7a8335981f49 +Subproject commit d5cd89062cd2d168ef35193c7d29d2102921d97e diff --git a/vendor/nim-zlib b/vendor/nim-zlib index 02311a3562..daa8723fd3 160000 --- a/vendor/nim-zlib +++ b/vendor/nim-zlib @@ -1 +1 @@ -Subproject commit 02311a35623964a3ef37da8cd896ed95be06e6da +Subproject commit daa8723fd32299d4ca621c837430c29a5a11e19a diff --git a/vendor/nim-zxcvbn b/vendor/nim-zxcvbn index 4a200471b3..d827fdc996 160000 --- a/vendor/nim-zxcvbn +++ b/vendor/nim-zxcvbn @@ -1 +1 @@ -Subproject commit 4a200471b3a5798c9eb8be261bea999e7831245a +Subproject commit d827fdc9968a4f06bba587dc38df2b20399f8cf5 diff --git a/vendor/nimcrypto 
b/vendor/nimcrypto index 19c41d6be4..dbe36faf26 160000 --- a/vendor/nimcrypto +++ b/vendor/nimcrypto @@ -1 +1 @@ -Subproject commit 19c41d6be4c00b4a2c8000583bd30cf8ceb5f4b1 +Subproject commit dbe36faf265cefa29d9ab32598b989da91328182 From 379b4ff72f17616f36a61df92a602b89580fb39d Mon Sep 17 00:00:00 2001 From: bhartnett <51288821+bhartnett@users.noreply.github.com> Date: Sun, 6 Jul 2025 19:27:37 +0800 Subject: [PATCH 126/138] Stateless: Make collection of witness keys in ledger configurable at runtime (#3448) * Enable collection of witness keys in ledger at runtime via statelessProviderEnabled flag. --- execution_chain/common/common.nim | 19 +- execution_chain/config.nim | 18 +- execution_chain/db/ledger.nim | 47 ++--- execution_chain/evm/state.nim | 4 +- execution_chain/nimbus_execution_client.nim | 3 +- portal/evm/async_evm.nim | 1 + portal/nim.cfg | 2 - tests/test_ledger.nim | 215 ++++++++++---------- tests/test_stateless_witness.nim | 2 - 9 files changed, 159 insertions(+), 152 deletions(-) diff --git a/execution_chain/common/common.nim b/execution_chain/common/common.nim index 3d7c1be01d..933cc4803f 100644 --- a/execution_chain/common/common.nim +++ b/execution_chain/common/common.nim @@ -90,6 +90,11 @@ type taskpool*: Taskpool ## Shared task pool for offloading computation to other threads + statelessProviderEnabled*: bool + ## Enable the stateless provider. This turns on the features required + ## by stateless clients such as generation and storage of block witnesses + ## and serving these witnesses to peers over the p2p network. 
+ # ------------------------------------------------------------------------------ # Private helper functions # ------------------------------------------------------------------------------ @@ -161,7 +166,8 @@ proc init(com : CommonRef, networkId : NetworkId, config : ChainConfig, genesis : Genesis, - initializeDb: bool) = + initializeDb: bool, + statelessProviderEnabled: bool) = config.daoCheck() @@ -199,6 +205,8 @@ proc init(com : CommonRef, if initializeDb: com.initializeDb() + com.statelessProviderEnabled = statelessProviderEnabled + proc isBlockAfterTtd(com: CommonRef, header: Header, txFrame: CoreDbTxRef): bool = if com.config.terminalTotalDifficulty.isNone: return false @@ -221,6 +229,7 @@ proc new*( networkId: NetworkId = MainNet; params = networkParams(MainNet); initializeDb = true; + statelessProviderEnabled = false ): CommonRef = ## If genesis data is present, the forkIds will be initialized @@ -232,7 +241,8 @@ proc new*( networkId, params.config, params.genesis, - initializeDb) + initializeDb, + statelessProviderEnabled) proc new*( _: type CommonRef; @@ -241,6 +251,7 @@ proc new*( config: ChainConfig; networkId: NetworkId = MainNet; initializeDb = true; + statelessProviderEnabled = false ): CommonRef = ## There is no genesis data present @@ -252,7 +263,8 @@ proc new*( networkId, config, nil, - initializeDb) + initializeDb, + statelessProviderEnabled) func clone*(com: CommonRef, db: CoreDbRef): CommonRef = ## clone but replace the db @@ -265,6 +277,7 @@ func clone*(com: CommonRef, db: CoreDbRef): CommonRef = genesisHash : com.genesisHash, genesisHeader: com.genesisHeader, networkId : com.networkId, + statelessProviderEnabled: com.statelessProviderEnabled ) func clone*(com: CommonRef): CommonRef = diff --git a/execution_chain/config.nim b/execution_chain/config.nim index 98e3594799..9f9fe4af33 100644 --- a/execution_chain/config.nim +++ b/execution_chain/config.nim @@ -410,6 +410,15 @@ type desc: "Eagerly check state roots when syncing finalized blocks" 
name: "debug-eager-state-root".}: bool + statelessProviderEnabled* {. + separator: "\pSTATELESS PROVIDER OPTIONS:" + hidden + desc: "Enable the stateless provider. This turns on the features required" & + " by stateless clients such as generation and stored of block witnesses" & + " and serving these witnesses to peers over the p2p network." + defaultValue: false + name: "stateless-provider" }: bool + case cmd* {. command defaultValue: NimbusCmd.noCommand }: NimbusCmd @@ -501,15 +510,6 @@ type defaultValueDesc: "\"jwt.hex\" in the data directory (see --data-dir)" name: "jwt-secret" .}: Option[InputFile] - statelessProviderEnabled* {. - separator: "\pSTATELESS PROVIDER OPTIONS:" - hidden - desc: "Enable the stateless provider. This turns on the features required" & - " by stateless clients such as generation and stored of block witnesses" & - " and serving these witnesses to peers over the p2p network." - defaultValue: false - name: "stateless-provider" }: bool - of `import`: maxBlocks* {. desc: "Maximum number of blocks to import" diff --git a/execution_chain/db/ledger.nim b/execution_chain/db/ledger.nim index 2470c2f121..4e51ee8530 100644 --- a/execution_chain/db/ledger.nim +++ b/execution_chain/db/ledger.nim @@ -38,19 +38,16 @@ const # in greater detail. 
slotsLruSize = 16 * 1024 - statelessEnabled = defined(stateless) -when statelessEnabled: - type - WitnessKey* = tuple[ - address: Address, - slot: Opt[UInt256] - ] +type + WitnessKey* = tuple[ + address: Address, + slot: Opt[UInt256] + ] - # Maps witness keys to the codeTouched flag - WitnessTable* = OrderedTable[WitnessKey, bool] + # Maps witness keys to the codeTouched flag + WitnessTable* = OrderedTable[WitnessKey, bool] -type AccountFlag = enum Alive IsNew @@ -95,10 +92,10 @@ type ## over and over again to the database to avoid the WAL and compation ## write amplification that ensues - when statelessEnabled: - witnessKeys: WitnessTable - ## Used to collect the keys of all read accounts, code and storage slots. - ## Maps a tuple of address and slot (optional) to the codeTouched flag. + collectWitness: bool + witnessKeys: WitnessTable + ## Used to collect the keys of all read accounts, code and storage slots. + ## Maps a tuple of address and slot (optional) to the codeTouched flag. ReadOnlyLedger* = distinct LedgerRef @@ -169,7 +166,7 @@ proc getAccount( address: Address; shouldCreate = true; ): AccountRef = - when statelessEnabled: + if ac.collectWitness: let lookupKey = (address, Opt.none(UInt256)) if not ac.witnessKeys.contains(lookupKey): ac.witnessKeys[lookupKey] = false @@ -371,12 +368,13 @@ proc makeDirty(ac: LedgerRef, address: Address, cloneStorage = true): AccountRef # ------------------------------------------------------------------------------ # The LedgerRef is modeled after TrieDatabase for it's transaction style -proc init*(x: typedesc[LedgerRef], db: CoreDbTxRef, storeSlotHash: bool): LedgerRef = +proc init*(x: typedesc[LedgerRef], db: CoreDbTxRef, storeSlotHash: bool, collectWitness = false): LedgerRef = new result result.txFrame = db result.storeSlotHash = storeSlotHash result.code = typeof(result.code).init(codeLruSize) result.slots = typeof(result.slots).init(slotsLruSize) + result.collectWitness = collectWitness discard result.beginSavepoint 
proc init*(x: typedesc[LedgerRef], db: CoreDbTxRef): LedgerRef = @@ -460,7 +458,7 @@ proc getNonce*(ac: LedgerRef, address: Address): AccountNonce = proc getCode*(ac: LedgerRef, address: Address, returnHash: static[bool] = false): auto = - when statelessEnabled: + if ac.collectWitness: let lookupKey = (address, Opt.none(UInt256)) # We overwrite any existing record here so that codeTouched is always set to # true even if an account was previously accessed without touching the code @@ -524,7 +522,7 @@ proc resolveCode*(ac: LedgerRef, address: Address): CodeBytesRef = proc getCommittedStorage*(ac: LedgerRef, address: Address, slot: UInt256): UInt256 = let acc = ac.getAccount(address, false) - when statelessEnabled: + if ac.collectWitness: let lookupKey = (address, Opt.some(slot)) if not ac.witnessKeys.contains(lookupKey): ac.witnessKeys[lookupKey] = false @@ -536,7 +534,7 @@ proc getCommittedStorage*(ac: LedgerRef, address: Address, slot: UInt256): UInt2 proc getStorage*(ac: LedgerRef, address: Address, slot: UInt256): UInt256 = let acc = ac.getAccount(address, false) - when statelessEnabled: + if ac.collectWitness: let lookupKey = (address, Opt.some(slot)) if not ac.witnessKeys.contains(lookupKey): ac.witnessKeys[lookupKey] = false @@ -623,7 +621,7 @@ proc setStorage*(ac: LedgerRef, address: Address, slot, value: UInt256) = let acc = ac.getAccount(address) acc.flags.incl {Alive} - when statelessEnabled: + if ac.collectWitness: let lookupKey = (address, Opt.some(slot)) if not ac.witnessKeys.contains(lookupKey): ac.witnessKeys[lookupKey] = false @@ -880,12 +878,11 @@ proc getStorageProof*(ac: LedgerRef, address: Address, slots: openArray[UInt256] storageProof -when statelessEnabled: - func getWitnessKeys*(ac: LedgerRef): WitnessTable = - ac.witnessKeys +func getWitnessKeys*(ac: LedgerRef): WitnessTable = + ac.witnessKeys - proc clearWitnessKeys*(ac: LedgerRef) = - ac.witnessKeys.clear() +proc clearWitnessKeys*(ac: LedgerRef) = + ac.witnessKeys.clear() # 
------------------------------------------------------------------------------ # Public virtual read-only methods diff --git a/execution_chain/evm/state.nim b/execution_chain/evm/state.nim index 8d11fcdf6a..07d9dca17a 100644 --- a/execution_chain/evm/state.nim +++ b/execution_chain/evm/state.nim @@ -91,7 +91,7 @@ proc new*( ## with the `parent` block header. new result result.init( - ac = LedgerRef.init(txFrame, storeSlotHash), + ac = LedgerRef.init(txFrame, storeSlotHash, com.statelessProviderEnabled), parent = parent, blockCtx = blockCtx, com = com, @@ -158,7 +158,7 @@ proc init*( ## It requires the `header` argument properly initalised so that for PoA ## networks, the miner address is retrievable via `ecRecover()`. self.init( - ac = LedgerRef.init(txFrame, storeSlotHash), + ac = LedgerRef.init(txFrame, storeSlotHash, com.statelessProviderEnabled), parent = parent, blockCtx = blockCtx(header), com = com, diff --git a/execution_chain/nimbus_execution_client.nim b/execution_chain/nimbus_execution_client.nim index ede5765957..72532247db 100644 --- a/execution_chain/nimbus_execution_client.nim +++ b/execution_chain/nimbus_execution_client.nim @@ -236,7 +236,8 @@ proc run(nimbus: NimbusNode, conf: NimbusConf) = db = coreDB, taskpool = taskpool, networkId = conf.networkId, - params = conf.networkParams) + params = conf.networkParams, + statelessProviderEnabled = conf.statelessProviderEnabled) if conf.extraData.len > 32: warn "ExtraData exceeds 32 bytes limit, truncate", diff --git a/portal/evm/async_evm.nim b/portal/evm/async_evm.nim index e850d156de..28833b41c8 100644 --- a/portal/evm/async_evm.nim +++ b/portal/evm/async_evm.nim @@ -104,6 +104,7 @@ proc init*( taskpool = nil, config = chainConfigForNetwork(networkId), initializeDb = false, + statelessProviderEnabled = true, # Enables collection of witness keys ) AsyncEvm(com: com, backend: backend) diff --git a/portal/nim.cfg b/portal/nim.cfg index 93e93c66f0..6f11c16ba8 100644 --- a/portal/nim.cfg +++ 
b/portal/nim.cfg @@ -13,5 +13,3 @@ @end --hint[Processing]:off - --d:"stateless" diff --git a/tests/test_ledger.nim b/tests/test_ledger.nim index d52a9fb481..45523c41b3 100644 --- a/tests/test_ledger.nim +++ b/tests/test_ledger.nim @@ -646,114 +646,113 @@ proc runLedgerBasicOperationsTests() = check 2.u256 in vals check 3.u256 in vals - when defined(stateless): - - test "Witness keys - Get account": - var - ac = LedgerRef.init(memDB.baseTxFrame()) - addr1 = initAddr(1) - - discard ac.getAccount(addr1) - - let - witnessKeys = ac.getWitnessKeys() - key = (addr1, Opt.none(UInt256)) - check: - witnessKeys.len() == 1 - witnessKeys.contains(key) - witnessKeys.getOrDefault(key) == false - - test "Witness keys - Get code": - var - ac = LedgerRef.init(memDB.baseTxFrame()) - addr1 = initAddr(1) - - discard ac.getCode(addr1) - - let - witnessKeys = ac.getWitnessKeys() - key = (addr1, Opt.none(UInt256)) - check: - witnessKeys.len() == 1 - witnessKeys.contains(key) - witnessKeys.getOrDefault(key) == true - - test "Witness keys - Get storage": - var - ac = LedgerRef.init(memDB.baseTxFrame()) - addr1 = initAddr(1) - slot1 = 1.u256 - - discard ac.getStorage(addr1, slot1) - - let - witnessKeys = ac.getWitnessKeys() - key = (addr1, Opt.some(slot1)) - check: - witnessKeys.len() == 2 - witnessKeys.contains(key) - witnessKeys.getOrDefault(key) == false - - test "Witness keys - Set storage": - var - ac = LedgerRef.init(memDB.baseTxFrame()) - addr1 = initAddr(1) - slot1 = 1.u256 - - ac.setStorage(addr1, slot1, slot1) - - let - witnessKeys = ac.getWitnessKeys() - key = (addr1, Opt.some(slot1)) - check: - witnessKeys.len() == 2 - witnessKeys.contains(key) - witnessKeys.getOrDefault(key) == false - - test "Witness keys - Get account, code and storage": - var - ac = LedgerRef.init(memDB.baseTxFrame()) - addr1 = initAddr(1) - addr2 = initAddr(2) - addr3 = initAddr(3) - slot1 = 1.u256 - - - discard ac.getAccount(addr1) - discard ac.getCode(addr2) - discard ac.getCode(addr1) - discard 
ac.getStorage(addr2, slot1) - discard ac.getStorage(addr1, slot1) - discard ac.getStorage(addr2, slot1) - discard ac.getAccount(addr3) - - let witnessKeys = ac.getWitnessKeys() - check witnessKeys.len() == 5 - - var keysList = newSeq[(WitnessKey, bool)]() - for k, v in witnessKeys: - keysList.add((k, v)) - - check: - keysList[0][0].address == addr1 - keysList[0][0].slot == Opt.none(UInt256) - keysList[0][1] == true - - keysList[1][0].address == addr2 - keysList[1][0].slot == Opt.none(UInt256) - keysList[1][1] == true - - keysList[2][0].address == addr2 - keysList[2][0].slot == Opt.some(slot1) - keysList[2][1] == false - - keysList[3][0].address == addr1 - keysList[3][0].slot == Opt.some(slot1) - keysList[3][1] == false - - keysList[4][0].address == addr3 - keysList[4][0].slot == Opt.none(UInt256) - keysList[4][1] == false + + test "Witness keys - Get account": + var + ac = LedgerRef.init(memDB.baseTxFrame(), false, collectWitness = true) + addr1 = initAddr(1) + + discard ac.getAccount(addr1) + + let + witnessKeys = ac.getWitnessKeys() + key = (addr1, Opt.none(UInt256)) + check: + witnessKeys.len() == 1 + witnessKeys.contains(key) + witnessKeys.getOrDefault(key) == false + + test "Witness keys - Get code": + var + ac = LedgerRef.init(memDB.baseTxFrame(), false, collectWitness = true) + addr1 = initAddr(1) + + discard ac.getCode(addr1) + + let + witnessKeys = ac.getWitnessKeys() + key = (addr1, Opt.none(UInt256)) + check: + witnessKeys.len() == 1 + witnessKeys.contains(key) + witnessKeys.getOrDefault(key) == true + + test "Witness keys - Get storage": + var + ac = LedgerRef.init(memDB.baseTxFrame(), false, collectWitness = true) + addr1 = initAddr(1) + slot1 = 1.u256 + + discard ac.getStorage(addr1, slot1) + + let + witnessKeys = ac.getWitnessKeys() + key = (addr1, Opt.some(slot1)) + check: + witnessKeys.len() == 2 + witnessKeys.contains(key) + witnessKeys.getOrDefault(key) == false + + test "Witness keys - Set storage": + var + ac = 
LedgerRef.init(memDB.baseTxFrame(), false, collectWitness = true) + addr1 = initAddr(1) + slot1 = 1.u256 + + ac.setStorage(addr1, slot1, slot1) + + let + witnessKeys = ac.getWitnessKeys() + key = (addr1, Opt.some(slot1)) + check: + witnessKeys.len() == 2 + witnessKeys.contains(key) + witnessKeys.getOrDefault(key) == false + + test "Witness keys - Get account, code and storage": + var + ac = LedgerRef.init(memDB.baseTxFrame(), false, collectWitness = true) + addr1 = initAddr(1) + addr2 = initAddr(2) + addr3 = initAddr(3) + slot1 = 1.u256 + + + discard ac.getAccount(addr1) + discard ac.getCode(addr2) + discard ac.getCode(addr1) + discard ac.getStorage(addr2, slot1) + discard ac.getStorage(addr1, slot1) + discard ac.getStorage(addr2, slot1) + discard ac.getAccount(addr3) + + let witnessKeys = ac.getWitnessKeys() + check witnessKeys.len() == 5 + + var keysList = newSeq[(WitnessKey, bool)]() + for k, v in witnessKeys: + keysList.add((k, v)) + + check: + keysList[0][0].address == addr1 + keysList[0][0].slot == Opt.none(UInt256) + keysList[0][1] == true + + keysList[1][0].address == addr2 + keysList[1][0].slot == Opt.none(UInt256) + keysList[1][1] == true + + keysList[2][0].address == addr2 + keysList[2][0].slot == Opt.some(slot1) + keysList[2][1] == false + + keysList[3][0].address == addr1 + keysList[3][0].slot == Opt.some(slot1) + keysList[3][1] == false + + keysList[4][0].address == addr3 + keysList[4][0].slot == Opt.none(UInt256) + keysList[4][1] == false # ------------------------------------------------------------------------------ diff --git a/tests/test_stateless_witness.nim b/tests/test_stateless_witness.nim index 1652e6b2aa..dc4b97e107 100644 --- a/tests/test_stateless_witness.nim +++ b/tests/test_stateless_witness.nim @@ -21,7 +21,6 @@ suite "Execution Witness Tests": let witnessBytes = witness.encode() check witnessBytes.len() > 0 - echo witnessBytes let decodedWitness = ExecutionWitness.decode(witnessBytes) check: @@ -37,7 +36,6 @@ suite "Execution Witness 
Tests": let witnessBytes = witness.encode() check witnessBytes.len() > 0 - echo witnessBytes let decodedWitness = ExecutionWitness.decode(witnessBytes) check: From 49c907cbe9ceb8493b04555fc25f0ae7dd195797 Mon Sep 17 00:00:00 2001 From: andri lim Date: Mon, 7 Jul 2025 16:43:13 +0700 Subject: [PATCH 127/138] Simplify FC node coloring/updateFinalized (#3452) * Simplify FC node coloring * Optimize updateFinalized --- execution_chain/core/chain/forked_chain.nim | 137 +++++++++--------- .../core/chain/forked_chain/chain_branch.nim | 21 ++- .../chain/forked_chain/chain_serialize.nim | 14 +- 3 files changed, 90 insertions(+), 82 deletions(-) diff --git a/execution_chain/core/chain/forked_chain.nim b/execution_chain/core/chain/forked_chain.nim index 8d6291a449..bd3882def0 100644 --- a/execution_chain/core/chain/forked_chain.nim +++ b/execution_chain/core/chain/forked_chain.nim @@ -63,6 +63,8 @@ func appendBlock(c: ForkedChainRef, parent : parent, ) + # Only finalized segment have finalized marker + newBlock.notFinalized() c.hashToBlock[blkHash] = newBlock c.latest = newBlock @@ -121,13 +123,13 @@ func findFinalizedPos( # There is no point traversing the DAG if there is only one branch. # Just return the node. 
- if c.heads.len > 1: - loopIt(head): - if it == fin: - return ok(fin) - else: + if c.heads.len == 1: return ok(fin) + loopIt(head): + if it == fin: + return ok(fin) + err("Invalid finalizedHash: block not in argument head ancestor lineage") func calculateNewBase( @@ -215,10 +217,6 @@ proc updateHead(c: ForkedChainRef, head: BlockRef) = head.hash, head.number) -func uncolorAll(c: ForkedChainRef) = - for node in values(c.hashToBlock): - node.noColor() - proc updateFinalized(c: ForkedChainRef, finalized: BlockRef, fcuHead: BlockRef) = # Pruning # :: @@ -232,64 +230,66 @@ proc updateFinalized(c: ForkedChainRef, finalized: BlockRef, fcuHead: BlockRef) # 'B', 'D', and A5 onward will stay # 'C' will be removed - func reachable(head, fin: BlockRef): bool = - loopIt(head): - if it.colored: - return it == fin - false + let txFrame = finalized.txFrame + txFrame.fcuFinalized(finalized.hash, finalized.number).expect("fcuFinalized OK") # There is no point running this expensive algorithm # if the chain have no branches, just move it forward. - if c.heads.len > 1: - c.uncolorAll() - loopIt(finalized): - it.color() - - var - i = 0 - updateLatest = false - - while i < c.heads.len: - let head = c.heads[i] - - # Any branches not reachable from finalized - # should be removed. - if not reachable(head, finalized): - loopIt(head): - if not it.colored and it.txFrame.isNil.not: - c.removeBlockFromCache(it) - else: - break - - if head == c.latest: - updateLatest = true - - c.heads.del(i) - # no need to increment i when we delete from c.heads. - continue - - inc i - - if updateLatest: - # Previous `latest` is pruned, select a new latest - # based on longest chain reachable from fcuHead. 
- var candidate: BlockRef - for head in c.heads: - loopIt(head): - if it == fcuHead: - if candidate.isNil: - candidate = head - elif head.number > candidate.number: - candidate = head - break - if it.number < fcuHead.number: - break - - doAssert(candidate.isNil.not) - c.latest = candidate + if c.heads.len == 1: + return - let txFrame = finalized.txFrame - txFrame.fcuFinalized(finalized.hash, finalized.number).expect("fcuFinalized OK") + func reachable(head, fin: BlockRef): bool = + var it = head + while not it.finalized: + it = it.parent + it == fin + + # Only finalized segment have finalized marker + loopFinalized(finalized): + it.finalize() + + var + i = 0 + updateLatest = false + + while i < c.heads.len: + let head = c.heads[i] + + # Any branches not reachable from finalized + # should be removed. + if not reachable(head, finalized): + loopFinalized(head): + if it.txFrame.isNil: + # Has been deleted by previous branch + break + c.removeBlockFromCache(it) + + if head == c.latest: + updateLatest = true + + c.heads.del(i) + # no need to increment i when we delete from c.heads. + continue + + inc i + + if updateLatest: + # Previous `latest` is pruned, select a new latest + # based on longest chain reachable from fcuHead. + var candidate: BlockRef + for head in c.heads: + loopIt(head): + if it == fcuHead: + if candidate.isNil: + candidate = head + elif head.number > candidate.number: + candidate = head + break + if it.number < fcuHead.number: + break + + doAssert(candidate.isNil.not) + c.latest = candidate proc updateBase(c: ForkedChainRef, base: BlockRef): Future[void] {.async: (raises: [CancelledError]), gcsafe.} = @@ -505,6 +505,10 @@ proc init*( fcuSafe: fcuSafe, ) + # updateFinalized will stop ancestor lineage + # traversal if parent have finalized marker. 
+ baseBlock.finalize() + if enableQueue: fc.queue = newAsyncQueue[QueueItem](maxsize = MaxQueueSize) fc.processingQueueLoop = fc.processQueue() @@ -602,17 +606,12 @@ proc forkChoice*(c: ForkedChainRef, # Head maybe moved backward or moved to other branch. c.updateHead(head) - - if finalizedHash == zeroHash32: - # skip updateBase and updateFinalized if finalizedHash is zero. - return ok() - c.updateFinalized(finalized, head) let base = c.calculateNewBase(finalized.number, head) - if base == c.base: + if base.number == c.base.number: # The base is not updated, return. return ok() diff --git a/execution_chain/core/chain/forked_chain/chain_branch.nim b/execution_chain/core/chain/forked_chain/chain_branch.nim index 672730e85a..958dddcd20 100644 --- a/execution_chain/core/chain/forked_chain/chain_branch.nim +++ b/execution_chain/core/chain/forked_chain/chain_branch.nim @@ -25,7 +25,7 @@ type index* : uint # Alias to parent when serializing - # Also used for DAG node coloring + # Also used for DAG node finalized marker template header*(b: BlockRef): Header = b.blk.header @@ -50,17 +50,24 @@ template stateRoot*(b: BlockRef): Hash32 = b.blk.header.stateRoot const - DAG_NODE_COLORED = 1 + DAG_NODE_FINALIZED = 1 DAG_NODE_CLEAR = 0 -template color*(b: BlockRef) = - b.index = DAG_NODE_COLORED +template finalize*(b: BlockRef) = + b.index = DAG_NODE_FINALIZED -template noColor*(b: BlockRef) = +template notFinalized*(b: BlockRef) = b.index = DAG_NODE_CLEAR -template colored*(b: BlockRef): bool = - b.index == DAG_NODE_COLORED +template finalized*(b: BlockRef): bool = + b.index == DAG_NODE_FINALIZED + +template loopFinalized*(init: BlockRef, body: untyped) = + block: + var it{.inject.} = init + while not it.finalized: + body + it = it.parent iterator everyNthBlock*(base: BlockRef, step: uint64): BlockRef = var diff --git a/execution_chain/core/chain/forked_chain/chain_serialize.nim b/execution_chain/core/chain/forked_chain/chain_serialize.nim index fe5cd88de5..848dd257f7 100644 --- 
a/execution_chain/core/chain/forked_chain/chain_serialize.nim +++ b/execution_chain/core/chain/forked_chain/chain_serialize.nim @@ -145,9 +145,10 @@ proc replayBranch(fc: ForkedChainRef; var blocks = newSeqOfCap[BlockRef](head.number - parent.number) loopIt(head): - it.color() if it.number > parent.number: blocks.add it + else: + break var parent = parent for i in countdown(blocks.len-1, 0): @@ -158,17 +159,15 @@ proc replayBranch(fc: ForkedChainRef; proc replay(fc: ForkedChainRef): Result[void, string] = # Should have no parent - doAssert fc.base.index == 0 doAssert fc.base.parent.isNil # Receipts for base block are loaded from database # see `receiptsByBlockHash` fc.base.txFrame = fc.baseTxFrame - fc.base.color() for head in fc.heads: loopIt(head): - if it.colored: + if it.txFrame.isNil.not: ?fc.replayBranch(it, head) break @@ -217,7 +216,7 @@ proc serialize*(fc: ForkedChainRef, txFrame: CoreDbTxRef): Result[void, CoreDbEr headHash=fc.fcuHead.hash.short, finalized=fc.latestFinalizedBlockNumber, finalizedHash=fc.pendingFCU.short, - blocksInMemory=fc.hashToBlock.len, + blocksSerialized=fc.hashToBlock.len, heads=fc.heads.toString ok() @@ -274,12 +273,15 @@ proc deserialize*(fc: ForkedChainRef): Result[void, string] = if b.index > 0: b.parent = blocks[b.index-1] fc.hashToBlock[b.hash] = b - b.noColor() # prepare for replay fc.replay().isOkOr: fc.reset(prevBase) return err(error) + # All blocks should have replayed + for b in blocks: + doAssert(b.txFrame.isNil.not, "deserialized node should have txFrame") + fc.hashToBlock.withValue(fc.fcuHead.hash, val) do: let txFrame = val[].txFrame ?txFrame.setHead(val[].header, fc.fcuHead.hash) From b3d3f91216b68a60095bbc04b8ea8c24ce09eee5 Mon Sep 17 00:00:00 2001 From: andri lim Date: Mon, 7 Jul 2025 16:43:31 +0700 Subject: [PATCH 128/138] Schedule orphan block processing to the async worker (#3453) * Schedule orphan block processing to the async worker * update processQueue * existedBlock to existingBlock --- 
execution_chain/core/chain/forked_chain.nim | 76 ++++++++++++--------- 1 file changed, 43 insertions(+), 33 deletions(-) diff --git a/execution_chain/core/chain/forked_chain.nim b/execution_chain/core/chain/forked_chain.nim index bd3882def0..e187eba7be 100644 --- a/execution_chain/core/chain/forked_chain.nim +++ b/execution_chain/core/chain/forked_chain.nim @@ -42,7 +42,7 @@ export const BaseDistance = 128'u64 PersistBatchSize = 32'u64 - MaxQueueSize = 9 + MaxQueueSize = 12 # ------------------------------------------------------------------------------ # Private functions @@ -53,7 +53,7 @@ func appendBlock(c: ForkedChainRef, blk: Block, blkHash: Hash32, txFrame: CoreDbTxRef, - receipts: sink seq[StoredReceipt]) = + receipts: sink seq[StoredReceipt]): BlockRef = let newBlock = BlockRef( blk : blk, @@ -72,10 +72,11 @@ func appendBlock(c: ForkedChainRef, if head.hash == parent.hash: # update existing heads c.heads[i] = newBlock - return + return newBlock # It's a branch c.heads.add newBlock + newBlock proc fcuSetHead(c: ForkedChainRef, txFrame: CoreDbTxRef, @@ -361,13 +362,15 @@ proc updateBase(c: ForkedChainRef, base: BlockRef): proc validateBlock(c: ForkedChainRef, parent: BlockRef, - blk: Block, finalized: bool): Future[Result[Hash32, string]] + blk: Block, finalized: bool): Future[Result[BlockRef, string]] {.async: (raises: [CancelledError]).} = - let blkHash = blk.header.computeBlockHash + let + blkHash = blk.header.computeBlockHash + existingBlock = c.hashToBlock.getOrDefault(blkHash) - if c.hashToBlock.hasKey(blkHash): - # Block exists, just return - return ok(blkHash) + # Block exists, just return + if existingBlock.isOk: + return ok(existingBlock) if blkHash == c.pendingFCU: # Resolve the hash into latestFinalizedBlockNumber @@ -408,7 +411,7 @@ proc validateBlock(c: ForkedChainRef, c.updateSnapshot(blk, txFrame) - c.appendBlock(parent, blk, blkHash, txFrame, move(receipts)) + let newBlock = c.appendBlock(parent, blk, blkHash, txFrame, move(receipts)) for i, 
tx in blk.transactions: c.txRecords[computeRlpHash(tx)] = (blkHash, uint64(i)) @@ -430,7 +433,30 @@ proc validateBlock(c: ForkedChainRef, if c.fcuHead.number < c.base.number: c.updateHead(c.base) - ok(blkHash) + ok(newBlock) + +template queueOrphan(c: ForkedChainRef, parent: BlockRef, finalized = false): auto = + if c.queue.isNil: + # This recursive mode only used in test env with finite set of blocks + c.processOrphan(parent, finalized) + else: + proc asyncHandler(): Future[Result[void, string]] {.async: (raises: [CancelledError]).} = + await c.processOrphan(parent, finalized) + ok() + c.queue.addLast(QueueItem(handler: asyncHandler)) + +proc processOrphan(c: ForkedChainRef, parent: BlockRef, finalized = false) + {.async: (raises: [CancelledError]).} = + let + orphan = c.quarantine.popOrphan(parent.hash).valueOr: + # No more orphaned block + return + parent = (await c.validateBlock(parent, orphan, finalized)).valueOr: + # Silent? + # We don't return error here because the import is still ok() + # but the quarantined blocks may not linked + return + await c.queueOrphan(parent, finalized) proc processQueue(c: ForkedChainRef) {.async: (raises: [CancelledError]).} = while true: @@ -449,6 +475,10 @@ proc processQueue(c: ForkedChainRef) {.async: (raises: [CancelledError]).} = let item = await c.queue.popFirst() res = await item.handler() + + if item.responseFut.isNil: + continue + if not item.responseFut.finished: item.responseFut.complete res @@ -536,30 +566,10 @@ proc importBlock*(c: ForkedChainRef, blk: Block, finalized = false): # to a "staging area" or disk-backed memory but it must not afect `base`. # `base` is the point of no return, we only update it on finality. 
- var parentHash = ?(await c.validateBlock(parent, blk, finalized)) - - while c.quarantine.hasOrphans(): - const - # We cap waiting for an idle slot in case there's a lot of network traffic - # taking up all CPU - we don't want to _completely_ stop processing blocks - # in this case - doing so also allows us to benefit from more batching / - # larger network reads when under load. - idleTimeout = 10.milliseconds + let parent = ?(await c.validateBlock(parent, blk, finalized)) + if c.quarantine.hasOrphans(): + await c.queueOrphan(parent, finalized) - discard await idleAsync().withTimeout(idleTimeout) - - let orphan = c.quarantine.popOrphan(parentHash).valueOr: - break - - let parent = c.hashToBlock.getOrDefault(parentHash) - if parent.isOk: - parentHash = (await c.validateBlock(parent, orphan, finalized)).valueOr: - # Silent? - # We don't return error here because the import is still ok() - # but the quarantined blocks may not linked - break - else: - break else: # If its parent is an invalid block # there is no hope the descendant is valid From 0dc7cf31a0bb5c3c761aff1d7e668c90c4682393 Mon Sep 17 00:00:00 2001 From: andri lim Date: Mon, 7 Jul 2025 20:39:11 +0700 Subject: [PATCH 129/138] Restore conditional setupRpcAdmin activation (#3456) --- execution_chain/rpc.nim | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/execution_chain/rpc.nim b/execution_chain/rpc.nim index 0fb5f6380a..3a0304463c 100644 --- a/execution_chain/rpc.nim +++ b/execution_chain/rpc.nim @@ -53,11 +53,8 @@ func installRPC(server: RpcServer, if RpcFlag.Eth in flags: setupServerAPI(serverApi, server, nimbus.ctx) - # TODO: chicken and egg problem. - # Remove comment after this PR below merged. 
- # https://github.com/ethpandaops/ethereum-package/pull/1092 - #if RpcFlag.Admin in flags: - setupAdminRpc(nimbus, conf, server) + if RpcFlag.Admin in flags: + setupAdminRpc(nimbus, conf, server) # # Tracer is currently disabled # if RpcFlag.Debug in flags: From 7f36a2f13d7d092dd66ef788e8303608faa51a42 Mon Sep 17 00:00:00 2001 From: andri lim Date: Tue, 8 Jul 2025 11:53:43 +0700 Subject: [PATCH 130/138] Schedule `updateBase` to asynchronous worker. (#3455) * Schedule `updateBase` to asynchronous worker. `updateBase` become synchronous and the scheduler will interleave `updateBase` with `importBlock` and `forkChoice`. The scheduler will move the base at fixed size `PersistBatchSize`. * Remove persistBatchQueue and keep persistBatchSize * fix tests * queueUpdateBase tuning * Fix updateBase scheduler * Optimize a bit updateBase and queueUpdateBase --- execution_chain/config.nim | 2 +- execution_chain/core/chain/forked_chain.nim | 142 ++++++++++++------ .../core/chain/forked_chain/chain_branch.nim | 15 -- .../core/chain/forked_chain/chain_desc.nim | 13 +- execution_chain/nimbus_execution_client.nim | 2 +- tests/test_forked_chain.nim | 4 +- 6 files changed, 116 insertions(+), 62 deletions(-) diff --git a/execution_chain/config.nim b/execution_chain/config.nim index 9f9fe4af33..eade2f8e0b 100644 --- a/execution_chain/config.nim +++ b/execution_chain/config.nim @@ -342,7 +342,7 @@ type persistBatchSize* {. hidden - defaultValue: 32'u64 + defaultValue: 4'u64 name: "debug-persist-batch-size" .}: uint64 beaconSyncTargetFile* {. 
diff --git a/execution_chain/core/chain/forked_chain.nim b/execution_chain/core/chain/forked_chain.nim index e187eba7be..92d90e9bb4 100644 --- a/execution_chain/core/chain/forked_chain.nim +++ b/execution_chain/core/chain/forked_chain.nim @@ -41,8 +41,8 @@ export const BaseDistance = 128'u64 - PersistBatchSize = 32'u64 - MaxQueueSize = 12 + PersistBatchSize = 4'u64 + MaxQueueSize = 128 # ------------------------------------------------------------------------------ # Private functions @@ -292,8 +292,7 @@ proc updateFinalized(c: ForkedChainRef, finalized: BlockRef, fcuHead: BlockRef) doAssert(candidate.isNil.not) c.latest = candidate -proc updateBase(c: ForkedChainRef, base: BlockRef): - Future[void] {.async: (raises: [CancelledError]), gcsafe.} = +proc updateBase(c: ForkedChainRef, base: BlockRef): uint = ## ## A1 - A2 - A3 D5 - D6 ## / / @@ -312,53 +311,110 @@ proc updateBase(c: ForkedChainRef, base: BlockRef): # No update, return return - # Persist the new base block - this replaces the base tx in coredb! - for x in base.everyNthBlock(4): - const - # We cap waiting for an idle slot in case there's a lot of network traffic - # taking up all CPU - we don't want to _completely_ stop processing blocks - # in this case - doing so also allows us to benefit from more batching / - # larger network reads when under load. - idleTimeout = 10.milliseconds - - discard await idleAsync().withTimeout(idleTimeout) - c.com.db.persist(x.txFrame, Opt.some(x.stateRoot)) + c.com.db.persist(base.txFrame, Opt.some(base.stateRoot)) - # Update baseTxFrame when we about to yield to the event loop - # and prevent other modules accessing expired baseTxFrame. - c.baseTxFrame = x.txFrame + # Update baseTxFrame when we about to yield to the event loop + # and prevent other modules accessing expired baseTxFrame. + c.baseTxFrame = base.txFrame # Cleanup in-memory blocks starting from base backward # e.g. B2 backward. 
- var count = 0 - loopIt(base.parent): + var + count = 0'u + it = base.parent + + while it.isOk: c.removeBlockFromCache(it) inc count + let b = it + it = it.parent + b.parent = nil # Update base branch c.base = base c.base.parent = nil - # Log only if more than one block persisted - # This is to avoid log spamming, during normal operation - # of the client following the chain - # When multiple blocks are persisted together, it's mainly - # during `beacon sync` or `nrpc sync` - if count > 1: - notice "Finalized blocks persisted", - nBlocks = count, - base = c.base.number, - baseHash = c.base.hash.short, - pendingFCU = c.pendingFCU.short, - resolvedFin= c.latestFinalizedBlockNumber + count + +proc processUpdateBase(c: ForkedChainRef) {.async: (raises: [CancelledError]).} = + if c.baseQueue.len > 0: + let base = c.baseQueue.popFirst() + c.persistedCount += c.updateBase(base) + + const + minLogInterval = 5 + + if c.baseQueue.len == 0: + let time = EthTime.now() + if time - c.lastBaseLogTime > minLogInterval: + # Log only if more than one block persisted + # This is to avoid log spamming, during normal operation + # of the client following the chain + # When multiple blocks are persisted together, it's mainly + # during `beacon sync` or `nrpc sync` + if c.persistedCount > 1: + notice "Finalized blocks persisted", + nBlocks = c.persistedCount, + base = c.base.number, + baseHash = c.base.hash.short, + pendingFCU = c.pendingFCU.short, + resolvedFin= c.latestFinalizedBlockNumber + else: + debug "Finalized blocks persisted", + nBlocks = c.persistedCount, + target = c.base.hash.short, + base = c.base.number, + baseHash = c.base.hash.short, + pendingFCU = c.pendingFCU.short, + resolvedFin= c.latestFinalizedBlockNumber + c.lastBaseLogTime = time + c.persistedCount = 0 + return + + if c.queue.isNil: + # This recursive mode only used in test env with small set of blocks + await c.processUpdateBase() else: - debug "Finalized blocks persisted", - nBlocks = count, - target = 
base.hash.short, - base = c.base.number, - baseHash = c.base.hash.short, - pendingFCU = c.pendingFCU.short, - resolvedFin= c.latestFinalizedBlockNumber + proc asyncHandler(): Future[Result[void, string]] {.async: (raises: [CancelledError]).} = + await c.processUpdateBase() + ok() + await c.queue.addLast(QueueItem(handler: asyncHandler)) + +proc queueUpdateBase(c: ForkedChainRef, base: BlockRef) + {.async: (raises: [CancelledError]).} = + let + prevQueuedBase = if c.baseQueue.len > 0: + c.baseQueue.peekLast() + else: + c.base + + if prevQueuedBase.number == base.number: + return + + var + number = base.number - min(base.number, PersistBatchSize) + steps = newSeqOfCap[BlockRef]((base.number-c.base.number) div PersistBatchSize + 1) + it = prevQueuedBase + + steps.add base + + while it.number > prevQueuedBase.number: + if it.number == number: + steps.add it + number -= min(number, PersistBatchSize) + it = it.parent + + for i in countdown(steps.len-1, 0): + c.baseQueue.addLast(steps[i]) + + if c.queue.isNil: + # This recursive mode only used in test env with small set of blocks + await c.processUpdateBase() + else: + proc asyncHandler(): Future[Result[void, string]] {.async: (raises: [CancelledError]).} = + await c.processUpdateBase() + ok() + await c.queue.addLast(QueueItem(handler: asyncHandler)) proc validateBlock(c: ForkedChainRef, parent: BlockRef, @@ -426,7 +482,7 @@ proc validateBlock(c: ForkedChainRef, prevBase = c.base.number c.updateFinalized(base, base) - await c.updateBase(base) + await c.queueUpdateBase(base) # If on disk head behind base, move it to base too. 
if c.base.number > prevBase: @@ -437,7 +493,7 @@ proc validateBlock(c: ForkedChainRef, template queueOrphan(c: ForkedChainRef, parent: BlockRef, finalized = false): auto = if c.queue.isNil: - # This recursive mode only used in test env with finite set of blocks + # This recursive mode only used in test env with small set of blocks c.processOrphan(parent, finalized) else: proc asyncHandler(): Future[Result[void, string]] {.async: (raises: [CancelledError]).} = @@ -533,6 +589,8 @@ proc init*( quarantine: Quarantine.init(), fcuHead: fcuHead, fcuSafe: fcuSafe, + baseQueue: initDeque[BlockRef](), + lastBaseLogTime: EthTime.now(), ) # updateFinalized will stop ancestor lineage @@ -630,7 +688,7 @@ proc forkChoice*(c: ForkedChainRef, # and possibly switched to other chain beside the one with head. doAssert(finalized.number <= head.number) doAssert(base.number <= finalized.number) - await c.updateBase(base) + await c.queueUpdateBase(base) ok() diff --git a/execution_chain/core/chain/forked_chain/chain_branch.nim b/execution_chain/core/chain/forked_chain/chain_branch.nim index 958dddcd20..7ba2d5acb9 100644 --- a/execution_chain/core/chain/forked_chain/chain_branch.nim +++ b/execution_chain/core/chain/forked_chain/chain_branch.nim @@ -68,18 +68,3 @@ template loopFinalized*(init: BlockRef, body: untyped) = while not it.finalized: body it = it.parent - -iterator everyNthBlock*(base: BlockRef, step: uint64): BlockRef = - var - number = base.number - min(base.number, step) - steps = newSeqOfCap[BlockRef](128) - - steps.add base - - loopIt(base): - if it.number == number: - steps.add it - number -= min(number, step) - - for i in countdown(steps.len-1, 0): - yield steps[i] diff --git a/execution_chain/core/chain/forked_chain/chain_desc.nim b/execution_chain/core/chain/forked_chain/chain_desc.nim index 326b3272d8..2e831fb2ce 100644 --- a/execution_chain/core/chain/forked_chain/chain_desc.nim +++ b/execution_chain/core/chain/forked_chain/chain_desc.nim @@ -11,7 +11,7 @@ {.push 
raises: [].} import - std/tables, + std/[tables, deques], chronos, ../../../common, ../../../db/[core_db, fcu_db], @@ -35,6 +35,17 @@ type # The base block, the last block stored in database. # Any blocks newer than base is kept in memory. + baseQueue* : Deque[BlockRef] + # Queue of blocks that will become base. + # This queue will be filled by `importBlock` or `forkChoice`. + # Then consumed by the `processQueue` async worker. + + lastBaseLogTime*: EthTime + + persistedCount*: uint + # Count how many blocks persisted when `baseQueue` + # consumed. + latest* : BlockRef # Every time a new block added, # that block automatically become the latest block. diff --git a/execution_chain/nimbus_execution_client.nim b/execution_chain/nimbus_execution_client.nim index 72532247db..0d9acf2c59 100644 --- a/execution_chain/nimbus_execution_client.nim +++ b/execution_chain/nimbus_execution_client.nim @@ -44,7 +44,7 @@ proc basicServices(nimbus: NimbusNode, # Setup the chain let fc = ForkedChainRef.init(com, eagerStateRoot = conf.eagerStateRootCheck, - persistBatchSize=conf.persistBatchSize, + persistBatchSize = conf.persistBatchSize, enableQueue = true) fc.deserialize().isOkOr: warn "Loading block DAG from database", msg=error diff --git a/tests/test_forked_chain.nim b/tests/test_forked_chain.nim index 85fcf6020c..b84e4b83b8 100644 --- a/tests/test_forked_chain.nim +++ b/tests/test_forked_chain.nim @@ -445,7 +445,7 @@ suite "ForkedChainRef tests": checkHeadHash chain, blk7.blockHash check chain.latestHash == blk7.blockHash check chain.heads.len == 1 - check chain.base.number == 0 + check chain.base.number == 4 check chain.validate info & " (9)" test "newBase on shorter canonical arc, discard arc with oldBase" & @@ -726,7 +726,7 @@ suite "ForkedChainRef tests": checkForkChoice(chain, blk7, blk5) check chain.validate info & " (2)" checkHeadHash chain, blk7.blockHash - check chain.baseNumber == 0'u64 + check chain.baseNumber == 4'u64 check chain.latestHash == blk7.blockHash check 
chain.validate info & " (3)" From cc4ee6f51ed4c3ac87f78b47b530f3f331914114 Mon Sep 17 00:00:00 2001 From: Chirag Parmar Date: Tue, 8 Jul 2025 14:20:43 +0530 Subject: [PATCH 131/138] proxy: add evm based functionality (#3454) * eth-call * format * fix --- nimbus_verified_proxy/nim.cfg | 1 + .../nimbus_verified_proxy.nim | 25 ++++++- nimbus_verified_proxy/rpc/evm.nim | 69 +++++++++++++++++++ nimbus_verified_proxy/rpc/rpc_eth_api.nim | 60 +++++++++++++++- nimbus_verified_proxy/types.nim | 8 ++- 5 files changed, 158 insertions(+), 5 deletions(-) create mode 100644 nimbus_verified_proxy/rpc/evm.nim diff --git a/nimbus_verified_proxy/nim.cfg b/nimbus_verified_proxy/nim.cfg index 2e06d07ecd..45e7bd2689 100644 --- a/nimbus_verified_proxy/nim.cfg +++ b/nimbus_verified_proxy/nim.cfg @@ -6,6 +6,7 @@ # at your option. This file may not be copied, modified, or distributed except according to those terms. -d:"chronicles_runtime_filtering=on" +-d:stateless -d:"chronicles_disable_thread_id" @if release: diff --git a/nimbus_verified_proxy/nimbus_verified_proxy.nim b/nimbus_verified_proxy/nimbus_verified_proxy.nim index 13ead324ad..d5547dc4f2 100644 --- a/nimbus_verified_proxy/nimbus_verified_proxy.nim +++ b/nimbus_verified_proxy/nimbus_verified_proxy.nim @@ -24,6 +24,7 @@ import ../execution_chain/rpc/cors, ../execution_chain/common/common, ./types, + ./rpc/evm, ./rpc/rpc_eth_api, ./nimbus_verified_proxy_conf, ./header_store @@ -55,6 +56,18 @@ func getConfiguredChainId(networkMetadata: Eth2NetworkMetadata): UInt256 = else: return networkMetadata.cfg.DEPOSIT_CHAIN_ID.u256 +func chainIdToNetworkId(chainId: UInt256): Result[UInt256, string] = + if chainId == 1.u256: + ok(1.u256) + elif chainId == 11155111.u256: + ok(11155111.u256) + elif chainId == 17000.u256: + ok(17000.u256) + elif chainId == 560048.u256: + ok(560048.u256) + else: + return err("Unknown chainId") + proc run*( config: VerifiedProxyConf, ctx: ptr Context ) {.raises: [CatchableError], gcsafe.} = @@ -84,9 +97,15 @@ proc 
run*( # header cache contains headers downloaded from p2p headerStore = HeaderStore.new(config.cacheLen) - # TODO: add config object to verified proxy for future config options - let verifiedProxy = - VerifiedRpcProxy.init(rpcProxy, headerStore, chainId, config.maxBlockWalk) + # TODO: add config object to verified proxy for future config options + verifiedProxy = + VerifiedRpcProxy.init(rpcProxy, headerStore, chainId, config.maxBlockWalk) + + # instantiate evm + networkId = chainIdToNetworkId(chainId).valueOr: + raise newException(ValueError, error) + + verifiedProxy.evm = AsyncEvm.init(verifiedProxy.toAsyncEvmStateBackend(), networkId) # add handlers that verify RPC calls /rpc/rpc_eth_api.nim verifiedProxy.installEthApiHandlers() diff --git a/nimbus_verified_proxy/rpc/evm.nim b/nimbus_verified_proxy/rpc/evm.nim new file mode 100644 index 0000000000..b601eb307a --- /dev/null +++ b/nimbus_verified_proxy/rpc/evm.nim @@ -0,0 +1,69 @@ +# nimbus_verified_proxy +# Copyright (c) 2025 Status Research & Development GmbH +# Licensed and distributed under either of +# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT). +# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). +# at your option. This file may not be copied, modified, or distributed except according to those terms. 
+ +{.push raises: [].} + +import + chronicles, + ../../portal/evm/async_evm_backend, + ../../portal/evm/async_evm, + ./accounts, + ../types + +logScope: + topics = "verified_proxy_evm" + +export async_evm, async_evm_backend + +proc toAsyncEvmStateBackend*(vp: VerifiedRpcProxy): AsyncEvmStateBackend = + let + accProc = proc( + header: Header, address: Address + ): Future[Opt[Account]] {.async: (raises: [CancelledError]).} = + let account = + try: + (await vp.getAccount(address, header.number, header.stateRoot)) + except CatchableError as e: + error "error getting account" + return Opt.none(Account) + + if account.isOk(): + return Opt.some(account.get()) + + Opt.none(Account) + + storageProc = proc( + header: Header, address: Address, slotKey: UInt256 + ): Future[Opt[UInt256]] {.async: (raises: [CancelledError]).} = + let storageSlot = + try: + (await vp.getStorageAt(address, slotKey, header.number, header.stateRoot)) + except CatchableError as e: + error "error getting storage" + return Opt.none(UInt256) + + if storageSlot.isOk(): + return Opt.some(storageSlot.get()) + + Opt.none(UInt256) + + codeProc = proc( + header: Header, address: Address + ): Future[Opt[seq[byte]]] {.async: (raises: [CancelledError]).} = + let code = + try: + (await vp.getCode(address, header.number, header.stateRoot)) + except CatchableError as e: + error "error getting code" + return Opt.none(seq[byte]) + + if code.isOk(): + return Opt.some(code.get()) + + Opt.none(seq[byte]) + + AsyncEvmStateBackend.init(accProc, storageProc, codeProc) diff --git a/nimbus_verified_proxy/rpc/rpc_eth_api.nim b/nimbus_verified_proxy/rpc/rpc_eth_api.nim index 87d9f2bac5..87f2b1cd1b 100644 --- a/nimbus_verified_proxy/rpc/rpc_eth_api.nim +++ b/nimbus_verified_proxy/rpc/rpc_eth_api.nim @@ -16,7 +16,8 @@ import ../types, ../header_store, ./accounts, - ./blocks + ./blocks, + ./evm logScope: topics = "verified_proxy" @@ -138,6 +139,63 @@ proc installEthApiHandlers*(vp: VerifiedRpcProxy) = x.tx + 
vp.proxy.rpc("eth_call") do( + tx: TransactionArgs, blockTag: BlockTag, optimisticStateFetch: Opt[bool] + ) -> seq[byte]: + if tx.to.isNone(): + raise newException(ValueError, "to address is required") + + let + header = (await vp.getHeader(blockTag)).valueOr: + raise newException(ValueError, error) + + optimisticStateFetch = optimisticStateFetch.valueOr: + true + + let callResult = (await vp.evm.call(header, tx, optimisticStateFetch)).valueOr: + raise newException(ValueError, error) + + return callResult.output + + vp.proxy.rpc("eth_createAccessList") do( + tx: TransactionArgs, blockTag: BlockTag, optimisticStateFetch: Opt[bool] + ) -> AccessListResult: + if tx.to.isNone(): + raise newException(ValueError, "to address is required") + + let + header = (await vp.getHeader(blockTag)).valueOr: + raise newException(ValueError, error) + + optimisticStateFetch = optimisticStateFetch.valueOr: + true + + let (accessList, error, gasUsed) = ( + await vp.evm.createAccessList(header, tx, optimisticStateFetch) + ).valueOr: + raise newException(ValueError, error) + + return + AccessListResult(accessList: accessList, error: error, gasUsed: gasUsed.Quantity) + + vp.proxy.rpc("eth_estimateGas") do( + tx: TransactionArgs, blockTag: BlockTag, optimisticStateFetch: Opt[bool] + ) -> Quantity: + if tx.to.isNone(): + raise newException(ValueError, "to address is required") + + let + header = (await vp.getHeader(blockTag)).valueOr: + raise newException(ValueError, error) + + optimisticStateFetch = optimisticStateFetch.valueOr: + true + + let gasEstimate = (await vp.evm.estimateGas(header, tx, optimisticStateFetch)).valueOr: + raise newException(ValueError, error) + + return gasEstimate.Quantity + # TODO: # Following methods are forwarded directly to the web3 provider and therefore # are not validated in any way. 
diff --git a/nimbus_verified_proxy/types.nim b/nimbus_verified_proxy/types.nim index 70e693ce22..1265b3894d 100644 --- a/nimbus_verified_proxy/types.nim +++ b/nimbus_verified_proxy/types.nim @@ -5,10 +5,16 @@ # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0). # at your option. This file may not be copied, modified, or distributed except according to those terms. -import stint, json_rpc/[rpcclient, rpcproxy], web3/eth_api_types, ./header_store +import + json_rpc/[rpcproxy, rpcclient], + stint, + ./header_store, + ../portal/evm/async_evm, + web3/eth_api_types type VerifiedRpcProxy* = ref object + evm*: AsyncEvm proxy*: RpcProxy headerStore*: HeaderStore chainId*: UInt256 From b5e5b5e16e1a4fa3f8a21df0e7c78ba5bc5b5634 Mon Sep 17 00:00:00 2001 From: andri lim Date: Wed, 9 Jul 2025 03:02:55 +0700 Subject: [PATCH 132/138] Add missing finalized marker to updateBase (#3459) --- execution_chain/core/chain/forked_chain.nim | 3 ++ .../chain/forked_chain/chain_serialize.nim | 3 ++ tests/test_forked_chain.nim | 28 +++++++++++++++++++ tests/test_forked_chain/chain_debug.nim | 3 ++ 4 files changed, 37 insertions(+) diff --git a/execution_chain/core/chain/forked_chain.nim b/execution_chain/core/chain/forked_chain.nim index 92d90e9bb4..b26434f6d2 100644 --- a/execution_chain/core/chain/forked_chain.nim +++ b/execution_chain/core/chain/forked_chain.nim @@ -334,6 +334,9 @@ proc updateBase(c: ForkedChainRef, base: BlockRef): uint = c.base = base c.base.parent = nil + # Base block always have finalized marker + c.base.finalize() + count proc processUpdateBase(c: ForkedChainRef) {.async: (raises: [CancelledError]).} = diff --git a/execution_chain/core/chain/forked_chain/chain_serialize.nim b/execution_chain/core/chain/forked_chain/chain_serialize.nim index 848dd257f7..c68c222d91 100644 --- a/execution_chain/core/chain/forked_chain/chain_serialize.nim +++ b/execution_chain/core/chain/forked_chain/chain_serialize.nim @@ -165,6 
+165,9 @@ proc replay(fc: ForkedChainRef): Result[void, string] = # see `receiptsByBlockHash` fc.base.txFrame = fc.baseTxFrame + # Base block always have finalized marker + fc.base.finalize() + for head in fc.heads: loopIt(head): if it.txFrame.isNil.not: diff --git a/tests/test_forked_chain.nim b/tests/test_forked_chain.nim index b84e4b83b8..6799a6520f 100644 --- a/tests/test_forked_chain.nim +++ b/tests/test_forked_chain.nim @@ -189,6 +189,7 @@ suite "ForkedChainRef tests": blk5 = dbTx.makeBlk(5, blk4) blk6 = dbTx.makeBlk(6, blk5) blk7 = dbTx.makeBlk(7, blk6) + blk8 = dbTx.makeBlk(8, blk7) dbTx.dispose() let B4 = txFrame.makeBlk(4, blk3, 1.byte) @@ -706,6 +707,33 @@ suite "ForkedChainRef tests": check chain.heads.len == 1 check chain.validate info & " (2)" + test "newBase move forward, auto mode, base finalized marker needed": + const info = "newBase move forward, auto mode, base finalized marker needed" + let com = env.newCom() + var chain = ForkedChainRef.init(com, baseDistance = 2, persistBatchSize = 1) + check (waitFor chain.forkChoice(blk8.blockHash, blk8.blockHash)).isErr + check chain.tryUpdatePendingFCU(blk8.blockHash, blk8.header.number) + checkImportBlock(chain, blk1) + checkImportBlock(chain, blk2) + checkImportBlock(chain, blk3) + checkImportBlock(chain, B4) + checkImportBlock(chain, blk4) + checkImportBlock(chain, B5) + checkImportBlock(chain, C5) + checkImportBlock(chain, blk5) + checkImportBlock(chain, blk6) + checkImportBlock(chain, blk7) + checkImportBlock(chain, blk8) + + check chain.validate info & " (1)" + + checkHeadHash chain, blk5.blockHash + check chain.latestHash == blk8.blockHash + + check chain.baseNumber == 5'u64 + check chain.heads.len == 1 + check chain.validate info & " (2)" + test "serialize roundtrip": const info = "serialize roundtrip" let com = env.newCom() diff --git a/tests/test_forked_chain/chain_debug.nim b/tests/test_forked_chain/chain_debug.nim index e7e6888931..ec6036b3f9 100644 --- 
a/tests/test_forked_chain/chain_debug.nim +++ b/tests/test_forked_chain/chain_debug.nim @@ -91,6 +91,9 @@ func validate*(c: ForkedChainRef): Result[void,string] = if not c.hashToBlock.hasKey(c.baseHash): return err("base must be in hashToBlock[] table: " & $c.baseNumber) + if not c.base.finalized: + return err("base must have finalized marker") + # Base chains must range inside `(base,head]`, rooted on `base` for chain in c.baseChains: if chain[^1] != c.baseHash: From d93ffb36b458c00de4c9ae34e111b1d930347aa5 Mon Sep 17 00:00:00 2001 From: bhartnett <51288821+bhartnett@users.noreply.github.com> Date: Wed, 9 Jul 2025 08:53:36 +0800 Subject: [PATCH 133/138] Stateless: Witness generation, clearing of witness keys and db persist and get helpers (#3458) * Implement function to build execution witness from witness keys and proofs from database. * Clear witness keys in ledger after processing each block. * Add functions to persist and get witness by block hash. * Rename/restructure witness files. 
--- .../core/executor/process_block.nim | 1 + execution_chain/db/core_db/core_apps.nim | 12 ++ execution_chain/db/ledger.nim | 18 ++- execution_chain/db/storage_types.nim | 6 + .../stateless/witness_generation.nim | 62 +++++++ .../{witness.nim => witness_types.nim} | 45 +++++- tests/all_tests.nim | 3 +- tests/test_ledger.nim | 23 +++ tests/test_stateless_witness_generation.nim | 153 ++++++++++++++++++ ...s.nim => test_stateless_witness_types.nim} | 36 ++++- 10 files changed, 341 insertions(+), 18 deletions(-) create mode 100644 execution_chain/stateless/witness_generation.nim rename execution_chain/stateless/{witness.nim => witness_types.nim} (57%) create mode 100644 tests/test_stateless_witness_generation.nim rename tests/{test_stateless_witness.nim => test_stateless_witness_types.nim} (55%) diff --git a/execution_chain/core/executor/process_block.nim b/execution_chain/core/executor/process_block.nim index 644eb112f4..812acfa74f 100644 --- a/execution_chain/core/executor/process_block.nim +++ b/execution_chain/core/executor/process_block.nim @@ -198,6 +198,7 @@ proc procBlkEpilogue( db.persist( clearEmptyAccount = vmState.com.isSpuriousOrLater(header.number), clearCache = true, + clearWitness = vmState.com.statelessProviderEnabled ) var diff --git a/execution_chain/db/core_db/core_apps.nim b/execution_chain/db/core_db/core_apps.nim index 966d13f804..0187b5a8af 100644 --- a/execution_chain/db/core_db/core_apps.nim +++ b/execution_chain/db/core_db/core_apps.nim @@ -20,6 +20,7 @@ import stew/byteutils, results, "../.."/[constants], + "../.."/stateless/witness_types, ".."/[aristo, storage_types], "."/base @@ -622,6 +623,17 @@ proc persistUncles*(db: CoreDbTxRef, uncles: openArray[Header]): Hash32 = warn "persistUncles()", unclesHash=result, error=($$error) return EMPTY_ROOT_HASH +proc persistWitness*(db: CoreDbTxRef, blockHash: Hash32, witness: Witness): Result[void, string] = + db.put(blockHashToWitnessKey(blockHash).toOpenArray, witness.encode()).isOkOr: + return 
err("persistWitness: " & $$error) + ok() + +proc getWitness*(db: CoreDbTxRef, blockHash: Hash32): Result[Witness, string] = + let witnessBytes = db.get(blockHashToWitnessKey(blockHash).toOpenArray).valueOr: + return err("getWitness: " & $$error) + + Witness.decode(witnessBytes) + # ------------------------------------------------------------------------------ # End # ------------------------------------------------------------------------------ diff --git a/execution_chain/db/ledger.nim b/execution_chain/db/ledger.nim index 4e51ee8530..5ec9875444 100644 --- a/execution_chain/db/ledger.nim +++ b/execution_chain/db/ledger.nim @@ -703,9 +703,16 @@ proc clearEmptyAccounts(ac: LedgerRef) = ac.deleteEmptyAccount(RIPEMD_ADDR) ac.ripemdSpecial = false +template getWitnessKeys*(ac: LedgerRef): WitnessTable = + ac.witnessKeys + +template clearWitnessKeys*(ac: LedgerRef) = + ac.witnessKeys.clear() + proc persist*(ac: LedgerRef, clearEmptyAccount: bool = false, - clearCache = false) = + clearCache = false, + clearWitness = false) = const info = "persist(): " # make sure all savepoint already committed @@ -757,6 +764,9 @@ proc persist*(ac: LedgerRef, ac.isDirty = false + if clearWitness: + ac.clearWitnessKeys() + iterator addresses*(ac: LedgerRef): Address = # make sure all savepoint already committed doAssert(ac.savePoint.parentSavepoint.isNil) @@ -878,12 +888,6 @@ proc getStorageProof*(ac: LedgerRef, address: Address, slots: openArray[UInt256] storageProof -func getWitnessKeys*(ac: LedgerRef): WitnessTable = - ac.witnessKeys - -proc clearWitnessKeys*(ac: LedgerRef) = - ac.witnessKeys.clear() - # ------------------------------------------------------------------------------ # Public virtual read-only methods # ------------------------------------------------------------------------------ diff --git a/execution_chain/db/storage_types.nim b/execution_chain/db/storage_types.nim index 96631b763f..bffeb8341e 100644 --- a/execution_chain/db/storage_types.nim +++ 
b/execution_chain/db/storage_types.nim @@ -28,6 +28,7 @@ type fcState = 9 beaconHeader = 10 wdKey = 11 + witness = 12 DbKey* = object # The first byte stores the key type. The rest are key-specific values @@ -104,6 +105,11 @@ func withdrawalsKey*(h: Hash32): DbKey {.inline.} = result.data[1 .. 32] = h.data result.dataEndPos = uint8 32 +func blockHashToWitnessKey*(h: Hash32): DbKey {.inline.} = + result.data[0] = byte ord(witness) + result.data[1 .. 32] = h.data + result.dataEndPos = uint8 32 + template toOpenArray*(k: DbKey): openArray[byte] = k.data.toOpenArray(0, int(k.dataEndPos)) diff --git a/execution_chain/stateless/witness_generation.nim b/execution_chain/stateless/witness_generation.nim new file mode 100644 index 0000000000..8ea93e03c7 --- /dev/null +++ b/execution_chain/stateless/witness_generation.nim @@ -0,0 +1,62 @@ +# Nimbus +# Copyright (c) 2025 Status Research & Development GmbH +# Licensed under either of +# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE)) +# * MIT license ([LICENSE-MIT](LICENSE-MIT)) +# at your option. +# This file may not be copied, modified, or distributed except according to +# those terms. 
+ +{.push raises: [].} + +import + std/[tables, sets], + eth/common, + ../db/ledger, + ./witness_types + +export + common, + ledger, + witness_types + +proc build*( + T: type Witness, + ledger: LedgerRef, + codes: var seq[seq[byte]]): T = + var + witness = Witness.init() + addedStateHashes = initHashSet[Hash32]() + addedCodeHashes = initHashSet[Hash32]() + + for key, codeTouched in ledger.getWitnessKeys(): + let (adr, maybeSlot) = key + if maybeSlot.isSome(): + let slot = maybeSlot.get() + witness.addKey(slot.toBytesBE()) + + let proofs = ledger.getStorageProof(key.address, @[slot]) + doAssert(proofs.len() == 1) + for trieNode in proofs[0]: + let nodeHash = keccak256(trieNode) + if nodeHash notin addedStateHashes: + witness.addState(trieNode) + addedStateHashes.incl(nodeHash) + else: + witness.addKey(key.address.data()) + + let proof = ledger.getAccountProof(key.address) + for trieNode in proof: + let nodeHash = keccak256(trieNode) + if nodeHash notin addedStateHashes: + witness.addState(trieNode) + addedStateHashes.incl(nodeHash) + + if codeTouched: + let (codeHash, code) = ledger.getCode(key.address, returnHash = true) + if codeHash != EMPTY_CODE_HASH and codeHash notin addedCodeHashes: + codes.add(code.bytes) + witness.addCodeHash(codeHash) + addedCodeHashes.incl(codeHash) + + witness diff --git a/execution_chain/stateless/witness.nim b/execution_chain/stateless/witness_types.nim similarity index 57% rename from execution_chain/stateless/witness.nim rename to execution_chain/stateless/witness_types.nim index ccb59292e3..99e4957a6d 100644 --- a/execution_chain/stateless/witness.nim +++ b/execution_chain/stateless/witness_types.nim @@ -7,6 +7,8 @@ # This file may not be copied, modified, or distributed except according to # those terms. +{.push raises: [].} + import eth/common, eth/rlp, @@ -16,24 +18,57 @@ export common, results -{.push raises: [].} - type + Witness* = object + state*: seq[seq[byte]] # MPT trie nodes accessed while executing the block. 
+ keys*: seq[seq[byte]] # Ordered list of access keys (address bytes or storage slots bytes). + codeHashes*: seq[Hash32] # Code hashes of the bytecode required by the witness. + headerHashes*: seq[Hash32] # Hashes of block headers which are required by the witness. + ExecutionWitness* = object state*: seq[seq[byte]] # MPT trie nodes accessed while executing the block. - codes*: seq[seq[byte]] # Contract bytecodes read while executing the block. keys*: seq[seq[byte]] # Ordered list of access keys (address bytes or storage slots bytes). + codes*: seq[seq[byte]] # Contract bytecodes read while executing the block. headers*: seq[Header] # Block headers required for proving correctness of stateless execution. # Stores the parent block headers needed to verify that the state reads are correct with respect # to the pre-state root. +func init*( + T: type Witness, + state = newSeq[seq[byte]](), + keys = newSeq[seq[byte]](), + codeHashes = newSeq[Hash32](), + headerHashes = newSeq[Hash32]()): T = + Witness(state: state, keys: keys, headerHashes: headerHashes) + +template addState*(witness: var Witness, trieNode: seq[byte]) = + witness.state.add(trieNode) + +template addKey*(witness: var Witness, key: openArray[byte]) = + witness.keys.add(@key) + +template addCodeHash*(witness: var Witness, codeHash: Hash32) = + witness.codeHashes.add(codeHash) + +template addHeaderHash*(witness: var Witness, headerHash: Hash32) = + witness.headerHashes.add(headerHash) + +func encode*(witness: Witness): seq[byte] = + rlp.encode(witness) + +func decode*(T: type Witness, witnessBytes: openArray[byte]): Result[T, string] = + try: + ok(rlp.decode(witnessBytes, T)) + except RlpError as e: + err(e.msg) + func init*( T: type ExecutionWitness, state = newSeq[seq[byte]](), - codes = newSeq[seq[byte]](), keys = newSeq[seq[byte]](), + codes = newSeq[seq[byte]](), headers = newSeq[Header]()): T = - ExecutionWitness(state: state, codes: codes, keys: keys, headers: headers) + ExecutionWitness(state: state, 
keys: keys, codes: codes, headers: headers) template addState*(witness: var ExecutionWitness, trieNode: seq[byte]) = witness.state.add(trieNode) diff --git a/tests/all_tests.nim b/tests/all_tests.nim index 2cc42f5282..6dda5a4e5a 100644 --- a/tests/all_tests.nim +++ b/tests/all_tests.nim @@ -36,7 +36,8 @@ import test_txpool, test_networking, test_pooled_tx, - test_stateless_witness, + test_stateless_witness_types, + test_stateless_witness_generation, # These two suites are much slower than all the rest, so run them last test_blockchain_json, test_generalstate_json, diff --git a/tests/test_ledger.nim b/tests/test_ledger.nim index 45523c41b3..58d751aaba 100644 --- a/tests/test_ledger.nim +++ b/tests/test_ledger.nim @@ -754,6 +754,29 @@ proc runLedgerBasicOperationsTests() = keysList[4][0].slot == Opt.none(UInt256) keysList[4][1] == false + test "Witness keys - Clear cache": + var + ac = LedgerRef.init(memDB.baseTxFrame(), false, collectWitness = true) + addr1 = initAddr(1) + addr2 = initAddr(2) + addr3 = initAddr(3) + slot1 = 1.u256 + + discard ac.getAccount(addr1) + discard ac.getCode(addr2) + discard ac.getCode(addr1) + discard ac.getStorage(addr2, slot1) + discard ac.getStorage(addr1, slot1) + discard ac.getStorage(addr2, slot1) + discard ac.getAccount(addr3) + + check ac.getWitnessKeys().len() == 5 + + ac.persist() # persist should not clear the witness keys by default + check ac.getWitnessKeys().len() == 5 + + ac.persist(clearWitness = true) + check ac.getWitnessKeys().len() == 0 # ------------------------------------------------------------------------------ # Main function(s) diff --git a/tests/test_stateless_witness_generation.nim b/tests/test_stateless_witness_generation.nim new file mode 100644 index 0000000000..09f58f27a9 --- /dev/null +++ b/tests/test_stateless_witness_generation.nim @@ -0,0 +1,153 @@ +# Nimbus +# Copyright (c) 2025 Status Research & Development GmbH +# Licensed under either of +# * Apache License, version 2.0, 
([LICENSE-APACHE](LICENSE-APACHE) or +# http://www.apache.org/licenses/LICENSE-2.0) +# * MIT license ([LICENSE-MIT](LICENSE-MIT) or +# http://opensource.org/licenses/MIT) +# at your option. This file may not be copied, modified, or +# distributed except according to those terms. + +{.used.} + +import + stew/byteutils, + chronicles, + unittest2, + ../execution_chain/common/common, + ../execution_chain/stateless/witness_generation + +suite "Stateless: Witness Generation": + setup: + let + memDB = newCoreDbRef DefaultDbMemory + ledger = LedgerRef.init(memDB.baseTxFrame(), false, collectWitness = true) + code = hexToSeqByte("0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6") + addr1 = address"0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" + slot1 = 1.u256 + slot2 = 2.u256 + slot3 = 3.u256 + + test "Get account": + ledger.setBalance(addr1, 10.u256) + ledger.persist(clearWitness = true) + + discard ledger.getBalance(addr1) + + let witnessKeys = ledger.getWitnessKeys() + check witnessKeys.len() == 1 + + var codes: seq[seq[byte]] + let witness = Witness.build(ledger, codes) + + check: + witness.state.len() > 0 + witness.keys.len() == 1 + witness.codeHashes.len() == 0 + codes.len() == 0 + witnessKeys.contains((Address.copyFrom(witness.keys[0]), Opt.none(UInt256))) + + test "Get code": + ledger.setCode(addr1, code) + ledger.persist(clearWitness = true) + + discard ledger.getCode(addr1) + + let witnessKeys = ledger.getWitnessKeys() + check witnessKeys.len() == 1 + + var codes: seq[seq[byte]] + let witness = Witness.build(ledger, codes) + + check: + witness.state.len() > 0 + witness.keys.len() == 1 + witness.codeHashes.len() == 1 + codes.len() == 1 + witnessKeys.contains((Address.copyFrom(witness.keys[0]), Opt.none(UInt256))) + + test "Set storage": + ledger.setStorage(addr1, slot1, 20.u256) + ledger.persist() + + let witnessKeys = ledger.getWitnessKeys() + check witnessKeys.len() == 2 + + var codes: seq[seq[byte]] + let witness = Witness.build(ledger, codes) + + check: + 
witness.state.len() > 0 + witness.keys.len() == 2 + witness.codeHashes.len() == 0 + codes.len() == 0 + witnessKeys.contains((Address.copyFrom(witness.keys[0]), Opt.none(UInt256))) + witnessKeys.contains((Address.copyFrom(witness.keys[0]), Opt.some(slot1))) + + test "Get storage": + ledger.setStorage(addr1, slot1, 20.u256) + ledger.persist(clearWitness = true) + + discard ledger.getStorage(addr1, slot1) + + let witnessKeys = ledger.getWitnessKeys() + check witnessKeys.len() == 2 + + var codes: seq[seq[byte]] + let witness = Witness.build(ledger, codes) + + check: + witness.state.len() > 0 + witness.keys.len() == 2 + witness.codeHashes.len() == 0 + codes.len() == 0 + witnessKeys.contains((Address.copyFrom(witness.keys[0]), Opt.none(UInt256))) + witnessKeys.contains((Address.copyFrom(witness.keys[0]), Opt.some(slot1))) + + test "Get committed storage": + ledger.setStorage(addr1, slot1, 20.u256) + ledger.persist(clearWitness = true) + + discard ledger.getCommittedStorage(addr1, slot1) + + let witnessKeys = ledger.getWitnessKeys() + check witnessKeys.len() == 2 + + var codes: seq[seq[byte]] + let witness = Witness.build(ledger, codes) + + check: + witness.state.len() > 0 + witness.keys.len() == 2 + witness.codeHashes.len() == 0 + codes.len() == 0 + witnessKeys.contains((Address.copyFrom(witness.keys[0]), Opt.none(UInt256))) + witnessKeys.contains((Address.copyFrom(witness.keys[0]), Opt.some(slot1))) + + test "Get code and storage slots": + ledger.setCode(addr1, code) + ledger.setStorage(addr1, slot1, 100.u256) + ledger.setStorage(addr1, slot2, 200.u256) + ledger.setStorage(addr1, slot3, 300.u256) + ledger.persist(clearWitness = true) + + discard ledger.getCode(addr1) + discard ledger.getStorage(addr1, slot1) + discard ledger.getStorage(addr1, slot2) + discard ledger.getStorage(addr1, slot3) + + let witnessKeys = ledger.getWitnessKeys() + check witnessKeys.len() == 4 + + var codes: seq[seq[byte]] + let witness = Witness.build(ledger, codes) + + check: + 
witness.state.len() > 0 + witness.keys.len() == 4 + witness.codeHashes.len() == 1 + codes.len() == 1 + witnessKeys.contains((Address.copyFrom(witness.keys[0]), Opt.none(UInt256))) + witnessKeys.contains((Address.copyFrom(witness.keys[0]), Opt.some(slot1))) + witnessKeys.contains((Address.copyFrom(witness.keys[0]), Opt.some(slot2))) + witnessKeys.contains((Address.copyFrom(witness.keys[0]), Opt.some(slot3))) diff --git a/tests/test_stateless_witness.nim b/tests/test_stateless_witness_types.nim similarity index 55% rename from tests/test_stateless_witness.nim rename to tests/test_stateless_witness_types.nim index dc4b97e107..c6344859c6 100644 --- a/tests/test_stateless_witness.nim +++ b/tests/test_stateless_witness_types.nim @@ -12,11 +12,37 @@ import unittest2, - ../execution_chain/stateless/witness + ../execution_chain/stateless/witness_types -suite "Execution Witness Tests": +suite "Stateless: Witness Types": - test "Encoding/decoding empty witness": + test "Encoding/decoding empty Witness": + var witness: Witness + + let witnessBytes = witness.encode() + check witnessBytes.len() > 0 + + let decodedWitness = Witness.decode(witnessBytes) + check: + decodedWitness.isOk() + decodedWitness.get() == witness + + test "Encoding/decoding Witness": + var witness = Witness.init() + witness.addState(@[0x1.byte, 0x2, 0x3]) + witness.addKey(@[0x7.byte, 0x8, 0x9]) + witness.addCodeHash(EMPTY_ROOT_HASH) + witness.addHeaderHash(EMPTY_ROOT_HASH) + + let witnessBytes = witness.encode() + check witnessBytes.len() > 0 + + let decodedWitness = Witness.decode(witnessBytes) + check: + decodedWitness.isOk() + decodedWitness.get() == witness + + test "Encoding/decoding empty ExecutionWitness": var witness: ExecutionWitness let witnessBytes = witness.encode() @@ -27,11 +53,11 @@ suite "Execution Witness Tests": decodedWitness.isOk() decodedWitness.get() == witness - test "Encoding/decoding witness": + test "Encoding/decoding ExecutionWitness": var witness = ExecutionWitness.init() 
witness.addState(@[0x1.byte, 0x2, 0x3]) - witness.addCode(@[0x4.byte, 0x5, 0x6]) witness.addKey(@[0x7.byte, 0x8, 0x9]) + witness.addCode(@[0x4.byte, 0x5, 0x6]) witness.addHeader(Header()) let witnessBytes = witness.encode() From 7a8e83f7c104034c374fb14af53fcd9acc777d75 Mon Sep 17 00:00:00 2001 From: bhartnett Date: Tue, 10 Jun 2025 19:47:45 +0800 Subject: [PATCH 134/138] Add caching for state lookups and pre-fetch state using eth_createAccessList. --- nimbus_verified_proxy/rpc/accounts.nim | 103 ++++++++++++++++++++-- nimbus_verified_proxy/rpc/rpc_eth_api.nim | 30 ++++++- nimbus_verified_proxy/types.nim | 28 +++++- 3 files changed, 152 insertions(+), 9 deletions(-) diff --git a/nimbus_verified_proxy/rpc/accounts.nim b/nimbus_verified_proxy/rpc/accounts.nim index b572a05896..47f46ecc2c 100644 --- a/nimbus_verified_proxy/rpc/accounts.nim +++ b/nimbus_verified_proxy/rpc/accounts.nim @@ -68,8 +68,8 @@ proc getStorageFromProof( of InvalidProof: return err(proofResult.errorMsg) -proc getStorageFromProof*( - stateRoot: Hash32, requestedSlot: UInt256, proof: ProofResponse +proc getStorageFromProof( + stateRoot: Hash32, requestedSlot: UInt256, proof: ProofResponse, storageProofIndex = 0 ): Result[UInt256, string] = let account = ?getAccountFromProof( @@ -82,10 +82,10 @@ proc getStorageFromProof*( # return 0 value return ok(u256(0)) - if len(proof.storageProof) != 1: + if proof.storageProof.len() <= storageProofIndex: return err("no storage proof for requested slot") - let storageProof = proof.storageProof[0] + let storageProof = proof.storageProof[storageProofIndex] if len(storageProof.proof) == 0: return err("empty mpt proof for account with not empty storage") @@ -100,7 +100,13 @@ proc getAccount*( address: Address, blockNumber: base.BlockNumber, stateRoot: Root, -): Future[Result[Account, string]] {.async.} = +): Future[Result[Account, string]] {.async: (raises: []).} = + let + cacheKey = (stateRoot, address) + cachedAcc = lcProxy.accountsCache.get(cacheKey) + if 
cachedAcc.isSome(): + return ok(cachedAcc.get()) + info "Forwarding eth_getAccount", blockNumber let @@ -115,6 +121,9 @@ proc getAccount*( proof.storageHash, proof.accountProof, ) + if account.isOk(): + lcProxy.accountsCache.put(cacheKey, account.get()) + return account proc getCode*( @@ -131,6 +140,12 @@ proc getCode*( if account.codeHash == EMPTY_CODE_HASH: return ok(newSeq[byte]()) + let + cacheKey = (stateRoot, address) + cachedCode = lcProxy.codeCache.get(cacheKey) + if cachedCode.isSome(): + return ok(cachedCode.get()) + info "Forwarding eth_getCode", blockNumber let code = @@ -142,6 +157,7 @@ proc getCode*( # verify the byte code. since we verified the account against # the state root we just need to verify the code hash if account.codeHash == keccak256(code): + lcProxy.codeCache.put(cacheKey, code) return ok(code) else: return err("received code doesn't match the account code hash") @@ -152,7 +168,13 @@ proc getStorageAt*( slot: UInt256, blockNumber: base.BlockNumber, stateRoot: Root, -): Future[Result[UInt256, string]] {.async.} = +): Future[Result[UInt256, string]] {.async: (raises: []).} = + let + cacheKey = (stateRoot, address, slot) + cachedSlotValue = lcProxy.storageCache.get(cacheKey) + if cachedSlotValue.isSome(): + return ok(cachedSlotValue.get()) + info "Forwarding eth_getStorageAt", blockNumber let @@ -164,4 +186,73 @@ proc getStorageAt*( slotValue = getStorageFromProof(stateRoot, slot, proof) + if slotValue.isOk(): + lcProxy.storageCache.put(cacheKey, slotValue.get()) + return slotValue + +proc populateCachesForAccountAndSlots( + lcProxy: VerifiedRpcProxy, + address: Address, + slots: seq[UInt256], + blockNumber: base.BlockNumber, + stateRoot: Root, +): Future[Result[void, string]] {.async: (raises: []).} = + + var slotsToFetch: seq[UInt256] + for s in slots: + let storageCacheKey = (stateRoot, address, s) + if lcProxy.storageCache.get(storageCacheKey).isNone(): + slotsToFetch.add(s) + + let accountCacheKey = (stateRoot, address) + + if 
lcProxy.accountsCache.get(accountCacheKey).isNone() or slotsToFetch.len() > 0: + let + proof = + try: + await lcProxy.rpcClient.eth_getProof(address, slotsToFetch, blockId(blockNumber)) + except CatchableError as e: + return err(e.msg) + account = getAccountFromProof( + stateRoot, proof.address, proof.balance, proof.nonce, proof.codeHash, + proof.storageHash, proof.accountProof, + ) + + if account.isOk(): + lcProxy.accountsCache.put(accountCacheKey, account.get()) + + for i, s in slotsToFetch: + let slotValue = getStorageFromProof(stateRoot, s, proof, i) + + if slotValue.isOk(): + let storageCacheKey = (stateRoot, address, s) + lcProxy.storageCache.put(storageCacheKey, slotValue.get()) + + ok() + +proc populateCachesUsingAccessList*( + lcProxy: VerifiedRpcProxy, + blockNumber: base.BlockNumber, + stateRoot: Root, + tx: TransactionArgs +): Future[Result[void, string]] {.async: (raises: []).} = + + let + accessListRes: AccessListResult = + try: + await lcProxy.rpcClient.eth_createAccessList(tx, blockId(blockNumber)) + except CatchableError as e: + return err(e.msg) + + var futs = newSeqOfCap[Future[Result[void, string]]](accessListRes.accessList.len()) + for accessPair in accessListRes.accessList: + let slots = accessPair.storageKeys.mapIt(UInt256.fromBytesBE(it.data)) + futs.add lcProxy.populateCachesForAccountAndSlots(accessPair.address, slots, blockNumber, stateRoot) + + try: + await allFutures(futs) + except CatchableError as e: + return err(e.msg) + + ok() diff --git a/nimbus_verified_proxy/rpc/rpc_eth_api.nim b/nimbus_verified_proxy/rpc/rpc_eth_api.nim index 87f2b1cd1b..2729b00498 100644 --- a/nimbus_verified_proxy/rpc/rpc_eth_api.nim +++ b/nimbus_verified_proxy/rpc/rpc_eth_api.nim @@ -148,10 +148,18 @@ proc installEthApiHandlers*(vp: VerifiedRpcProxy) = let header = (await vp.getHeader(blockTag)).valueOr: raise newException(ValueError, error) - optimisticStateFetch = optimisticStateFetch.valueOr: true + # Start fetching code to get it in the code cache + 
discard vp.getCode(tx.to.get(), header.number, header.stateRoot) + + # As a performance optimisation we concurrently pre-fetch the state needed + # for the call by calling eth_createAccessList and then using the returned + # access list keys to fetch the required state using eth_getProof. + (await vp.populateCachesUsingAccessList(header.number, header.stateRoot, tx)).isOkOr: + raise newException(ValueError, error) + let callResult = (await vp.evm.call(header, tx, optimisticStateFetch)).valueOr: raise newException(ValueError, error) @@ -166,10 +174,18 @@ proc installEthApiHandlers*(vp: VerifiedRpcProxy) = let header = (await vp.getHeader(blockTag)).valueOr: raise newException(ValueError, error) - optimisticStateFetch = optimisticStateFetch.valueOr: true + # Start fetching code to get it in the code cache + discard vp.getCode(tx.to.get(), header.number, header.stateRoot) + + # As a performance optimisation we concurrently pre-fetch the state needed + # for the call by calling eth_createAccessList and then using the returned + # access list keys to fetch the required state using eth_getProof. + (await vp.populateCachesUsingAccessList(header.number, header.stateRoot, tx)).isOkOr: + raise newException(ValueError, error) + let (accessList, error, gasUsed) = ( await vp.evm.createAccessList(header, tx, optimisticStateFetch) ).valueOr: @@ -191,6 +207,16 @@ proc installEthApiHandlers*(vp: VerifiedRpcProxy) = optimisticStateFetch = optimisticStateFetch.valueOr: true + # Start fetching code to get it in the code cache + discard vp.getCode(tx.to.get(), header.number, header.stateRoot) + + # As a performance optimisation we concurrently pre-fetch the state needed + # for the call by calling eth_createAccessList and then using the returned + # access list keys to fetch the required state using eth_getProof. 
+ (await vp.populateCachesUsingAccessList(header.number, header.stateRoot, tx)).isOkOr: + raise newException(ValueError, error) + + let gasEstimate = (await vp.evm.estimateGas(header, tx, optimisticStateFetch)).valueOr: raise newException(ValueError, error) diff --git a/nimbus_verified_proxy/types.nim b/nimbus_verified_proxy/types.nim index 1265b3894d..cf7ff33bdf 100644 --- a/nimbus_verified_proxy/types.nim +++ b/nimbus_verified_proxy/types.nim @@ -8,15 +8,35 @@ import json_rpc/[rpcproxy, rpcclient], stint, + minilru, ./header_store, ../portal/evm/async_evm, web3/eth_api_types +export minilru + +const + ACCOUNTS_CACHE_SIZE = 128 + CODE_CACHE_SIZE = 64 + STORAGE_CACHE_SIZE = 256 + type + AccountsCacheKey* = (Root, Address) + AccountsCache* = LruCache[AccountsCacheKey, Account] + + CodeCacheKey* = (Root, Address) + CodeCache* = LruCache[CodeCacheKey, seq[byte]] + + StorageCacheKey* = (Root, Address, UInt256) + StorageCache* = LruCache[StorageCacheKey, UInt256] + VerifiedRpcProxy* = ref object evm*: AsyncEvm proxy*: RpcProxy headerStore*: HeaderStore + accountsCache*: AccountsCache + codeCache*: CodeCache + storageCache*: StorageCache chainId*: UInt256 maxBlockWalk*: uint64 @@ -33,5 +53,11 @@ proc init*( maxBlockWalk: uint64, ): T = VerifiedRpcProxy( - proxy: proxy, headerStore: headerStore, chainId: chainId, maxBlockWalk: maxBlockWalk + proxy: proxy, + headerStore: headerStore, + accountsCache: AccountsCache.init(ACCOUNTS_CACHE_SIZE), + codeCache: CodeCache.init(CODE_CACHE_SIZE), + storageCache: StorageCache.init(STORAGE_CACHE_SIZE), + chainId: chainId, + maxBlockWalk: maxBlockWalk ) From 5989aa92de387ee76d4758804bc38802c5387a8c Mon Sep 17 00:00:00 2001 From: bhartnett Date: Tue, 10 Jun 2025 19:51:36 +0800 Subject: [PATCH 135/138] Run nph. 
--- nimbus_verified_proxy/rpc/accounts.nim | 28 +++++++++++++++----------- 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/nimbus_verified_proxy/rpc/accounts.nim b/nimbus_verified_proxy/rpc/accounts.nim index 47f46ecc2c..3daaf96441 100644 --- a/nimbus_verified_proxy/rpc/accounts.nim +++ b/nimbus_verified_proxy/rpc/accounts.nim @@ -69,7 +69,10 @@ proc getStorageFromProof( return err(proofResult.errorMsg) proc getStorageFromProof( - stateRoot: Hash32, requestedSlot: UInt256, proof: ProofResponse, storageProofIndex = 0 + stateRoot: Hash32, + requestedSlot: UInt256, + proof: ProofResponse, + storageProofIndex = 0, ): Result[UInt256, string] = let account = ?getAccountFromProof( @@ -198,7 +201,6 @@ proc populateCachesForAccountAndSlots( blockNumber: base.BlockNumber, stateRoot: Root, ): Future[Result[void, string]] {.async: (raises: []).} = - var slotsToFetch: seq[UInt256] for s in slots: let storageCacheKey = (stateRoot, address, s) @@ -211,7 +213,9 @@ proc populateCachesForAccountAndSlots( let proof = try: - await lcProxy.rpcClient.eth_getProof(address, slotsToFetch, blockId(blockNumber)) + await lcProxy.rpcClient.eth_getProof( + address, slotsToFetch, blockId(blockNumber) + ) except CatchableError as e: return err(e.msg) account = getAccountFromProof( @@ -235,20 +239,20 @@ proc populateCachesUsingAccessList*( lcProxy: VerifiedRpcProxy, blockNumber: base.BlockNumber, stateRoot: Root, - tx: TransactionArgs + tx: TransactionArgs, ): Future[Result[void, string]] {.async: (raises: []).} = - - let - accessListRes: AccessListResult = - try: - await lcProxy.rpcClient.eth_createAccessList(tx, blockId(blockNumber)) - except CatchableError as e: - return err(e.msg) + let accessListRes: AccessListResult = + try: + await lcProxy.rpcClient.eth_createAccessList(tx, blockId(blockNumber)) + except CatchableError as e: + return err(e.msg) var futs = newSeqOfCap[Future[Result[void, string]]](accessListRes.accessList.len()) for accessPair in accessListRes.accessList: let 
slots = accessPair.storageKeys.mapIt(UInt256.fromBytesBE(it.data)) - futs.add lcProxy.populateCachesForAccountAndSlots(accessPair.address, slots, blockNumber, stateRoot) + futs.add lcProxy.populateCachesForAccountAndSlots( + accessPair.address, slots, blockNumber, stateRoot + ) try: await allFutures(futs) From 309dbb13f702c557e86fe4f3a5f992bd1768407c Mon Sep 17 00:00:00 2001 From: bhartnett Date: Fri, 13 Jun 2025 09:58:26 +0800 Subject: [PATCH 136/138] Return EVM output data in eth_call when EVM execution reverts. --- nimbus_verified_proxy/rpc/rpc_eth_api.nim | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/nimbus_verified_proxy/rpc/rpc_eth_api.nim b/nimbus_verified_proxy/rpc/rpc_eth_api.nim index 2729b00498..9000ca9aad 100644 --- a/nimbus_verified_proxy/rpc/rpc_eth_api.nim +++ b/nimbus_verified_proxy/rpc/rpc_eth_api.nim @@ -10,6 +10,7 @@ import results, chronicles, + stew/byteutils, json_rpc/[rpcserver, rpcclient, rpcproxy], eth/common/accounts, web3/eth_api, @@ -163,6 +164,13 @@ proc installEthApiHandlers*(vp: VerifiedRpcProxy) = let callResult = (await vp.evm.call(header, tx, optimisticStateFetch)).valueOr: raise newException(ValueError, error) + if callResult.error.len() > 0: + raise (ref ApplicationError)( + code: 3, + msg: callResult.error, + data: Opt.some(JsonString("\"" & callResult.output.to0xHex() & "\"")), + ) + return callResult.output vp.proxy.rpc("eth_createAccessList") do( From bfc5754d6c4b26539cbe3625c8d2e01963723764 Mon Sep 17 00:00:00 2001 From: chirag-parmar Date: Tue, 8 Jul 2025 18:33:48 +0530 Subject: [PATCH 137/138] format rebased code --- nimbus_verified_proxy/rpc/rpc_eth_api.nim | 1 - nimbus_verified_proxy/types.nim | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/nimbus_verified_proxy/rpc/rpc_eth_api.nim b/nimbus_verified_proxy/rpc/rpc_eth_api.nim index 9000ca9aad..c3be570df5 100644 --- a/nimbus_verified_proxy/rpc/rpc_eth_api.nim +++ b/nimbus_verified_proxy/rpc/rpc_eth_api.nim @@ -224,7 +224,6 @@ proc 
installEthApiHandlers*(vp: VerifiedRpcProxy) = (await vp.populateCachesUsingAccessList(header.number, header.stateRoot, tx)).isOkOr: raise newException(ValueError, error) - let gasEstimate = (await vp.evm.estimateGas(header, tx, optimisticStateFetch)).valueOr: raise newException(ValueError, error) diff --git a/nimbus_verified_proxy/types.nim b/nimbus_verified_proxy/types.nim index cf7ff33bdf..80843f8a9f 100644 --- a/nimbus_verified_proxy/types.nim +++ b/nimbus_verified_proxy/types.nim @@ -59,5 +59,5 @@ proc init*( codeCache: CodeCache.init(CODE_CACHE_SIZE), storageCache: StorageCache.init(STORAGE_CACHE_SIZE), chainId: chainId, - maxBlockWalk: maxBlockWalk + maxBlockWalk: maxBlockWalk, ) From e39c5329e5ec165ef5d25377d34756de7bec90da Mon Sep 17 00:00:00 2001 From: chirag-parmar Date: Tue, 8 Jul 2025 19:02:01 +0530 Subject: [PATCH 138/138] fix rebase --- nimbus_verified_proxy/rpc/accounts.nim | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nimbus_verified_proxy/rpc/accounts.nim b/nimbus_verified_proxy/rpc/accounts.nim index 3daaf96441..e1967266de 100644 --- a/nimbus_verified_proxy/rpc/accounts.nim +++ b/nimbus_verified_proxy/rpc/accounts.nim @@ -68,7 +68,7 @@ proc getStorageFromProof( of InvalidProof: return err(proofResult.errorMsg) -proc getStorageFromProof( +proc getStorageFromProof*( stateRoot: Hash32, requestedSlot: UInt256, proof: ProofResponse,