Skip to content

Commit

Permalink
Merge branch 'master' into eip-7702
Browse files Browse the repository at this point in the history
  • Loading branch information
jangko committed Sep 30, 2024
2 parents 7243636 + 8d4b789 commit bec5600
Show file tree
Hide file tree
Showing 287 changed files with 5,856 additions and 6,513 deletions.
1 change: 1 addition & 0 deletions .gitmodules
Original file line number Diff line number Diff line change
Expand Up @@ -240,3 +240,4 @@
[submodule "vendor/nim-minilru"]
path = vendor/nim-minilru
url = https://github.com/status-im/nim-minilru.git
branch = master
17 changes: 10 additions & 7 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -61,8 +61,10 @@ EXCLUDED_NIM_PACKAGES := \
TOOLS := \
test_tools_build \
persist \
hunter
hunter \
nrpc
TOOLS_DIRS := \
nrpc \
tests \
premix
# comma-separated values for the "clean" target
Expand Down Expand Up @@ -108,6 +110,7 @@ VERIF_PROXY_OUT_PATH ?= build/libverifproxy/
fluffy \
nimbus_verified_proxy \
libverifproxy \
external_sync \
test \
test-reproducibility \
clean \
Expand Down Expand Up @@ -207,7 +210,7 @@ update-from-ci: | sanity-checks update-test
$(TOOLS): | build deps rocksdb
for D in $(TOOLS_DIRS); do [ -e "$${D}/$@.nim" ] && TOOL_DIR="$${D}" && break; done && \
echo -e $(BUILD_MSG) "build/$@" && \
$(ENV_SCRIPT) nim c $(NIM_PARAMS) -o:build/$@ "$${TOOL_DIR}/$@.nim"
$(ENV_SCRIPT) nim c $(NIM_PARAMS) -d:chronicles_log_level=TRACE -o:build/$@ "$${TOOL_DIR}/$@.nim"

# a phony target, because teaching `make` how to do conditional recompilation of Nim projects is too complicated
nimbus: | build deps rocksdb
Expand Down Expand Up @@ -281,17 +284,17 @@ fluffy-test-reproducibility:
{ echo -e "\e[91mFailure: the binary changed between builds.\e[39m"; exit 1; }

# fluffy tests
all_fluffy_portal_spec_tests: | build deps
all_history_network_custom_chain_tests: | build deps
echo -e $(BUILD_MSG) "build/$@" && \
$(ENV_SCRIPT) nim c -r $(NIM_PARAMS) -d:chronicles_log_level=ERROR -d:nimbus_db_backend=sqlite -o:build/$@ "fluffy/tests/portal_spec_tests/mainnet/$@.nim"
$(ENV_SCRIPT) nim c -r $(NIM_PARAMS) -d:chronicles_log_level=ERROR -d:mergeBlockNumber:38130 -d:nimbus_db_backend=sqlite -o:build/$@ "fluffy/tests/history_network_tests/$@.nim"


all_fluffy_tests: | build deps
echo -e $(BUILD_MSG) "build/$@" && \
$(ENV_SCRIPT) nim c -r $(NIM_PARAMS) -d:chronicles_log_level=ERROR -d:nimbus_db_backend=sqlite -d:mergeBlockNumber:38130 -o:build/$@ "fluffy/tests/$@.nim"
$(ENV_SCRIPT) nim c -r $(NIM_PARAMS) -d:chronicles_log_level=ERROR -d:nimbus_db_backend=sqlite -o:build/$@ "fluffy/tests/$@.nim"

# builds and runs the fluffy test suite
fluffy-test: | all_fluffy_portal_spec_tests all_fluffy_tests
fluffy-test: | all_fluffy_tests all_history_network_custom_chain_tests

# builds the fluffy tools, wherever they are
$(FLUFFY_TOOLS): | build deps rocksdb
Expand Down Expand Up @@ -357,7 +360,7 @@ txparse: | build deps

# usual cleaning
clean: | clean-common
rm -rf build/{nimbus,fluffy,libverifproxy,nimbus_verified_proxy,$(TOOLS_CSV),$(FLUFFY_TOOLS_CSV),all_tests,test_kvstore_rocksdb,test_rpc,all_fluffy_tests,all_fluffy_portal_spec_tests,test_portal_testnet,utp_test_app,utp_test,*.dSYM}
rm -rf build/{nimbus,fluffy,libverifproxy,nimbus_verified_proxy,$(TOOLS_CSV),$(FLUFFY_TOOLS_CSV),all_tests,test_kvstore_rocksdb,test_rpc,all_fluffy_tests,all_history_network_custom_chain_tests,test_portal_testnet,utp_test_app,utp_test,*.dSYM}
rm -rf tools/t8n/{t8n,t8n_test}
rm -rf tools/evmstate/{evmstate,evmstate_test}
ifneq ($(USE_LIBBACKTRACE), 0)
Expand Down
15 changes: 2 additions & 13 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -25,19 +25,8 @@ All consensus-layer client development is happening in parallel in the
Monthly development updates are shared
[here](https://hackmd.io/jRpxY4WBQJ-hnsKaPDYqTw).

Some recent highlights include:
- Renewed funding from the EF to accelerate development
- Completed Berlin and London fork compatibility (EIP-1559). It now passes nearly all the EF Hive testsuite, and 100% of contract execution tests (47,951 tests)
- New GraphQL and WebSocket APIs, complementing JSON-RPC
- EVMC compatibility, supporting third-party optimised EVM plugins
- Up to 100x memory saving during contract executions
- Asynchronous EVM to execute many contracts in parallel, while they wait for data from the network
- Updated network protocols, to work with the latest eth/66-68 protocols
- A prototype new mechanism for state sync which combines what have been called Fast sync, Snap sync and Beam sync in a self-tuning way, and allows the user to participate in the network (read accounts, run transactions etc.) while sync is still in progress
- A significant redesign of the storage database to use less disk space and run faster.

For more detailed write-ups on the development progress, follow the
[Nimbus blog](https://our.status.im/tag/nimbus/).
[Nimbus blog](https://blog.nimbus.team/).

## Building & Testing

Expand Down Expand Up @@ -205,7 +194,7 @@ available.)
cases when the `gc` is involved in a memory corruption or corruption
camouflage.

* ENABLE_LINE_NUMBERS=1
* ENABLE_LINE_NUMBERS=1<br>
Enables logger to print out source code location with log message

* ENABLE_EVMC=1<br>
Expand Down
2 changes: 1 addition & 1 deletion fluffy/common/common_utils.nim
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ import

func fromBytes*(T: type KeccakHash, hash: openArray[byte]): T =
doAssert(hash.len() == 32)
KeccakHash(data: array[32, byte].initCopyFrom(hash))
KeccakHash(array[32, byte].initCopyFrom(hash))

iterator strippedLines(filename: string): string {.raises: [ref IOError].} =
for line in lines(filename):
Expand Down
95 changes: 88 additions & 7 deletions fluffy/conf.nim
Original file line number Diff line number Diff line change
Expand Up @@ -45,8 +45,18 @@ const
defaultTableIpLimitDesc* = $defaultPortalProtocolConfig.tableIpLimits.tableIpLimit
defaultBucketIpLimitDesc* = $defaultPortalProtocolConfig.tableIpLimits.bucketIpLimit
defaultBitsPerHopDesc* = $defaultPortalProtocolConfig.bitsPerHop
defaultMaxGossipNodesDesc* = $defaultPortalProtocolConfig.maxGossipNodes
defaultRpcApis* = @["eth", "portal"]
defaultRpcApisDesc* = "eth,portal"

type
RpcFlag* {.pure.} = enum
eth
debug
portal
portal_debug
discovery

TrustedDigest* = MDigest[32 * 8]

PortalCmd* = enum
Expand Down Expand Up @@ -89,7 +99,7 @@ type

portalSubnetworks* {.
desc: "Select which networks (Portal sub-protocols) to enable",
defaultValue: {PortalSubnetwork.history},
defaultValue: {PortalSubnetwork.history, PortalSubnetwork.state},
name: "portal-subnetworks"
.}: set[PortalSubnetwork]

Expand Down Expand Up @@ -174,12 +184,9 @@ type
name: "metrics-port"
.}: Port

rpcEnabled* {.desc: "Enable the JSON-RPC server", defaultValue: false, name: "rpc".}:
bool

rpcPort* {.
desc: "HTTP port for the JSON-RPC server", defaultValue: 8545, name: "rpc-port"
.}: Port
rpcEnabled* {.
desc: "Enable the HTTP JSON-RPC server", defaultValue: false, name: "rpc"
.}: bool

rpcAddress* {.
desc: "Listening address of the RPC server",
Expand All @@ -188,6 +195,34 @@ type
name: "rpc-address"
.}: IpAddress

rpcPort* {.
desc: "Port for the HTTP JSON-RPC server", defaultValue: 8545, name: "rpc-port"
.}: Port

rpcApi* {.
desc:
"Enable specific set of RPC APIs (available: eth, debug, portal, portal_debug, discovery)",
defaultValue: defaultRpcApis,
defaultValueDesc: $defaultRpcApisDesc,
name: "rpc-api"
.}: seq[string]

wsEnabled* {.
desc: "Enable the WebSocket JSON-RPC server", defaultValue: false, name: "ws"
.}: bool

wsPort* {.
desc: "Port for the WebSocket JSON-RPC server",
defaultValue: 8546,
name: "ws-port"
.}: Port

wsCompression* {.
desc: "Enable compression for the WebSocket JSON-RPC server",
defaultValue: false,
name: "ws-compression"
.}: bool

tableIpLimit* {.
hidden,
desc:
Expand Down Expand Up @@ -218,6 +253,14 @@ type
name: "bits-per-hop"
.}: int

maxGossipNodes* {.
hidden,
desc: "The maximum number of nodes to send content to during gossip",
defaultValue: defaultPortalProtocolConfig.maxGossipNodes,
defaultValueDesc: $defaultMaxGossipNodesDesc,
name: "max-gossip-nodes"
.}: int

radiusConfig* {.
desc:
"Radius configuration for a fluffy node. Radius can be either `dynamic` " &
Expand Down Expand Up @@ -341,3 +384,41 @@ chronicles.formatIt(OutDir):
$it
chronicles.formatIt(InputFile):
$it

func processList(v: string, o: var seq[string]) =
  ## Split a comma- or space-separated string and append each
  ## non-empty item to `o`. An empty input string adds nothing.
  if v.len > 0:
    for item in v.split({' ', ','}):
      if item.len > 0:
        o.add(item)

iterator repeatingList(listOfList: openArray[string]): string =
  ## Yield every individual item found across a collection of
  ## comma/space separated strings, in input order.
  for entry in listOfList:
    var parsed: seq[string]
    processList(entry, parsed)
    for item in parsed:
      yield item

proc getRpcFlags*(rpcApis: openArray[string]): set[RpcFlag] =
  ## Map textual RPC API names (case-insensitive, possibly given as
  ## comma-separated lists) onto their `RpcFlag` values.
  ## Terminates the process when the list is empty or when an
  ## unrecognised name is encountered.
  if rpcApis.len == 0:
    error "No RPC APIs specified"
    quit QuitFailure

  for apiStr in repeatingList(rpcApis):
    case apiStr.toLowerAscii()
    of "eth":
      result.incl RpcFlag.eth
    of "debug":
      result.incl RpcFlag.debug
    of "portal":
      result.incl RpcFlag.portal
    of "portal_debug":
      result.incl RpcFlag.portal_debug
    of "discovery":
      result.incl RpcFlag.discovery
    else:
      error "Unknown RPC API: ", name = apiStr
      quit QuitFailure
14 changes: 11 additions & 3 deletions fluffy/database/era1_db.nim
Original file line number Diff line number Diff line change
Expand Up @@ -7,14 +7,19 @@

{.push raises: [].}

import std/os, stew/io2, results, ../network/history/accumulator, ../eth_data/era1
import
std/os,
stew/io2,
results,
../network/history/validation/historical_hashes_accumulator,
../eth_data/era1

type Era1DB* = ref object
## The Era1 database manages a collection of era files that together make up
## a linear history of pre-merge execution chain data.
path: string
network: string
accumulator: FinishedAccumulator
accumulator: FinishedHistoricalHashesAccumulator
files: seq[Era1File]

proc getEra1File(db: Era1DB, era: Era1): Result[Era1File, string] =
Expand Down Expand Up @@ -46,7 +51,10 @@ proc getEra1File(db: Era1DB, era: Era1): Result[Era1File, string] =
ok(f)

proc new*(
T: type Era1DB, path: string, network: string, accumulator: FinishedAccumulator
T: type Era1DB,
path: string,
network: string,
accumulator: FinishedHistoricalHashesAccumulator,
): Era1DB =
Era1DB(path: path, network: network, accumulator: accumulator)

Expand Down
6 changes: 3 additions & 3 deletions fluffy/docs/the_fluffy_book/docs/history-content-bridging.md
Original file line number Diff line number Diff line change
Expand Up @@ -103,7 +103,7 @@ Run Fluffy and trigger the propagation of data with the
`portal_history_propagateEpochRecords` JSON-RPC API call:

```bash
./build/fluffy --rpc
./build/fluffy --rpc --rpc-api:portal,portal_debug

# From another terminal
curl -s -X POST -H 'Content-Type: application/json' -d '{"jsonrpc":"2.0","id":"1","method":"portal_history_propagateEpochRecords","params":["./user_data_dir/"]}' http://localhost:8545 | jq
Expand All @@ -116,7 +116,7 @@ accumulators are available on the history network:

Make sure you still have a fluffy instance running, if not run:
```bash
./build/fluffy --rpc
./build/fluffy --rpc --rpc-api:portal,portal_debug
```

Run the `content_verifier` tool and see if all epoch accumulators are found:
Expand Down Expand Up @@ -146,7 +146,7 @@ This will store blocks 1 to 10 into a json file located at
`portal_history_propagate` JSON-RPC API call:

```bash
./build/fluffy --rpc
./build/fluffy --rpc --rpc-api:portal,portal_debug

# From another shell
curl -s -X POST -H 'Content-Type: application/json' -d '{"jsonrpc":"2.0","id":"1","method":"portal_history_propagate","params":["./user_data_dir/eth-history-data.json"]}' http://localhost:8545 | jq
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ First build Fluffy as explained [here](./quick-start.md#build-the-fluffy-client)

Next run it with the JSON-RPC server enabled:
```bash
./build/fluffy --rpc --bootstrap-node:enr:<base64 encoding of ENR>
./build/fluffy --rpc --rpc-api:portal,discovery --bootstrap-node:enr:<base64 encoding of ENR>
```

### Testing Discovery v5 Layer
Expand Down
12 changes: 9 additions & 3 deletions fluffy/eth_data/era1.nim
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ import
beacon_chain/spec/beacon_time,
ssz_serialization,
ncli/e2store,
../network/history/accumulator
../network/history/validation/historical_hashes_accumulator

from nimcrypto/hash import fromHex
from ../../nimbus/utils/utils import calcTxRoot, calcReceiptsRoot
Expand Down Expand Up @@ -450,7 +450,10 @@ proc buildAccumulator*(f: Era1File): Result[EpochRecordCached, string] =
totalDifficulty = ?f.getTotalDifficulty(blockNumber)

headerRecords.add(
HeaderRecord(blockHash: blockHeader.blockHash(), totalDifficulty: totalDifficulty)
HeaderRecord(
blockHash: BlockHash(data: blockHeader.blockHash().data),
totalDifficulty: totalDifficulty,
)
)

ok(EpochRecordCached.init(headerRecords))
Expand Down Expand Up @@ -479,7 +482,10 @@ proc verify*(f: Era1File): Result[Digest, string] =
return err("Invalid receipts root")

headerRecords.add(
HeaderRecord(blockHash: blockHeader.blockHash(), totalDifficulty: totalDifficulty)
HeaderRecord(
blockHash: BlockHash(data: blockHeader.blockHash().data),
totalDifficulty: totalDifficulty,
)
)

let expectedRoot = ?f.getAccumulatorRoot()
Expand Down
3 changes: 2 additions & 1 deletion fluffy/eth_data/history_data_json_store.nim
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ import
chronicles,
eth/[rlp, common/eth_types],
../../nimbus/common/[chain_config, genesis],
../network/history/[history_content, accumulator]
../network/history/[history_content, validation/historical_hashes_accumulator]

export results, tables

Expand Down Expand Up @@ -160,6 +160,7 @@ proc getGenesisHeader*(id: NetworkId = MainNet): BlockHeader =
try:
networkParams(id)
except ValueError, RlpError:
debugEcho getCurrentException()[]
raise (ref Defect)(msg: "Network parameters should be valid")

try:
Expand Down
3 changes: 2 additions & 1 deletion fluffy/eth_data/history_data_seeding.nim
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,8 @@ import
eth/common/eth_types,
eth/rlp,
../network/wire/portal_protocol,
../network/history/[history_content, history_network, accumulator],
../network/history/
[history_content, history_network, validation/historical_hashes_accumulator],
"."/[era1, history_data_json_store, history_data_ssz_e2s]

export results
Expand Down
8 changes: 5 additions & 3 deletions fluffy/eth_data/history_data_ssz_e2s.nim
Original file line number Diff line number Diff line change
Expand Up @@ -13,17 +13,19 @@ import
results,
eth/[rlp, common/eth_types],
ncli/e2store,
../network/history/[history_content, accumulator]
../network/history/[history_content, validation/historical_hashes_accumulator]

export results

# Reading SSZ data from files

proc readAccumulator*(file: string): Result[FinishedAccumulator, string] =
proc readAccumulator*(
file: string
): Result[FinishedHistoricalHashesAccumulator, string] =
let encodedAccumulator = ?readAllFile(file).mapErr(toString)

try:
ok(SSZ.decode(encodedAccumulator, FinishedAccumulator))
ok(SSZ.decode(encodedAccumulator, FinishedHistoricalHashesAccumulator))
except SerializationError as e:
err("Failed decoding accumulator: " & e.msg)

Expand Down
Loading

0 comments on commit bec5600

Please sign in to comment.