Refactor portal test directory structure
kdeme committed Sep 24, 2024
1 parent 0fb9581 commit c98423e
Showing 32 changed files with 287 additions and 274 deletions.
10 changes: 5 additions & 5 deletions Makefile
@@ -281,17 +281,17 @@ fluffy-test-reproducibility:
{ echo -e "\e[91mFailure: the binary changed between builds.\e[39m"; exit 1; }

# fluffy tests
all_fluffy_portal_spec_tests: | build deps
all_history_network_custom_chain_tests: | build deps
echo -e $(BUILD_MSG) "build/$@" && \
$(ENV_SCRIPT) nim c -r $(NIM_PARAMS) -d:chronicles_log_level=ERROR -d:nimbus_db_backend=sqlite -o:build/$@ "fluffy/tests/portal_spec_tests/mainnet/$@.nim"
$(ENV_SCRIPT) nim c -r $(NIM_PARAMS) -d:chronicles_log_level=ERROR -d:mergeBlockNumber:38130 -d:nimbus_db_backend=sqlite -o:build/$@ "fluffy/tests/history_network_tests/$@.nim"


all_fluffy_tests: | build deps
echo -e $(BUILD_MSG) "build/$@" && \
$(ENV_SCRIPT) nim c -r $(NIM_PARAMS) -d:chronicles_log_level=ERROR -d:nimbus_db_backend=sqlite -d:mergeBlockNumber:38130 -o:build/$@ "fluffy/tests/$@.nim"
$(ENV_SCRIPT) nim c -r $(NIM_PARAMS) -d:chronicles_log_level=ERROR -d:nimbus_db_backend=sqlite -o:build/$@ "fluffy/tests/$@.nim"

# builds and runs the fluffy test suite
fluffy-test: | all_fluffy_portal_spec_tests all_fluffy_tests
fluffy-test: | all_fluffy_tests all_history_network_custom_chain_tests

# builds the fluffy tools, wherever they are
$(FLUFFY_TOOLS): | build deps rocksdb
@@ -357,7 +357,7 @@ txparse: | build deps

# usual cleaning
clean: | clean-common
rm -rf build/{nimbus,fluffy,libverifproxy,nimbus_verified_proxy,$(TOOLS_CSV),$(FLUFFY_TOOLS_CSV),all_tests,test_kvstore_rocksdb,test_rpc,all_fluffy_tests,all_fluffy_portal_spec_tests,test_portal_testnet,utp_test_app,utp_test,*.dSYM}
rm -rf build/{nimbus,fluffy,libverifproxy,nimbus_verified_proxy,$(TOOLS_CSV),$(FLUFFY_TOOLS_CSV),all_tests,test_kvstore_rocksdb,test_rpc,all_fluffy_tests,all_history_network_custom_chain_tests,test_portal_testnet,utp_test_app,utp_test,*.dSYM}
rm -rf tools/t8n/{t8n,t8n_test}
rm -rf tools/evmstate/{evmstate,evmstate_test}
ifneq ($(USE_LIBBACKTRACE), 0)
2 changes: 1 addition & 1 deletion fluffy/network/history/history_network.nim
@@ -19,9 +19,9 @@ import
../wire/[portal_protocol, portal_stream, portal_protocol_config],
"."/[
history_content,
beacon_chain_historical_roots,
validation/historical_hashes_accumulator,
],
../beacon/beacon_chain_historical_roots,
./content/content_deprecated

logScope:
2 changes: 1 addition & 1 deletion fluffy/scripts/test_portal_testnet.nim
@@ -20,7 +20,7 @@ import
../rpc/eth_rpc_client,
../eth_data/[history_data_seeding, history_data_json_store, history_data_ssz_e2s],
../network/history/[history_content, validation/historical_hashes_accumulator],
../tests/test_history_util
../tests/history_network_tests/test_history_util

type
FutureCallback[A] = proc(): Future[A] {.gcsafe, raises: [].}
6 changes: 1 addition & 5 deletions fluffy/tests/all_fluffy_tests.nim
@@ -8,13 +8,9 @@
{.warning[UnusedImport]: off.}

import
./test_portal_wire_protocol,
./test_accumulator,
./test_history_network,
./test_content_db,
./test_discovery_rpc,
./test_beacon_chain_historical_roots,
./test_beacon_chain_historical_summaries,
./wire_protocol_tests/all_wire_protocol_tests,
./history_network_tests/all_history_network_tests,
./beacon_network_tests/all_beacon_network_tests,
./state_network_tests/all_state_network_tests
fluffy/tests/beacon_network_tests/all_beacon_network_tests.nim
@@ -9,6 +9,8 @@

import
./test_beacon_content,
./test_beacon_historical_roots,
./test_beacon_historical_summaries,
./test_beacon_historical_summaries_vectors,
./test_beacon_network,
./test_beacon_light_client
6 changes: 3 additions & 3 deletions fluffy/tests/beacon_network_tests/test_beacon_content.nim
@@ -19,7 +19,7 @@ import
../../eth_data/yaml_utils,
"."/light_client_test_data

suite "Beacon Content Encodings - Mainnet":
suite "Beacon Content Keys and Values - Test Vectors":
# These test vectors are generated by eth_data_exporter. The content is taken
# from mainnet and encoded as it would be transmitted on Portal Network,
# including also the content key.
@@ -170,7 +170,7 @@ suite "Beacon Content Encodings - Mainnet":
check encoded == contentValueEncoded
check encode(key).asSeq() == contentKeyEncoded

suite "Beacon Content Encodings":
suite "Beacon Content Keys and Values":
# TODO: These tests are less useful now and should instead be altered to
# use the consensus test vectors to simply test if encoding / decoding works
# fine for the different forks.
@@ -277,7 +277,7 @@ suite "Beacon Content Encodings":
decodeLightClientBootstrapForked(forkDigests, encodedTooEarlyFork).isErr()
decodeLightClientBootstrapForked(forkDigests, encodedUnknownFork).isErr()

suite "Beacon ContentKey Encodings ":
suite "Beacon Content Keys - Invalid Cases":
test "Invalid prefix - 0 value":
let encoded = ContentKeyByteList.init(@[byte 0x00])
let decoded = decode(encoded)
fluffy/tests/beacon_network_tests/test_beacon_historical_roots.nim
@@ -17,7 +17,7 @@ import
beacon_chain /../ tests/testblockutil,
beacon_chain /../ tests/mocking/mock_genesis,
beacon_chain /../ tests/consensus_spec/fixtures_utils,
../network/history/beacon_chain_historical_roots
../../network/beacon/beacon_chain_historical_roots

suite "Beacon Chain Historical Roots":
let
108 changes: 36 additions & 72 deletions fluffy/tests/beacon_network_tests/test_beacon_historical_summaries.nim
@@ -1,84 +1,48 @@
# fluffy
# Copyright (c) 2024 Status Research & Development GmbH
# Copyright (c) 2023-2024 Status Research & Development GmbH
# Licensed and distributed under either of
# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.

{.used.}

{.push raises: [].}

import
unittest2,
stew/byteutils,
results,
beacon_chain/networking/network_metadata,
beacon_chain/spec/forks,
../../network/beacon/[beacon_chain_historical_summaries, beacon_content],
../../eth_data/yaml_utils

type YamlHistoricalSummariesWithProof* = object
content_key*: string
content_value*: string
beacon_state_root*: string
historical_summaries_root*: string
historical_summaries_state_proof*: array[5, string]
epoch*: uint64

suite "Beacon HistoricalSummariesWithProof":
const testVectorDir =
"./vendor/portal-spec-tests/tests/mainnet/beacon_chain/historical_summaries_with_proof/deneb/"

beacon_chain/spec/datatypes/capella,
# Test helpers
beacon_chain /../ tests/testblockutil,
beacon_chain /../ tests/mocking/mock_genesis,
beacon_chain /../ tests/consensus_spec/fixtures_utils,
../../network/beacon/beacon_chain_historical_summaries

suite "Beacon Chain Historical Summaries":
let
metadata = getMetadataForNetwork("mainnet")
genesisState =
try:
template genesisData(): auto =
metadata.genesis.bakedBytes

newClone(
readSszForkedHashedBeaconState(
metadata.cfg, genesisData.toOpenArray(genesisData.low, genesisData.high)
)
)
except CatchableError as err:
raiseAssert "Invalid baked-in state: " & err.msg

# Although the test data is generated from a test state, we need to use the
# forkDigests of mainnet as apparently these are used in the generated test vector.
genesis_validators_root = getStateField(genesisState[], genesis_validators_root)
# genesis_validators_root = Digest.fromHex(
# "0x2170688a9e92595fb353c0a2ad6733431a8066c7ecb48ab3b2aaf9091a1722b1"
# )
forkDigests = newClone ForkDigests.init(metadata.cfg, genesis_validators_root)

test "HistoricalSummaries Encoding/Decoding and Verification":
const file = testVectorDir & "historical_summaries_with_proof.yaml"
let
testCase = YamlHistoricalSummariesWithProof.loadFromYaml(file).valueOr:
raiseAssert "Invalid test vector file: " & error

contentKeyEncoded = testCase.content_key.hexToSeqByte()
contentValueEncoded = testCase.content_value.hexToSeqByte()

# Decode content and content key
contentKey = decodeSsz(contentKeyEncoded, ContentKey)
contentValue =
decodeSsz(forkDigests[], contentValueEncoded, HistoricalSummariesWithProof)
check:
contentKey.isOk()
contentValue.isOk()

let summariesWithProof = contentValue.value()
let root = hash_tree_root(summariesWithProof.historical_summaries)

check:
root.data == testCase.historical_summaries_root.hexToSeqByte()
summariesWithProof.epoch == testCase.epoch
verifyProof(summariesWithProof, Digest.fromHex(testCase.beacon_state_root))

# Encode content and content key
let consensusFork = consensusForkAtEpoch(metadata.cfg, summariesWithProof.epoch)
let forkDigest = atConsensusFork(forkDigests[], consensusFork)
check:
encodeSsz(summariesWithProof, forkDigest) == contentValueEncoded
encode(contentKey.value()).asSeq() == contentKeyEncoded
cfg = genesisTestRuntimeConfig(ConsensusFork.Capella)
state = newClone(initGenesisState(cfg = cfg))
var cache = StateCache()

var blocks: seq[capella.SignedBeaconBlock]
# Note:
# Adding 8192 blocks. First block is genesis block and not one of these.
# Then one extra block is needed to get the historical summaries, block
# roots and state roots processed.
# index i = 0 is second block.
# index i = 8190 is 8192th block and last one that is part of the first
# historical root
for i in 0 ..< SLOTS_PER_HISTORICAL_ROOT:
blocks.add(addTestBlock(state[], cache, cfg = cfg).capellaData)

test "Historical Summaries Proof":
withState(state[]):
when consensusFork >= ConsensusFork.Capella:
let historical_summaries = forkyState.data.historical_summaries
let res = buildProof(state[])
check res.isOk()
let proof = res.get()

withState(state[]):
check verifyProof(historical_summaries, proof, forkyState.root)
fluffy/tests/beacon_network_tests/test_beacon_historical_summaries_vectors.nim
@@ -0,0 +1,84 @@
# fluffy
# Copyright (c) 2024 Status Research & Development GmbH
# Licensed and distributed under either of
# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.

{.used.}

import
unittest2,
stew/byteutils,
results,
beacon_chain/networking/network_metadata,
beacon_chain/spec/forks,
../../network/beacon/[beacon_chain_historical_summaries, beacon_content],
../../eth_data/yaml_utils

type YamlHistoricalSummariesWithProof* = object
content_key*: string
content_value*: string
beacon_state_root*: string
historical_summaries_root*: string
historical_summaries_state_proof*: array[5, string]
epoch*: uint64

suite "Beacon Chain Historical Summaries With Proof - Test Vectors":
const testVectorDir =
"./vendor/portal-spec-tests/tests/mainnet/beacon_chain/historical_summaries_with_proof/deneb/"

let
metadata = getMetadataForNetwork("mainnet")
genesisState =
try:
template genesisData(): auto =
metadata.genesis.bakedBytes

newClone(
readSszForkedHashedBeaconState(
metadata.cfg, genesisData.toOpenArray(genesisData.low, genesisData.high)
)
)
except CatchableError as err:
raiseAssert "Invalid baked-in state: " & err.msg

# Although the test data is generated from a test state, we need to use the
# forkDigests of mainnet as apparently these are used in the generated test vector.
genesis_validators_root = getStateField(genesisState[], genesis_validators_root)
# genesis_validators_root = Digest.fromHex(
# "0x2170688a9e92595fb353c0a2ad6733431a8066c7ecb48ab3b2aaf9091a1722b1"
# )
forkDigests = newClone ForkDigests.init(metadata.cfg, genesis_validators_root)

test "HistoricalSummaries Encoding/Decoding and Verification":
const file = testVectorDir & "historical_summaries_with_proof.yaml"
let
testCase = YamlHistoricalSummariesWithProof.loadFromYaml(file).valueOr:
raiseAssert "Invalid test vector file: " & error

contentKeyEncoded = testCase.content_key.hexToSeqByte()
contentValueEncoded = testCase.content_value.hexToSeqByte()

# Decode content and content key
contentKey = decodeSsz(contentKeyEncoded, ContentKey)
contentValue =
decodeSsz(forkDigests[], contentValueEncoded, HistoricalSummariesWithProof)
check:
contentKey.isOk()
contentValue.isOk()

let summariesWithProof = contentValue.value()
let root = hash_tree_root(summariesWithProof.historical_summaries)

check:
root.data == testCase.historical_summaries_root.hexToSeqByte()
summariesWithProof.epoch == testCase.epoch
verifyProof(summariesWithProof, Digest.fromHex(testCase.beacon_state_root))

# Encode content and content key
let consensusFork = consensusForkAtEpoch(metadata.cfg, summariesWithProof.epoch)
let forkDigest = atConsensusFork(forkDigests[], consensusFork)
check:
encodeSsz(summariesWithProof, forkDigest) == contentValueEncoded
encode(contentKey.value()).asSeq() == contentKeyEncoded
fluffy/tests/beacon_network_tests/test_beacon_light_client.nim
@@ -18,7 +18,7 @@ import
../../network/beacon/[beacon_init_loader, beacon_light_client],
"."/[light_client_test_data, beacon_test_helpers]

procSuite "Portal Beacon Light Client":
procSuite "Beacon Light Client":
let rng = newRng()

proc headerCallback(
2 changes: 1 addition & 1 deletion fluffy/tests/beacon_network_tests/test_beacon_network.nim
@@ -20,7 +20,7 @@ import
[beacon_network, beacon_init_loader, beacon_chain_historical_summaries],
"."/[light_client_test_data, beacon_test_helpers]

procSuite "Beacon Content Network":
procSuite "Beacon Network":
let rng = newRng()

asyncTest "Get bootstrap by trusted block hash":
fluffy/tests/history_network_tests/all_history_network_custom_chain_tests.nim
@@ -0,0 +1,15 @@
# Nimbus
# Copyright (c) 2024 Status Research & Development GmbH
# Licensed under either of
# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
# * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
# at your option. This file may not be copied, modified, or distributed except according to those terms.

{.warning[UnusedImport]: off.}

# Note: These tests are separated because they require a custom merge block
# number defined at compile time. Once runtime chain config gets added these
# tests can be compiled together with all the other portal tests.
import
./test_historical_hashes_accumulator,
./test_history_network
fluffy/tests/history_network_tests/all_history_network_tests.nim
@@ -8,6 +8,10 @@
{.warning[UnusedImport]: off.}

import
./test_history_content_keys,
./test_history_content,
./test_history_content_validation,
./test_historical_hashes_accumulator_root,
./test_block_proof_historical_roots,
./test_block_proof_historical_roots_vectors,
./test_block_proof_historical_summaries,
fluffy/tests/history_network_tests/test_block_proof_historical_roots.nim
@@ -35,7 +35,7 @@ import
# TODO: Add more blocks to reach 1+ historical roots, to make sure that indexing
# is properly tested.

suite "Beacon Chain Block Proofs - Bellatrix":
suite "History Block Proofs - Historical Roots":
let
cfg = block:
var res = defaultRuntimeConfig
fluffy/tests/history_network_tests/test_block_proof_historical_roots_vectors.nim
@@ -19,7 +19,7 @@ import
../../network/history/validation/block_proof_historical_roots,
../../eth_data/[yaml_utils, yaml_eth_types]

suite "History Block Proofs - Historical Roots":
suite "History Block Proofs - Historical Roots - Test Vectors":
test "BlockProofHistoricalRoots for Execution BlockHeader":
let
testsPath =
fluffy/tests/history_network_tests/test_block_proof_historical_summaries.nim
@@ -38,7 +38,7 @@ import
# - Adjust tests to test usage of historical_summaries and historical_roots
# together.

suite "Beacon Chain Block Proofs - Capella":
suite "History Block Proofs - Historical Summaries":
let
cfg = block:
var res = defaultRuntimeConfig
fluffy/tests/history_network_tests/test_block_proof_historical_summaries_vectors.nim
@@ -44,7 +44,7 @@ proc readHistoricalSummaries(
except SerializationError as err:
err("Failed decoding historical_summaries: " & err.msg)

suite "History Block Proofs - Historical Summaries":
suite "History Block Proofs - Historical Summaries - Test Vectors":
test "BlockProofHistoricalSummaries for Execution BlockHeader":
let
testsPath =