
Modularize validator store #6705

Draft · wants to merge 29 commits into base: unstable
Commits (29):
73743a5  Tracing executor and metrics rename (AgeManning, Sep 23, 2024)
eb0cef9  Merge latest unstable (AgeManning, Nov 26, 2024)
0ecd6c3  pass slots_per_epoch at runtime (jxs, Dec 16, 2024)
6f3fe95  remove generic E from unrequired types (jxs, Dec 18, 2024)
a9cc2fb  Merge branch 'unstable' of github.com:sigp/lighthouse into remove-e-f… (jxs, Dec 19, 2024)
30998cd  move `validator_store` to `lighthouse_validator_store` (dknopik, Dec 19, 2024)
f539f85  make validator_store into a trait (dknopik, Dec 10, 2024)
e473f63  further reduce dependencies (dknopik, Dec 10, 2024)
45ce473  remove `environment` dependency on `beacon_node_fallback` (dknopik, Dec 12, 2024)
be95852  Manually pull in some changes from tracing-integration (thanks sayan!) (dknopik, Dec 12, 2024)
77eca5a  remove `environment` from `validator_services` (dknopik, Dec 12, 2024)
9b7c160  unify boost factor accessors (dknopik, Dec 12, 2024)
b49845a  add builder for DutiesService (dknopik, Dec 13, 2024)
79367de  Manually merge tracing PR for beacon_node_fallback (dknopik, Dec 13, 2024)
838eed6  Fix chain_spec for BlockService (dknopik, Dec 13, 2024)
6533a67  address review (jxs, Dec 20, 2024)
36a4d2f  remove PhantomData from SyncDutiesMap (jxs, Dec 20, 2024)
c5ab120  Merge branch 'remove-e-from-validator-client' into modularize-validat… (dknopik, Jan 2, 2025)
ebfe234  fix tests (dknopik, Jan 2, 2025)
07dbf15  correct test (dknopik, Jan 2, 2025)
39aec1b  Add `E` to `ValidatorStore` as associated type (dknopik, Jan 3, 2025)
e0da923  fix tests (dknopik, Jan 3, 2025)
4510b73  derive Clone for ValidatorStore's Error and required sub-errors (dknopik, Jan 7, 2025)
3208430  Merge pull request #6768 from sigp/unstable (jimmygchen, Jan 8, 2025)
c0fffc0  Merge branch 'anchor' into modularize-validator-store (jimmygchen, Jan 8, 2025)
b6a3f0e  switch to enum for block signing to allow differing types (dknopik, Jan 9, 2025)
0130307  Merge remote-tracking branch 'origin/modularize-validator-store' into… (dknopik, Jan 9, 2025)
9a4768a  Anchor pre-PR: Decouple eth2 (#6770) (dknopik, Jan 9, 2025)
6ad04b0  Merge branch 'anchor' into modularize-validator-store (jimmygchen, Jan 9, 2025)
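Taken together, the commits above outline the refactor this PR performs: the Lighthouse-specific store moves into a `lighthouse_validator_store` crate, `ValidatorStore` becomes a trait with the `EthSpec` parameter `E` as an associated type, and its error type gains `Clone`. A minimal, hypothetical sketch of that shape follows; the names and signatures are illustrative stand-ins, not the PR's actual API, which is larger and differs in detail.

```rust
use std::fmt::Debug;

/// Stand-in for Lighthouse's `EthSpec` trait; only here so the sketch compiles.
pub trait EthSpec: 'static + Clone + Debug {}

/// Cf. commit "derive Clone for ValidatorStore's Error and required sub-errors":
/// errors are `Clone` so callers can freely propagate and store them.
#[derive(Clone, Debug)]
pub enum StoreError {
    UnknownPubkey,
    SigningFailed(String),
}

/// Cf. commit "make validator_store into a trait": the concrete Lighthouse store
/// becomes one implementation behind a shared interface.
pub trait ValidatorStore: Send + Sync {
    /// Cf. commit "Add `E` to `ValidatorStore` as associated type": the chain-spec
    /// parameter hangs off the store instead of being threaded through every
    /// dependent type as a generic parameter.
    type E: EthSpec;
    type Error: Clone + Debug;

    fn has_validator(&self, pubkey: &[u8; 48]) -> bool;
    fn sign_block_root(
        &self,
        pubkey: &[u8; 48],
        block_root: [u8; 32],
    ) -> Result<Vec<u8>, Self::Error>;
}
```

With the store behind a trait, validator services can be written against a generic `S: ValidatorStore` and reused by non-Lighthouse implementations, such as the Anchor client referenced in the merge commits.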
52 changes: 33 additions & 19 deletions Cargo.lock

Generated file; diff not rendered.

3 changes: 2 additions & 1 deletion Cargo.toml
@@ -89,11 +89,11 @@ members = [
"validator_client/http_api",
"validator_client/http_metrics",
"validator_client/initialized_validators",
"validator_client/lighthouse_validator_store",
"validator_client/signing_method",
"validator_client/slashing_protection",
"validator_client/validator_metrics",
"validator_client/validator_services",
"validator_client/validator_store",

"validator_manager",

@@ -253,6 +253,7 @@ int_to_bytes = { path = "consensus/int_to_bytes" }
kzg = { path = "crypto/kzg" }
metrics = { path = "common/metrics" }
lighthouse_network = { path = "beacon_node/lighthouse_network" }
lighthouse_validator_store = { path = "validator_client/lighthouse_validator_store" }
lighthouse_version = { path = "common/lighthouse_version" }
lockfile = { path = "common/lockfile" }
logging = { path = "common/logging" }
1 change: 0 additions & 1 deletion beacon_node/execution_layer/Cargo.toml
@@ -12,7 +12,6 @@ arc-swap = "1.6.0"
builder_client = { path = "../builder_client" }
bytes = { workspace = true }
eth2 = { workspace = true }
eth2_network_config = { workspace = true }
ethereum_serde_utils = { workspace = true }
ethereum_ssz = { workspace = true }
ethers-core = { workspace = true }
2 changes: 1 addition & 1 deletion beacon_node/http_api/Cargo.toml
@@ -30,6 +30,7 @@ rand = { workspace = true }
safe_arith = { workspace = true }
sensitive_url = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
slog = { workspace = true }
slot_clock = { workspace = true }
state_processing = { workspace = true }
@@ -48,7 +49,6 @@ warp_utils = { workspace = true }
genesis = { workspace = true }
logging = { workspace = true }
proto_array = { workspace = true }
serde_json = { workspace = true }

[[test]]
name = "bn_http_api_tests"
12 changes: 11 additions & 1 deletion beacon_node/http_api/src/database.rs
@@ -1,7 +1,17 @@
use beacon_chain::store::metadata::CURRENT_SCHEMA_VERSION;
use beacon_chain::{BeaconChain, BeaconChainTypes};
use eth2::lighthouse::DatabaseInfo;
use serde::Serialize;
use std::sync::Arc;
use store::{AnchorInfo, BlobInfo, Split, StoreConfig};

#[derive(Debug, Serialize)]
pub struct DatabaseInfo {
pub schema_version: u64,
pub config: StoreConfig,
pub split: Split,
pub anchor: AnchorInfo,
pub blob_info: BlobInfo,
}

pub fn info<T: BeaconChainTypes>(
chain: Arc<BeaconChain<T>>,
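The hunk above replaces the shared `eth2::lighthouse::DatabaseInfo` with a local, `Serialize`-only struct. For orientation, here is a rough sketch of how the `info` handler could populate it. It relies on the imports already shown in this diff; the `get_config()` and `get_blob_info()` accessors and the `warp::Rejection` return type are assumptions, while `get_anchor_info`, `get_split_info`, and `CURRENT_SCHEMA_VERSION` are confirmed by the tests further down. Treat it as illustrative rather than the PR's actual code.

```rust
// Hypothetical sketch only; uses the `use` statements from the diff above.
pub fn info<T: BeaconChainTypes>(
    chain: Arc<BeaconChain<T>>,
) -> Result<DatabaseInfo, warp::Rejection> {
    let store = &chain.store;
    Ok(DatabaseInfo {
        schema_version: CURRENT_SCHEMA_VERSION.as_u64(),
        config: store.get_config().clone(), // assumed accessor name
        split: store.get_split_info(),
        anchor: store.get_anchor_info(),
        blob_info: store.get_blob_info(), // assumed accessor name
    })
}
```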
56 changes: 30 additions & 26 deletions beacon_node/http_api/src/lib.rs
@@ -3059,9 +3059,9 @@ pub fn serve<T: BeaconChainTypes>(
peer_id: peer_id.to_string(),
enr: peer_info.enr().map(|enr| enr.to_base64()),
last_seen_p2p_address: address,
direction: api_types::PeerDirection::from_connection_direction(dir),
state: api_types::PeerState::from_peer_connection_status(
peer_info.connection_status(),
direction: api_types::PeerDirection::from((*dir).clone()),
state: api_types::PeerState::from(
peer_info.connection_status().clone(),
),
}));
}
@@ -3104,10 +3104,9 @@ pub fn serve<T: BeaconChainTypes>(

// the eth2 API spec implies only peers we have been connected to at some point should be included.
if let Some(dir) = peer_info.connection_direction() {
let direction =
api_types::PeerDirection::from_connection_direction(dir);
let state = api_types::PeerState::from_peer_connection_status(
peer_info.connection_status(),
let direction = api_types::PeerDirection::from((*dir).clone());
let state = api_types::PeerState::from(
peer_info.connection_status().clone(),
);

let state_matches = query.state.as_ref().map_or(true, |states| {
@@ -3160,9 +3159,8 @@ pub fn serve<T: BeaconChainTypes>(
.read()
.peers()
.for_each(|(_, peer_info)| {
let state = api_types::PeerState::from_peer_connection_status(
peer_info.connection_status(),
);
let state =
api_types::PeerState::from(peer_info.connection_status().clone());
match state {
api_types::PeerState::Connected => connected += 1,
api_types::PeerState::Connecting => connecting += 1,
@@ -4175,15 +4173,18 @@ pub fn serve<T: BeaconChainTypes>(
|task_spawner: TaskSpawner<T::EthSpec>,
network_globals: Arc<NetworkGlobals<T::EthSpec>>| {
task_spawner.blocking_json_task(Priority::P1, move || {
Ok(network_globals
.peers
.read()
.peers()
.map(|(peer_id, peer_info)| eth2::lighthouse::Peer {
let mut peers = vec![];
for (peer_id, peer_info) in network_globals.peers.read().peers() {
peers.push(eth2::lighthouse::Peer {
peer_id: peer_id.to_string(),
peer_info: peer_info.clone(),
})
.collect::<Vec<_>>())
peer_info: serde_json::to_value(peer_info).map_err(|e| {
warp_utils::reject::custom_not_found(format!(
"unable to serialize peer_info: {e:?}",
))
})?,
});
}
Ok(peers)
})
},
);
@@ -4199,15 +4200,18 @@ pub fn serve<T: BeaconChainTypes>(
|task_spawner: TaskSpawner<T::EthSpec>,
network_globals: Arc<NetworkGlobals<T::EthSpec>>| {
task_spawner.blocking_json_task(Priority::P1, move || {
Ok(network_globals
.peers
.read()
.connected_peers()
.map(|(peer_id, peer_info)| eth2::lighthouse::Peer {
let mut peers = vec![];
for (peer_id, peer_info) in network_globals.peers.read().connected_peers() {
peers.push(eth2::lighthouse::Peer {
peer_id: peer_id.to_string(),
peer_info: peer_info.clone(),
})
.collect::<Vec<_>>())
peer_info: serde_json::to_value(peer_info).map_err(|e| {
warp_utils::reject::custom_not_found(format!(
"unable to serialize peer_info: {e:?}",
))
})?,
});
}
Ok(peers)
})
},
);
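Both peer-listing hunks above switch `peer_info` from a typed clone to a `serde_json::Value`, serialized per peer and converted into a rejection if serialization fails. The same pattern is shown below in isolation with illustrative stand-in types (these are not the PR's `eth2::lighthouse::Peer` or `PeerInfo`):

```rust
use serde::Serialize;
use serde_json::Value;

// Stand-in for the network crate's peer metadata.
#[derive(Serialize)]
struct PeerInfoLike {
    connected: bool,
    score: f64,
}

// Stand-in for the HTTP response entry: peer details are carried as
// loosely typed JSON rather than a concrete struct.
#[derive(Serialize)]
struct PeerEntry {
    peer_id: String,
    peer_info: Value,
}

fn to_entry(peer_id: &str, info: &PeerInfoLike) -> Result<PeerEntry, String> {
    Ok(PeerEntry {
        peer_id: peer_id.to_string(),
        // `serde_json::to_value` can fail, so the error is surfaced to the
        // caller instead of being unwrapped, mirroring the handler above.
        peer_info: serde_json::to_value(info)
            .map_err(|e| format!("unable to serialize peer_info: {e:?}"))?,
    })
}

fn main() {
    let info = PeerInfoLike { connected: true, score: 0.0 };
    let entry = to_entry("example-peer-id", &info).unwrap();
    println!("{}", serde_json::to_string(&entry).unwrap());
}
```

Carrying the details as JSON keeps the HTTP response type loosely coupled to the network crate's concrete `PeerInfo`, which appears consistent with the dependency cleanup elsewhere in this PR.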
13 changes: 10 additions & 3 deletions beacon_node/http_api/tests/tests.rs
@@ -35,6 +35,7 @@ use state_processing::per_slot_processing;
use state_processing::state_advance::partial_state_advance;
use std::convert::TryInto;
use std::sync::Arc;
use store::{AnchorInfo, Split};
use tokio::time::Duration;
use tree_hash::TreeHash;
use types::application_domain::ApplicationDomain;
@@ -5646,10 +5647,16 @@ impl ApiTester {
pub async fn test_get_lighthouse_database_info(self) -> Self {
let info = self.client.get_lighthouse_database_info().await.unwrap();

assert_eq!(info.anchor, self.chain.store.get_anchor_info());
assert_eq!(info.split, self.chain.store.get_split_info());
assert_eq!(
info.schema_version,
serde_json::from_value::<AnchorInfo>(info.get("anchor").unwrap().clone()).unwrap(),
self.chain.store.get_anchor_info()
);
assert_eq!(
serde_json::from_value::<Split>(info.get("split").unwrap().clone()).unwrap(),
self.chain.store.get_split_info()
);
assert_eq!(
serde_json::from_value::<u64>(info.get("schema_version").unwrap().clone()).unwrap(),
store::metadata::CURRENT_SCHEMA_VERSION.as_u64()
);

1 change: 1 addition & 0 deletions beacon_node/lighthouse_network/Cargo.toml
@@ -13,6 +13,7 @@ directory = { workspace = true }
dirs = { workspace = true }
discv5 = { workspace = true }
either = { workspace = true }
eth2 = { workspace = true }
ethereum_ssz = { workspace = true }
ethereum_ssz_derive = { workspace = true }
fnv = { workspace = true }