diff --git a/Cargo.lock b/Cargo.lock index 2898a761f..0b912707d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1302,6 +1302,7 @@ dependencies = [ "anyhow", "borsh", "serde", + "sov-data-generators", "sov-modules-api", "sov-rollup-interface", "sov-state", @@ -1334,6 +1335,7 @@ dependencies = [ "serde_json", "sov-accounts", "sov-bank", + "sov-data-generators", "sov-election", "sov-evm", "sov-modules-api", @@ -2891,10 +2893,15 @@ version = "0.1.0" dependencies = [ "anyhow", "borsh", + "sov-chain-state", + "sov-data-generators", "sov-modules-api", + "sov-modules-macros", + "sov-modules-stf-template", "sov-rollup-interface", "sov-schema-db", "sov-state", + "sov-value-setter", "tempfile", ] @@ -2948,11 +2955,11 @@ checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" [[package]] name = "jmt" version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1a302f0defd323b833c9848c20ab40c3156128f50d7bf8eebeed2ef58167258" +source = "git+https://github.com/penumbra-zone/jmt#46b4b0042f66506536097d689ac2201e3e430afd" dependencies = [ "anyhow", "borsh", + "digest 0.10.7", "hashbrown 0.13.2", "hex", "ics23", @@ -6244,6 +6251,26 @@ dependencies = [ "tempfile", ] +[[package]] +name = "sov-chain-state" +version = "0.1.0" +dependencies = [ + "anyhow", + "borsh", + "jsonrpsee 0.18.2", + "serde", + "serde_json", + "sov-bank", + "sov-data-generators", + "sov-modules-api", + "sov-modules-macros", + "sov-modules-stf-template", + "sov-rollup-interface", + "sov-state", + "sov-value-setter", + "tempfile", +] + [[package]] name = "sov-cli" version = "0.1.0" @@ -6257,6 +6284,21 @@ dependencies = [ "sov-modules-api", ] +[[package]] +name = "sov-data-generators" +version = "0.1.0" +dependencies = [ + "borsh", + "proptest", + "sov-bank", + "sov-election", + "sov-modules-api", + "sov-modules-stf-template", + "sov-rollup-interface", + "sov-state", + "sov-value-setter", +] + [[package]] name = "sov-db" version = "0.1.0" @@ -6467,6 +6509,7 @@ dependencies = [ "sov-modules-api", "sov-rollup-interface", "sov-state", + "thiserror", "tracing", ] diff --git a/Cargo.toml b/Cargo.toml index 3b97a1e15..579a7b688 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -21,8 +21,10 @@ members = [ "module-system/sov-modules-api", "module-system/module-schemas", "module-system/utils/sov-first-read-last-write-cache", + "module-system/utils/sov-data-generators", "module-system/module-implementations/sov-accounts", "module-system/module-implementations/sov-bank", + "module-system/module-implementations/sov-chain-state", "module-system/module-implementations/sov-blob-storage", "module-system/module-implementations/sov-evm", "module-system/module-implementations/sov-prover-incentives", @@ -48,7 +50,8 @@ rust-version = "1.66" [workspace.dependencies] # Dependencies maintained by sovereign -jmt = "0.6.0" +# TODO: replace by release number once available on crates.io: tracking issue https://github.com/Sovereign-Labs/sovereign-sdk/issues/632 +jmt = { git = "https://github.com/penumbra-zone/jmt", commit = "46b4b00" } # External dependencies async-trait = "0.1.71" diff --git a/adapters/celestia/src/celestia.rs b/adapters/celestia/src/celestia.rs index 370dd9118..14daa8c10 100644 --- a/adapters/celestia/src/celestia.rs +++ b/adapters/celestia/src/celestia.rs @@ -11,6 +11,7 @@ use prost::bytes::Buf; use prost::Message; use serde::{Deserialize, Serialize}; use sov_rollup_interface::da::{BlockHeaderTrait as BlockHeader, CountedBufReader}; +use sov_rollup_interface::services::da::SlotData; use 
sov_rollup_interface::AddressTrait; pub use tendermint::block::Header as TendermintHeader; use tendermint::block::Height; @@ -29,7 +30,7 @@ use crate::pfb::{BlobTx, MsgPayForBlobs, Tx}; use crate::shares::{read_varint, BlobIterator, BlobRefIterator, NamespaceGroup}; use crate::utils::BoxError; use crate::verifier::address::CelestiaAddress; -use crate::verifier::{TmHash, PFB_NAMESPACE}; +use crate::verifier::{ChainValidityCondition, TmHash, PFB_NAMESPACE}; #[derive(Deserialize, Serialize, PartialEq, Debug, Clone)] pub struct MarshalledDataAvailabilityHeader { @@ -303,6 +304,30 @@ impl BlockHeader for CelestiaHeader { } } +/// We implement [`SlotData`] for [`CelestiaHeader`] in a similar fashion as for [`FilteredCelestiaBlock`] +impl SlotData for CelestiaHeader { + type BlockHeader = CelestiaHeader; + type Cond = ChainValidityCondition; + + fn hash(&self) -> [u8; 32] { + match self.header.hash() { + tendermint::Hash::Sha256(h) => h, + tendermint::Hash::None => unreachable!("tendermint::Hash::None should not be possible"), + } + } + + fn header(&self) -> &Self::BlockHeader { + self + } + + fn validity_condition(&self) -> ChainValidityCondition { + ChainValidityCondition { + prev_hash: *self.header().prev_hash().inner(), + block_hash: ::hash(self), + } + } +} + #[derive(Deserialize, Serialize, PartialEq, Debug, Clone)] pub struct CelestiaVersion { pub block: u32, diff --git a/adapters/celestia/src/types.rs b/adapters/celestia/src/types.rs index 7e5481927..591e7b636 100644 --- a/adapters/celestia/src/types.rs +++ b/adapters/celestia/src/types.rs @@ -6,6 +6,7 @@ use base64::Engine; use borsh::{BorshDeserialize, BorshSerialize}; pub use nmt_rs::NamespaceId; use serde::{Deserialize, Serialize}; +use sov_rollup_interface::da::BlockHeaderTrait; use sov_rollup_interface::services::da::SlotData; use sov_rollup_interface::Bytes; use tendermint::crypto::default::Sha256; @@ -14,7 +15,7 @@ use tendermint::merkle; use crate::pfb::MsgPayForBlobs; use crate::shares::{NamespaceGroup, Share}; use crate::utils::BoxError; -use crate::verifier::PARITY_SHARES_NAMESPACE; +use crate::verifier::{ChainValidityCondition, PARITY_SHARES_NAMESPACE}; use crate::{CelestiaHeader, TxPosition}; #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] @@ -81,6 +82,7 @@ pub struct FilteredCelestiaBlock { impl SlotData for FilteredCelestiaBlock { type BlockHeader = CelestiaHeader; + type Cond = ChainValidityCondition; fn hash(&self) -> [u8; 32] { match self.header.header.hash() { @@ -92,6 +94,13 @@ impl SlotData for FilteredCelestiaBlock { fn header(&self) -> &Self::BlockHeader { &self.header } + + fn validity_condition(&self) -> ChainValidityCondition { + ChainValidityCondition { + prev_hash: *self.header().prev_hash().inner(), + block_hash: self.hash(), + } + } } impl FilteredCelestiaBlock { diff --git a/adapters/celestia/src/verifier/mod.rs b/adapters/celestia/src/verifier/mod.rs index 819244243..f0fef3d69 100644 --- a/adapters/celestia/src/verifier/mod.rs +++ b/adapters/celestia/src/verifier/mod.rs @@ -1,3 +1,4 @@ +use borsh::{BorshDeserialize, BorshSerialize}; use nmt_rs::NamespaceId; use serde::{Deserialize, Serialize}; use sov_rollup_interface::da::{ @@ -97,6 +98,8 @@ impl DaSpec for CelestiaSpec { type CompletenessProof = Vec; type ChainParams = RollupParams; + + type ValidityCondition = ChainValidityCondition; } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -104,7 +107,18 @@ pub struct RollupParams { pub namespace: NamespaceId, } -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)] +#[derive( + 
Debug, + Clone, + Copy, + PartialEq, + Eq, + Serialize, + Deserialize, + Hash, + BorshDeserialize, + BorshSerialize, +)] /// A validity condition expressing that a chain of DA layer blocks is contiguous and canonical pub struct ChainValidityCondition { pub prev_hash: [u8; 32], @@ -131,8 +145,6 @@ impl da::DaVerifier for CelestiaVerifier { type Error = ValidationError; - type ValidityCondition = ChainValidityCondition; - fn new(params: ::ChainParams) -> Self { Self { rollup_namespace: params.namespace, @@ -145,7 +157,7 @@ impl da::DaVerifier for CelestiaVerifier { txs: &[::BlobTransaction], inclusion_proof: ::InclusionMultiProof, completeness_proof: ::CompletenessProof, - ) -> Result { + ) -> Result<::ValidityCondition, Self::Error> { // Validate that the provided DAH is well-formed block_header.validate_dah()?; let validity_condition = ChainValidityCondition { diff --git a/adapters/risc0/src/guest.rs b/adapters/risc0/src/guest.rs index f664e0ff2..873aa882e 100644 --- a/adapters/risc0/src/guest.rs +++ b/adapters/risc0/src/guest.rs @@ -1,6 +1,7 @@ #[cfg(target_os = "zkvm")] use risc0_zkvm::guest::env; use sov_rollup_interface::zk::{Zkvm, ZkvmGuest}; +use sov_rollup_interface::AddressTrait; use crate::Risc0MethodId; @@ -37,7 +38,18 @@ impl Zkvm for Risc0Guest { _serialized_proof: &'a [u8], _code_commitment: &Self::CodeCommitment, ) -> Result<&'a [u8], Self::Error> { - // Implement this method once risc0 supports recursion - todo!() + // Implement this method once risc0 supports recursion: issue #633 + todo!("Implement once risc0 supports recursion: https://github.com/Sovereign-Labs/sovereign-sdk/issues/633") + } + + fn verify_and_extract_output< + C: sov_rollup_interface::zk::ValidityCondition, + Add: AddressTrait, + >( + _serialized_proof: &[u8], + _code_commitment: &Self::CodeCommitment, + ) -> Result, Self::Error> { + // Implement this method once risc0 supports recursion: issue https://github.com/Sovereign-Labs/sovereign-sdk/issues/633 + todo!("Implement once risc0 supports recursion: https://github.com/Sovereign-Labs/sovereign-sdk/issues/633") } } diff --git a/adapters/risc0/src/host.rs b/adapters/risc0/src/host.rs index e7ca82a93..56f91d672 100644 --- a/adapters/risc0/src/host.rs +++ b/adapters/risc0/src/host.rs @@ -6,6 +6,7 @@ use risc0_zkvm::{ Executor, ExecutorEnvBuilder, LocalExecutor, SegmentReceipt, Session, SessionReceipt, }; use sov_rollup_interface::zk::{Zkvm, ZkvmHost}; +use sov_rollup_interface::AddressTrait; use crate::Risc0MethodId; @@ -55,6 +56,16 @@ impl<'prover> Zkvm for Risc0Host<'prover> { ) -> Result<&'a [u8], Self::Error> { verify_from_slice(serialized_proof, code_commitment) } + + fn verify_and_extract_output< + C: sov_rollup_interface::zk::ValidityCondition, + Add: AddressTrait, + >( + _serialized_proof: &[u8], + _code_commitment: &Self::CodeCommitment, + ) -> Result, Self::Error> { + todo!("Implement once risc0 supports recursion, issue https://github.com/Sovereign-Labs/sovereign-sdk/issues/633") + } } pub struct Risc0Verifier; @@ -70,6 +81,18 @@ impl Zkvm for Risc0Verifier { ) -> Result<&'a [u8], Self::Error> { verify_from_slice(serialized_proof, code_commitment) } + + fn verify_and_extract_output< + C: sov_rollup_interface::zk::ValidityCondition, + Add: AddressTrait, + >( + _serialized_proof: &[u8], + _code_commitment: &Self::CodeCommitment, + ) -> Result, Self::Error> { + // Method to implement: not clear how to deserialize the proof output. 
+ // Issue https://github.com/Sovereign-Labs/sovereign-sdk/issues/621 + todo!("not clear how to deserialize the proof output. Issue https://github.com/Sovereign-Labs/sovereign-sdk/issues/621") + } } fn verify_from_slice<'a>( diff --git a/examples/demo-nft-module/Cargo.toml b/examples/demo-nft-module/Cargo.toml index 077ed8298..ced29a107 100644 --- a/examples/demo-nft-module/Cargo.toml +++ b/examples/demo-nft-module/Cargo.toml @@ -19,6 +19,7 @@ sov-state = { path = "../../module-system/sov-state", default-features = false } [dev-dependencies] sov-rollup-interface = { path = "../../rollup-interface" } +sov-data-generators = { path = "../../module-system/utils/sov-data-generators" } tempfile = { workspace = true } diff --git a/examples/demo-nft-module/tests/nft_test.rs b/examples/demo-nft-module/tests/nft_test.rs index d904e23f5..3a292d72f 100644 --- a/examples/demo-nft-module/tests/nft_test.rs +++ b/examples/demo-nft-module/tests/nft_test.rs @@ -2,7 +2,7 @@ use demo_nft_module::call::CallMessage; use demo_nft_module::query::OwnerResponse; use demo_nft_module::{NonFungibleToken, NonFungibleTokenConfig}; use sov_modules_api::default_context::DefaultContext; -use sov_modules_api::test_utils::generate_address as gen_addr_generic; +use sov_modules_api::utils::generate_address as gen_addr_generic; use sov_modules_api::{Address, Context, Module}; use sov_rollup_interface::stf::Event; use sov_state::{DefaultStorageSpec, ProverStorage, WorkingSet}; @@ -21,7 +21,7 @@ fn genesis_and_mint() { let owner2 = generate_address("owner2"); let config: NonFungibleTokenConfig = NonFungibleTokenConfig { admin, - owners: vec![(0, owner1.clone())], + owners: vec![(0, owner1)], }; let tmpdir = tempfile::tempdir().unwrap(); @@ -40,7 +40,7 @@ fn genesis_and_mint() { // Mint, anybody can mint let mint_message = CallMessage::Mint { id: 1 }; - let owner2_context = C::new(owner2.clone()); + let owner2_context = C::new(owner2); nft.call(mint_message.clone(), &owner2_context, &mut working_set) .expect("Minting failed"); @@ -63,23 +63,20 @@ fn genesis_and_mint() { fn transfer() { // Preparation let admin = generate_address("admin"); - let admin_context = C::new(admin.clone()); + let admin_context = C::new(admin); let owner1 = generate_address("owner2"); - let owner1_context = C::new(owner1.clone()); + let owner1_context = C::new(owner1); let owner2 = generate_address("owner2"); let config: NonFungibleTokenConfig = NonFungibleTokenConfig { - admin: admin.clone(), - owners: vec![(0, admin.clone()), (1, owner1.clone()), (2, owner2.clone())], + admin, + owners: vec![(0, admin), (1, owner1), (2, owner2)], }; let tmpdir = tempfile::tempdir().unwrap(); let mut working_set = WorkingSet::new(ProverStorage::with_path(tmpdir.path()).unwrap()); let nft = NonFungibleToken::default(); nft.genesis(&config, &mut working_set).unwrap(); - let transfer_message = CallMessage::Transfer { - id: 1, - to: owner2.clone(), - }; + let transfer_message = CallMessage::Transfer { id: 1, to: owner2 }; // admin cannot transfer token of the owner1 let transfer_attempt = nft.call(transfer_message.clone(), &admin_context, &mut working_set); @@ -122,9 +119,9 @@ fn transfer() { fn burn() { // Preparation let admin = generate_address("admin"); - let admin_context = C::new(admin.clone()); + let admin_context = C::new(admin); let owner1 = generate_address("owner2"); - let owner1_context = C::new(owner1.clone()); + let owner1_context = C::new(owner1); let config: NonFungibleTokenConfig = NonFungibleTokenConfig { admin, owners: vec![(0, owner1)], diff --git 
a/examples/demo-prover/Cargo.lock b/examples/demo-prover/Cargo.lock index 1e3db0d2d..cceb3cac7 100644 --- a/examples/demo-prover/Cargo.lock +++ b/examples/demo-prover/Cargo.lock @@ -1461,11 +1461,11 @@ checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" [[package]] name = "jmt" version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1a302f0defd323b833c9848c20ab40c3156128f50d7bf8eebeed2ef58167258" +source = "git+https://github.com/penumbra-zone/jmt#46b4b0042f66506536097d689ac2201e3e430afd" dependencies = [ "anyhow", "borsh", + "digest 0.10.7", "hashbrown 0.13.2", "hex", "ics23", @@ -3368,6 +3368,7 @@ dependencies = [ "sov-modules-api", "sov-rollup-interface", "sov-state", + "thiserror", "tracing", ] diff --git a/examples/demo-prover/host/src/main.rs b/examples/demo-prover/host/src/main.rs index 6840bc4d6..9f3899e98 100644 --- a/examples/demo-prover/host/src/main.rs +++ b/examples/demo-prover/host/src/main.rs @@ -6,7 +6,7 @@ use demo_stf::app::{App, DefaultPrivateKey}; use demo_stf::genesis_config::create_demo_genesis_config; use jupiter::da_service::{CelestiaService, DaServiceConfig}; use jupiter::types::NamespaceId; -use jupiter::verifier::RollupParams; +use jupiter::verifier::{ChainValidityCondition, RollupParams}; use methods::{ROLLUP_ELF, ROLLUP_ID}; use risc0_adapter::host::{Risc0Host, Risc0Verifier}; use serde::Deserialize; @@ -54,11 +54,10 @@ async fn main() -> Result<(), anyhow::Error> { let sequencer_private_key = DefaultPrivateKey::generate(); - let app: App = + let mut app: App = App::new(rollup_config.runner.storage.clone()); let is_storage_empty = app.get_storage().is_empty(); - let mut demo = app.stf; if is_storage_empty { let genesis_config = create_demo_genesis_config( @@ -69,13 +68,13 @@ async fn main() -> Result<(), anyhow::Error> { &sequencer_private_key, ); info!("Starting from empty storage, initialization chain"); - demo.init_chain(genesis_config); + app.stf.init_chain(genesis_config); } - let mut prev_state_root = { - let res = demo.apply_slot(Default::default(), []); - res.state_root.0 - }; + let mut prev_state_root = app + .get_storage() + .get_state_root(&Default::default()) + .expect("The storage needs to have a state root"); for height in rollup_config.start_height.. 
{ let mut host = Risc0Host::new(ROLLUP_ELF); @@ -103,7 +102,9 @@ async fn main() -> Result<(), anyhow::Error> { host.write_to_guest(&completeness_proof); host.write_to_guest(&blobs); - let result = demo.apply_slot(Default::default(), &mut blobs); + let result = app + .stf + .apply_slot(Default::default(), &filtered_block, &mut blobs); host.write_to_guest(&result.witness); diff --git a/examples/demo-prover/methods/guest/Cargo.lock b/examples/demo-prover/methods/guest/Cargo.lock index 154f49725..14e0a8f96 100644 --- a/examples/demo-prover/methods/guest/Cargo.lock +++ b/examples/demo-prover/methods/guest/Cargo.lock @@ -1032,11 +1032,11 @@ checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" [[package]] name = "jmt" version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1a302f0defd323b833c9848c20ab40c3156128f50d7bf8eebeed2ef58167258" +source = "git+https://github.com/penumbra-zone/jmt#46b4b0042f66506536097d689ac2201e3e430afd" dependencies = [ "anyhow", "borsh", + "digest 0.10.7", "hashbrown 0.13.2", "hex", "ics23", @@ -2111,6 +2111,7 @@ dependencies = [ "sov-modules-api", "sov-rollup-interface", "sov-state", + "thiserror", "tracing", ] diff --git a/examples/demo-prover/methods/guest/src/bin/rollup.rs b/examples/demo-prover/methods/guest/src/bin/rollup.rs index 84bfd0902..45963439a 100644 --- a/examples/demo-prover/methods/guest/src/bin/rollup.rs +++ b/examples/demo-prover/methods/guest/src/bin/rollup.rs @@ -2,16 +2,17 @@ #![no_main] -use const_rollup_config::ROLLUP_NAMESPACE_RAW; +use const_rollup_config::{ROLLUP_NAMESPACE_RAW, SEQUENCER_DA_ADDRESS}; use demo_stf::app::create_zk_app_template; use demo_stf::ArrayWitness; use jupiter::types::NamespaceId; -use jupiter::verifier::{CelestiaSpec, CelestiaVerifier}; +use jupiter::verifier::{CelestiaSpec, CelestiaVerifier, ChainValidityCondition}; use jupiter::{BlobWithSender, CelestiaHeader}; use risc0_adapter::guest::Risc0Guest; use risc0_zkvm::guest::env; use sov_rollup_interface::crypto::NoOpHasher; use sov_rollup_interface::da::{DaSpec, DaVerifier}; +use sov_rollup_interface::services::da::SlotData; use sov_rollup_interface::stf::StateTransitionFunction; use sov_rollup_interface::zk::{StateTransition, ZkvmGuest}; @@ -44,13 +45,15 @@ pub fn main() { env::write(&"blobs have been read\n"); // Step 2: Apply blobs - let mut app = create_zk_app_template::(prev_state_root_hash); + let mut app = create_zk_app_template::( + prev_state_root_hash, + ); let witness: ArrayWitness = guest.read_from_host(); env::write(&"Witness have been read\n"); env::write(&"Applying slot...\n"); - let result = app.apply_slot(witness, &mut blobs); + let result = app.apply_slot(witness, &header, &mut blobs); env::write(&"Slot has been applied\n"); @@ -68,6 +71,8 @@ pub fn main() { initial_state_root: prev_state_root_hash, final_state_root: result.state_root.0, validity_condition, + rewarded_address: SEQUENCER_DA_ADDRESS.to_vec(), + slot_hash: header.hash(), }; env::commit(&output); env::write(&"new state root committed\n"); diff --git a/examples/demo-rollup/benches/rollup_bench.rs b/examples/demo-rollup/benches/rollup_bench.rs index ab54b659f..3488b7adc 100644 --- a/examples/demo-rollup/benches/rollup_bench.rs +++ b/examples/demo-rollup/benches/rollup_bench.rs @@ -14,7 +14,9 @@ use sov_db::ledger_db::{LedgerDB, SlotCommit}; use sov_demo_rollup::rng_xfers::RngDaService; use sov_modules_api::default_signature::private_key::DefaultPrivateKey; use sov_modules_api::PrivateKey; -use 
sov_rollup_interface::mocks::{TestBlob, TestBlock, TestBlockHeader, TestHash}; +use sov_rollup_interface::mocks::{ + TestBlob, TestBlock, TestBlockHeader, TestHash, TestValidityCond, +}; use sov_rollup_interface::services::da::DaService; use sov_rollup_interface::stf::StateTransitionFunction; use sov_stf_runner::{from_toml_path, RollupConfig}; @@ -42,8 +44,9 @@ fn rollup_bench(_bench: &mut Criterion) { let da_service = Arc::new(RngDaService::new()); - let demo_runner = - App::>::new(rollup_config.runner.storage); + let demo_runner = App::>::new( + rollup_config.runner.storage, + ); let mut demo = demo_runner.stf; let sequencer_private_key = DefaultPrivateKey::generate(); @@ -54,13 +57,8 @@ fn rollup_bench(_bench: &mut Criterion) { &sequencer_private_key, &sequencer_private_key, ); - let _prev_state_root = { - // Check if the rollup has previously been initialized - demo.init_chain(demo_genesis_config); - let apply_block_result = demo.apply_slot(Default::default(), []); - let prev_state_root = apply_block_result.state_root; - prev_state_root.0 - }; + + demo.init_chain(demo_genesis_config); // data generation let mut blobs = vec![]; @@ -75,8 +73,9 @@ fn rollup_bench(_bench: &mut Criterion) { prev_hash: TestHash([0u8; 32]), }, height, + validity_cond: TestValidityCond::default(), }; - blocks.push(filtered_block.clone()); + blocks.push(filtered_block); let blob_txs = da_service.extract_relevant_txs(&filtered_block); blobs.push(blob_txs.clone()); @@ -87,10 +86,12 @@ fn rollup_bench(_bench: &mut Criterion) { b.iter(|| { let filtered_block = &blocks[height as usize]; - let mut data_to_commit = SlotCommit::new(filtered_block.clone()); - - let apply_block_result = - demo.apply_slot(Default::default(), &mut blobs[height as usize]); + let mut data_to_commit = SlotCommit::new(*filtered_block); + let apply_block_result = demo.apply_slot( + Default::default(), + data_to_commit.slot_data(), + &mut blobs[height as usize], + ); for receipts in apply_block_result.batch_receipts { data_to_commit.add_batch(receipts); } diff --git a/examples/demo-rollup/benches/rollup_coarse_measure.rs b/examples/demo-rollup/benches/rollup_coarse_measure.rs index 63a79c618..d586ba595 100644 --- a/examples/demo-rollup/benches/rollup_coarse_measure.rs +++ b/examples/demo-rollup/benches/rollup_coarse_measure.rs @@ -14,7 +14,9 @@ use sov_db::ledger_db::{LedgerDB, SlotCommit}; use sov_demo_rollup::rng_xfers::RngDaService; use sov_modules_api::default_signature::private_key::DefaultPrivateKey; use sov_modules_api::PrivateKey; -use sov_rollup_interface::mocks::{TestBlob, TestBlock, TestBlockHeader, TestHash}; +use sov_rollup_interface::mocks::{ + TestBlob, TestBlock, TestBlockHeader, TestHash, TestValidityCond, +}; use sov_rollup_interface::services::da::DaService; use sov_rollup_interface::stf::StateTransitionFunction; use sov_stf_runner::{from_toml_path, RollupConfig}; @@ -90,8 +92,9 @@ async fn main() -> Result<(), anyhow::Error> { let da_service = Arc::new(RngDaService::new()); - let demo_runner = - App::>::new(rollup_config.runner.storage); + let demo_runner = App::>::new( + rollup_config.runner.storage, + ); let mut demo = demo_runner.stf; let sequencer_private_key = DefaultPrivateKey::generate(); @@ -102,13 +105,8 @@ async fn main() -> Result<(), anyhow::Error> { &sequencer_private_key, &sequencer_private_key, ); - let _prev_state_root = { - // Check if the rollup has previously been initialized - demo.init_chain(demo_genesis_config); - let apply_block_result = demo.apply_slot(Default::default(), []); - let prev_state_root = 
apply_block_result.state_root; - prev_state_root.0 - }; + + demo.init_chain(demo_genesis_config); // data generation let mut blobs = vec![]; @@ -124,8 +122,9 @@ async fn main() -> Result<(), anyhow::Error> { prev_hash: TestHash([0u8; 32]), }, height, + validity_cond: TestValidityCond::default(), }; - blocks.push(filtered_block.clone()); + blocks.push(filtered_block); let blob_txs = da_service.extract_relevant_txs(&filtered_block); blobs.push(blob_txs); @@ -137,11 +136,15 @@ async fn main() -> Result<(), anyhow::Error> { for height in start_height..end_height { let filtered_block = &blocks[height as usize]; - let mut data_to_commit = SlotCommit::new(filtered_block.clone()); + let mut data_to_commit = SlotCommit::new(*filtered_block); let now = Instant::now(); - let apply_block_results = demo.apply_slot(Default::default(), &mut blobs[height as usize]); + let apply_block_results = demo.apply_slot( + Default::default(), + data_to_commit.slot_data(), + &mut blobs[height as usize], + ); apply_block_time += now.elapsed(); h_apply_block.observe(now.elapsed().as_secs_f64()); diff --git a/examples/demo-rollup/src/main.rs b/examples/demo-rollup/src/main.rs index 6ff4ea635..15baf5e5c 100644 --- a/examples/demo-rollup/src/main.rs +++ b/examples/demo-rollup/src/main.rs @@ -2,6 +2,7 @@ use std::env; use std::sync::Arc; use anyhow::Context; +use borsh::{BorshDeserialize, BorshSerialize}; use const_rollup_config::{ROLLUP_NAMESPACE_RAW, SEQUENCER_DA_ADDRESS}; use demo_stf::app::{App, DefaultContext, DefaultPrivateKey}; use demo_stf::genesis_config::create_demo_genesis_config; @@ -10,13 +11,15 @@ use jupiter::da_service::CelestiaService; #[cfg(feature = "experimental")] use jupiter::da_service::DaServiceConfig; use jupiter::types::NamespaceId; -use jupiter::verifier::RollupParams; +use jupiter::verifier::{ChainValidityCondition, RollupParams}; use risc0_adapter::host::Risc0Verifier; use sov_db::ledger_db::LedgerDB; #[cfg(feature = "experimental")] use sov_ethereum::get_ethereum_rpc; use sov_modules_stf_template::{SequencerOutcome, TxEffect}; +use sov_rollup_interface::da::DaSpec; use sov_rollup_interface::services::da::DaService; +use sov_rollup_interface::zk::ValidityConditionChecker; use sov_sequencer::get_sequencer_rpc; use sov_state::storage::Storage; use sov_stf_runner::{from_toml_path, get_ledger_rpc, RollupConfig, StateTransitionRunner}; @@ -73,6 +76,23 @@ pub fn get_genesis_config() -> GenesisConfig { ) } +#[derive(Debug, BorshSerialize, BorshDeserialize)] +pub struct CelestiaChainChecker { + current_block_hash: [u8; 32], +} + +impl ValidityConditionChecker for CelestiaChainChecker { + type Error = anyhow::Error; + + fn check(&mut self, condition: &ChainValidityCondition) -> Result<(), anyhow::Error> { + anyhow::ensure!( + condition.block_hash == self.current_block_hash, + "Invalid block hash" + ); + Ok(()) + } +} + #[tokio::main] async fn main() -> Result<(), anyhow::Error> { let rollup_config_path = env::args() @@ -101,7 +121,7 @@ async fn main() -> Result<(), anyhow::Error> { ) .await; - let mut app: App = + let mut app: App = App::new(rollup_config.runner.storage.clone()); let storage = app.get_storage(); @@ -135,7 +155,11 @@ async fn main() -> Result<(), anyhow::Error> { fn register_sequencer( da_service: DA, - demo_runner: &mut App, + demo_runner: &mut App< + Risc0Verifier, + ::ValidityCondition, + jupiter::BlobWithSender, + >, methods: &mut jsonrpsee::RpcModule<()>, ) -> Result<(), anyhow::Error> where diff --git a/examples/demo-rollup/src/rng_xfers.rs b/examples/demo-rollup/src/rng_xfers.rs 
index e07efe174..c3ff6c648 100644 --- a/examples/demo-rollup/src/rng_xfers.rs +++ b/examples/demo-rollup/src/rng_xfers.rs @@ -5,13 +5,15 @@ use borsh::ser::BorshSerialize; use const_rollup_config::SEQUENCER_DA_ADDRESS; use demo_stf::runtime::Runtime; use jupiter::verifier::address::CelestiaAddress; -use sov_bank::{CallMessage, Coins}; +use sov_bank::{Bank, CallMessage, Coins}; use sov_modules_api::default_context::DefaultContext; use sov_modules_api::default_signature::private_key::DefaultPrivateKey; use sov_modules_api::transaction::Transaction; -use sov_modules_api::{Address, AddressBech32, PrivateKey, PublicKey, Spec}; +use sov_modules_api::{Address, AddressBech32, EncodeCall, PrivateKey, PublicKey, Spec}; use sov_rollup_interface::da::DaSpec; -use sov_rollup_interface::mocks::{TestBlob, TestBlock, TestBlockHeader, TestHash}; +use sov_rollup_interface::mocks::{ + TestBlob, TestBlock, TestBlockHeader, TestHash, TestValidityCond, +}; use sov_rollup_interface::services::da::DaService; pub struct RngDaService; @@ -34,10 +36,11 @@ fn generate_transfers(n: usize, start_nonce: u64) -> Vec { to: address, coins: Coins { amount: 1, - token_address: token_address.clone(), + token_address, }, }; - let enc_msg = Runtime::::encode_bank_call(msg); + let enc_msg = + as EncodeCall>>::encode_call(msg); let tx = Transaction::::new_signed_tx(&pk, enc_msg, start_nonce + (i as u64)); let ser_tx = tx.try_to_vec().unwrap(); @@ -60,10 +63,10 @@ fn generate_create(start_nonce: u64) -> Vec { salt: 11, token_name: "sov-test-token".to_string(), initial_balance: 100000000, - minter_address: minter_address.clone(), + minter_address, authorized_minters: vec![minter_address], }; - let enc_msg = Runtime::::encode_bank_call(msg); + let enc_msg = as EncodeCall>>::encode_call(msg); let tx = Transaction::::new_signed_tx(&pk, enc_msg, start_nonce); let ser_tx = tx.try_to_vec().unwrap(); message_vec.push(ser_tx); @@ -91,6 +94,7 @@ impl DaSpec for RngDaSpec { type InclusionMultiProof = [u8; 32]; type CompletenessProof = (); type ChainParams = (); + type ValidityCondition = TestValidityCond; } #[async_trait] @@ -118,6 +122,7 @@ impl DaService for RngDaService { prev_hash: TestHash([0u8; 32]), }, height, + validity_cond: TestValidityCond { is_valid: true }, }; Ok(block) diff --git a/examples/demo-rollup/src/test_rpc.rs b/examples/demo-rollup/src/test_rpc.rs index 4180d6fab..37b3863d0 100644 --- a/examples/demo-rollup/src/test_rpc.rs +++ b/examples/demo-rollup/src/test_rpc.rs @@ -7,6 +7,7 @@ use proptest::{prop_compose, proptest}; use reqwest::header::CONTENT_TYPE; use serde_json::json; use sov_db::ledger_db::{LedgerDB, SlotCommit}; +use sov_rollup_interface::mocks::TestValidityCond; #[cfg(test)] use sov_rollup_interface::mocks::{TestBlock, TestBlockHeader, TestHash}; use sov_rollup_interface::services::da::SlotData; @@ -115,6 +116,7 @@ fn regular_test_helper(payload: serde_json::Value, expected: &serde_json::Value) prev_hash: TestHash(sha2::Sha256::digest(b"prev_header")), }, height: 0, + validity_cond: TestValidityCond::default(), })]; let batches = vec![ @@ -317,7 +319,8 @@ prop_compose! 
{ header: TestBlockHeader { prev_hash, }, - height: 0 + height: 0, + validity_cond: TestValidityCond::default() }); total_num_batches += batches.len(); diff --git a/examples/demo-simple-stf/src/lib.rs b/examples/demo-simple-stf/src/lib.rs index 1bd2ce1f0..03089d6a4 100644 --- a/examples/demo-simple-stf/src/lib.rs +++ b/examples/demo-simple-stf/src/lib.rs @@ -1,18 +1,21 @@ #![deny(missing_docs)] #![doc = include_str!("../README.md")] use std::io::Read; +use std::marker::PhantomData; use sha2::Digest; use sov_rollup_interface::da::BlobReaderTrait; +use sov_rollup_interface::services::da::SlotData; use sov_rollup_interface::stf::{BatchReceipt, SlotResult, StateTransitionFunction}; -use sov_rollup_interface::zk::Zkvm; - -#[derive(PartialEq, Debug, Clone, Eq, serde::Serialize, serde::Deserialize)] +use sov_rollup_interface::zk::{ValidityCondition, Zkvm}; /// An implementation of the /// [`StateTransitionFunction`](sov_rollup_interface::stf::StateTransitionFunction) /// that is specifically designed to check if someone knows a preimage of a specific hash. -pub struct CheckHashPreimageStf {} +#[derive(PartialEq, Debug, Clone, Eq, serde::Serialize, serde::Deserialize, Default)] +pub struct CheckHashPreimageStf { + phantom_data: PhantomData, +} /// Outcome of the apply_slot method. #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] @@ -23,7 +26,9 @@ pub enum ApplySlotResult { Success, } -impl StateTransitionFunction for CheckHashPreimageStf { +impl StateTransitionFunction + for CheckHashPreimageStf +{ // Since our rollup is stateless, we don't need to consider the StateRoot. type StateRoot = (); @@ -40,14 +45,17 @@ impl StateTransitionFunction for CheckHashP // However, in this tutorial, we won't use it. type Witness = (); + type Condition = Cond; + // Perform one-time initialization for the genesis block. 
fn init_chain(&mut self, _params: Self::InitialState) { // Do nothing } - fn apply_slot<'a, I>( + fn apply_slot<'a, I, Data>( &mut self, _witness: Self::Witness, + _slot_data: &Data, blobs: I, ) -> SlotResult< Self::StateRoot, @@ -57,6 +65,7 @@ impl StateTransitionFunction for CheckHashP > where I: IntoIterator, + Data: SlotData, { let mut receipts = vec![]; for blob in blobs { @@ -99,4 +108,8 @@ impl StateTransitionFunction for CheckHashP witness: (), } } + + fn get_current_state_root(&self) -> anyhow::Result { + Ok(()) + } } diff --git a/examples/demo-simple-stf/tests/stf_test.rs b/examples/demo-simple-stf/tests/stf_test.rs index 9620e98ed..4f45656a1 100644 --- a/examples/demo-simple-stf/tests/stf_test.rs +++ b/examples/demo-simple-stf/tests/stf_test.rs @@ -2,7 +2,7 @@ use std::fmt::Display; use std::str::FromStr; use demo_simple_stf::{ApplySlotResult, CheckHashPreimageStf}; -use sov_rollup_interface::mocks::{MockZkvm, TestBlob}; +use sov_rollup_interface::mocks::{MockZkvm, TestBlob, TestBlock, TestValidityCond}; use sov_rollup_interface::stf::StateTransitionFunction; use sov_rollup_interface::AddressTrait; @@ -62,14 +62,19 @@ fn test_stf() { let preimage = vec![0; 32]; let test_blob = TestBlob::::new(preimage, address, [0; 32]); - let stf = &mut CheckHashPreimageStf {}; + let stf = &mut CheckHashPreimageStf::::default(); + let data = TestBlock::default(); let mut blobs = [test_blob]; StateTransitionFunction::>::init_chain(stf, ()); - let result = - StateTransitionFunction::>::apply_slot(stf, (), &mut blobs); + let result = StateTransitionFunction::>::apply_slot( + stf, + (), + &data, + &mut blobs, + ); assert_eq!(1, result.batch_receipts.len()); let receipt = result.batch_receipts[0].clone(); diff --git a/examples/demo-stf/Cargo.toml b/examples/demo-stf/Cargo.toml index a98699989..1be913d81 100644 --- a/examples/demo-stf/Cargo.toml +++ b/examples/demo-stf/Cargo.toml @@ -38,12 +38,12 @@ sov-state = { path = "../../module-system/sov-state", default-features = false } sov-modules-api = { path = "../../module-system/sov-modules-api", default-features = false, features = ["macros"] } sov-sequencer = { path = "../../full-node/sov-sequencer", optional = true } sov-stf-runner = { path = "../../full-node/sov-stf-runner", optional = true } - # Only enable the evm on "experimental" feature sov-evm = { path = "../../module-system/module-implementations/sov-evm", default-features = false, optional = true } [dev-dependencies] sov-rollup-interface = { path = "../../rollup-interface", features = ["mocks"] } +sov-data-generators = { path = "../../module-system/utils/sov-data-generators" } tempfile = { workspace = true } rand = "0.8" diff --git a/examples/demo-stf/README.md b/examples/demo-stf/README.md index 457244701..e72a2ffd9 100644 --- a/examples/demo-stf/README.md +++ b/examples/demo-stf/README.md @@ -35,7 +35,6 @@ several parameters that specify its exact behavior. In order, these generics are native mode we just read values straight from disk. 2. `Runtime`: a collection of modules which make up the rollup's public interface - To implement your state transition function, you simply need to specify values for each of these fields. So, a typical app definition looks like this: @@ -78,22 +77,25 @@ initialization code for each module which will get run at your rollup's genesis. allow your runtime to dispatch transactions and queries, and tell it which serialization scheme to use. We recommend borsh, since it's both fast and safe for hashing. 
-### Implementing Hooks for the Runtime: +### Implementing Hooks for the Runtime: + The next step is to implement `Hooks` for `MyRuntime`. Hooks are abstractions that allow for the injection of custom logic into the transaction processing pipeline. There are two kinds of hooks: `TxHooks`, which has the following methods: + 1. `pre_dispatch_tx_hook`: Invoked immediately before each transaction is processed. This is a good time to apply stateful transaction verification, like checking the nonce. 2. `post_dispatch_tx_hook`: Invoked immediately after each transaction is executed. This is a good place to perform any post-execution operations, like incrementing the nonce. -`ApplyBlobHooks`, which has the following methods: +`ApplyBlobHooks`, which has the following methods: + 1. `begin_blob_hook`: Invoked at the beginning of the `apply_blob` function, before the blob is deserialized into a group of transactions. This is a good time to ensure that the sequencer is properly bonded. 2. `end_blob_hook`: Invoked at the end of the `apply_blob` function. This is a good place to reward sequencers. To use the `AppTemplate`, the runtime needs to provide an implementation of these hooks, which specifies what needs to happen at each of these four stages. -In this demo, we only rely on two modules which need access to the hooks - `sov-accounts` and `sequencer-registry`. +In this demo, we only rely on two modules which need access to the hooks - `sov-accounts` and `sequencer-registry`. The `sov-accounts` module implements `TxHooks` because it needs to check and increment the sender nonce for every transaction. The `sequencer-registry` implements `ApplyBlobHooks` since it is responsible for managing the sequencer bond. @@ -152,7 +154,6 @@ complete State Transition Function! Your modules implement rpc methods via the `rpc_gen` macro; in order to enable the full-node to expose them, annotate the `Runtime` with `expose_rpc`. In the example above, you can see how to use the `expose_rpc` macro on the `native` `Runtime`. - ## Make Full Node Integrations Simpler with the State Transition Runner: Now that we have an app, we want to be able to run it. For any custom state transition, your full node implementation is going to need a little @@ -172,7 +173,6 @@ The State Transition Runner struct contains logic related to initialization and 2. `run` - which runs the rollup. 3. `start_rpc_server` - which exposes an RPC server. - ```rust let mut app: App = App::new(rollup_config.runner.storage.clone()); @@ -195,4 +195,4 @@ runner.run().await?; Whew, that was a lot of information. To recap, implementing your own state transition function is as simple as plugging a Runtime, a Transaction Verifier, and some Transaction Hooks into the pre-built app template. Once you've done that, -you can integrate with any DA layer and ZKVM to create a Sovereign Rollup. \ No newline at end of file +you can integrate with any DA layer and ZKVM to create a Sovereign Rollup. 
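To make the `apply_slot` signature change threaded through the rest of this diff concrete, the fragment below sketches the new call shape using the mock types from `sov_rollup_interface::mocks`. It is a condensed, non-normative restatement of the demo-stf tests further down in this diff: the genesis `config`, the `demo` app template, and the `txs` batch are assumed to be built by the test helpers shown there (`create_new_demo`, `create_demo_config`, `simulate_da`), and `TestBlob<Address>` is assumed to match the `TestBlob` alias in `tests/mod.rs`.

```rust
use sov_data_generators::new_test_blob_from_batch;
use sov_modules_api::Address;
use sov_modules_stf_template::Batch;
use sov_rollup_interface::mocks::{MockZkvm, TestBlob, TestBlock};
use sov_rollup_interface::stf::StateTransitionFunction;

use crate::genesis_config::DEMO_SEQUENCER_DA_ADDRESS;

// Setup elided: `demo`, `config`, and `txs` are assumed to come from the
// demo-stf test helpers shown later in this diff.
StateTransitionFunction::<MockZkvm, TestBlob<Address>>::init_chain(&mut demo, config);

// Wrap the batch in a mock blob tagged with the sequencer's DA address.
let blob = new_test_blob_from_batch(Batch { txs }, &DEMO_SEQUENCER_DA_ADDRESS, [0; 32]);
let mut blobs = [blob];

// New in this change set: slot data travels alongside the witness and blobs,
// so the STF can see the DA block (and its validity condition) it executes against.
let slot_data = TestBlock::default();
let result = StateTransitionFunction::<MockZkvm, TestBlob<Address>>::apply_slot(
    &mut demo,
    Default::default(), // empty witness
    &slot_data,
    &mut blobs,
);
assert_eq!(1, result.batch_receipts.len());
```

The same shape appears in the native code paths changed below, e.g. `app.stf.apply_slot(Default::default(), &filtered_block, &mut blobs)` in the demo-prover host, where the Celestia `FilteredCelestiaBlock` stands in for the mock block.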
diff --git a/examples/demo-stf/src/app.rs b/examples/demo-stf/src/app.rs index 84efd30f3..48b6827b2 100644 --- a/examples/demo-stf/src/app.rs +++ b/examples/demo-stf/src/app.rs @@ -8,7 +8,7 @@ use sov_modules_api::Spec; use sov_modules_stf_template::AppTemplate; pub use sov_modules_stf_template::Batch; use sov_rollup_interface::da::BlobReaderTrait; -use sov_rollup_interface::zk::Zkvm; +use sov_rollup_interface::zk::{ValidityCondition, Zkvm}; #[cfg(feature = "native")] use sov_state::ProverStorage; use sov_state::{Storage, ZkStorage}; @@ -20,13 +20,13 @@ use sov_stf_runner::StorageConfig; use crate::runtime::Runtime; #[cfg(feature = "native")] -pub struct App { - pub stf: AppTemplate, Vm, B>, +pub struct App { + pub stf: AppTemplate, B>, pub batch_builder: Option, DefaultContext>>, } #[cfg(feature = "native")] -impl App { +impl App { pub fn new(storage_config: StorageConfig) -> Self { let storage = ProverStorage::with_config(storage_config).expect("Failed to open prover storage"); @@ -49,9 +49,9 @@ impl App { } } -pub fn create_zk_app_template( +pub fn create_zk_app_template( runtime_config: [u8; 32], -) -> AppTemplate, Vm, B> { +) -> AppTemplate, B> { let storage = ZkStorage::with_config(runtime_config).expect("Failed to open zk storage"); AppTemplate::new(storage, Runtime::default()) } diff --git a/examples/demo-stf/src/genesis_config.rs b/examples/demo-stf/src/genesis_config.rs index 0284374b2..08306c56a 100644 --- a/examples/demo-stf/src/genesis_config.rs +++ b/examples/demo-stf/src/genesis_config.rs @@ -3,7 +3,7 @@ use sov_election::ElectionConfig; use sov_evm::{AccountData, EvmConfig, SpecId}; pub use sov_modules_api::default_context::DefaultContext; use sov_modules_api::default_signature::private_key::DefaultPrivateKey; -use sov_modules_api::test_utils::generate_address; +use sov_modules_api::utils::generate_address; use sov_modules_api::{Context, PrivateKey, PublicKey}; pub use sov_state::config::Config as StorageConfig; use sov_value_setter::ValueSetterConfig; diff --git a/examples/demo-stf/src/runtime.rs b/examples/demo-stf/src/runtime.rs index f81d1465d..5c0834aec 100644 --- a/examples/demo-stf/src/runtime.rs +++ b/examples/demo-stf/src/runtime.rs @@ -9,10 +9,12 @@ use sov_election::{ElectionRpcImpl, ElectionRpcServer}; use sov_evm::query::{EvmRpcImpl, EvmRpcServer}; #[cfg(feature = "native")] pub use sov_modules_api::default_context::DefaultContext; +use sov_modules_api::hooks::SlotHooks; use sov_modules_api::macros::DefaultRuntime; #[cfg(feature = "native")] use sov_modules_api::macros::{expose_rpc, CliWallet}; use sov_modules_api::{Context, DispatchCall, Genesis, MessageCodec}; +use sov_rollup_interface::zk::ValidityCondition; #[cfg(feature = "native")] use sov_sequencer_registry::{SequencerRegistryRpcImpl, SequencerRegistryRpcServer}; #[cfg(feature = "native")] @@ -87,4 +89,24 @@ pub struct Runtime { pub evm: sov_evm::Evm, } -impl sov_modules_stf_template::Runtime for Runtime {} +impl SlotHooks for Runtime { + type Context = C; + + fn begin_slot_hook( + &self, + _slot_data: &impl sov_rollup_interface::services::da::SlotData, + _working_set: &mut sov_state::WorkingSet<::Storage>, + ) { + } + + fn end_slot_hook( + &self, + _working_set: &mut sov_state::WorkingSet<::Storage>, + ) { + } +} + +impl sov_modules_stf_template::Runtime + for Runtime +{ +} diff --git a/examples/demo-stf/src/sov-cli/native.rs b/examples/demo-stf/src/sov-cli/native.rs index 5f27c459e..eb5240e66 100644 --- a/examples/demo-stf/src/sov-cli/native.rs +++ b/examples/demo-stf/src/sov-cli/native.rs @@ -15,8 
+15,9 @@ use sov_modules_api::default_signature::private_key::DefaultPrivateKey; use sov_modules_api::transaction::Transaction; use sov_modules_api::{AddressBech32, PrivateKey, PublicKey, Spec}; use sov_modules_stf_template::RawTx; +#[cfg(test)] +use sov_rollup_interface::mocks::TestBlock; use sov_sequencer::SubmitTransaction; - type C = DefaultContext; type Address = ::Address; @@ -349,7 +350,7 @@ mod test { use demo_stf::runtime::{GenesisConfig, Runtime}; use sov_modules_api::Address; use sov_modules_stf_template::{AppTemplate, Batch, RawTx, SequencerOutcome}; - use sov_rollup_interface::mocks::MockZkvm; + use sov_rollup_interface::mocks::{MockZkvm, TestValidityCond}; use sov_rollup_interface::stf::StateTransitionFunction; use sov_state::WorkingSet; use sov_stf_runner::Config; @@ -424,7 +425,7 @@ mod test { // Test helpers struct TestDemo { config: GenesisConfig, - demo: AppTemplate, MockZkvm, TestBlob>, + demo: AppTemplate, TestBlob>, } impl TestDemo { @@ -444,7 +445,7 @@ mod test { Self { config: genesis_config, - demo: App::::new(runner_config.storage).stf, + demo: App::::new(runner_config.storage).stf, } } } @@ -500,18 +501,20 @@ mod test { } fn execute_txs( - demo: &mut AppTemplate, MockZkvm, TestBlob>, + demo: &mut AppTemplate, TestBlob>, config: GenesisConfig, txs: Vec, ) { StateTransitionFunction::::init_chain(demo, config); + let data = TestBlock::default(); let blob = new_test_blob(Batch { txs }, &DEMO_SEQUENCER_DA_ADDRESS); let mut blobs = [blob]; let apply_block_result = StateTransitionFunction::::apply_slot( demo, Default::default(), + &data, &mut blobs, ); @@ -526,7 +529,7 @@ mod test { } fn get_balance( - demo: &mut AppTemplate, MockZkvm, TestBlob>, + demo: &mut AppTemplate, TestBlob>, token_deployer_address: &Address, user_address: Address, ) -> Option { diff --git a/examples/demo-stf/src/tests/da_simulation.rs b/examples/demo-stf/src/tests/da_simulation.rs new file mode 100644 index 000000000..1ae79572e --- /dev/null +++ b/examples/demo-stf/src/tests/da_simulation.rs @@ -0,0 +1,56 @@ +use std::rc::Rc; + +use sov_data_generators::election_data::{ + BadNonceElectionCallMessages, BadSerializationElectionCallMessages, BadSigElectionCallMessages, + ElectionCallMessages, InvalidElectionCallMessages, +}; +use sov_data_generators::value_setter_data::{ValueSetterMessage, ValueSetterMessages}; +use sov_data_generators::MessageGenerator; +use sov_modules_api::default_context::DefaultContext; +use sov_modules_api::default_signature::private_key::DefaultPrivateKey; +use sov_modules_stf_template::RawTx; + +use crate::runtime::Runtime; + +type C = DefaultContext; + +pub fn simulate_da( + value_setter_admin: DefaultPrivateKey, + election_admin: DefaultPrivateKey, +) -> Vec { + let mut messages = Vec::default(); + + let election: ElectionCallMessages = ElectionCallMessages::new(election_admin); + messages.extend(election.create_raw_txs::>()); + + let value_setter = ValueSetterMessages::new(vec![ValueSetterMessage { + admin: Rc::new(value_setter_admin), + messages: vec![99, 33], + }]); + messages.extend(value_setter.create_raw_txs::>()); + + messages +} + +pub fn simulate_da_with_revert_msg(election_admin: DefaultPrivateKey) -> Vec { + let election = InvalidElectionCallMessages::new(election_admin); + election.create_raw_txs::>() +} + +pub fn simulate_da_with_bad_sig(election_admin: DefaultPrivateKey) -> Vec { + let election = BadSigElectionCallMessages::::new(election_admin); + election.create_raw_txs::>() +} + +// TODO: Remove once we fix test with bad nonce +// 
https://github.com/Sovereign-Labs/sovereign-sdk/issues/235 +#[allow(unused)] +pub fn simulate_da_with_bad_nonce(election_admin: DefaultPrivateKey) -> Vec { + let election = BadNonceElectionCallMessages::new(election_admin); + election.create_raw_txs::>() +} + +pub fn simulate_da_with_bad_serialization(election_admin: DefaultPrivateKey) -> Vec { + let election = BadSerializationElectionCallMessages::new(election_admin); + election.create_raw_txs::>() +} diff --git a/examples/demo-stf/src/tests/data_generation/election_data.rs b/examples/demo-stf/src/tests/data_generation/election_data.rs deleted file mode 100644 index 2eb68d9c9..000000000 --- a/examples/demo-stf/src/tests/data_generation/election_data.rs +++ /dev/null @@ -1,294 +0,0 @@ -use std::rc::Rc; - -use sov_modules_api::default_context::DefaultContext; -use sov_modules_api::PrivateKey; - -use super::*; - -struct CallGenerator { - election_admin_nonce: u64, - election_admin: Rc, - voters: Vec>, -} - -impl CallGenerator { - fn new(election_admin: Rc) -> Self { - let voters = vec![ - Rc::new(DefaultPrivateKey::generate()), - Rc::new(DefaultPrivateKey::generate()), - Rc::new(DefaultPrivateKey::generate()), - ]; - Self { - election_admin_nonce: 0, - election_admin, - voters, - } - } - - fn inc_nonce(&mut self) { - self.election_admin_nonce += 1; - } - - fn create_voters_and_vote( - &mut self, - ) -> Vec<( - Rc, - sov_election::CallMessage, - u64, - )> { - let mut messages = Vec::default(); - - let set_candidates_message = sov_election::CallMessage::SetCandidates { - names: vec!["candidate_1".to_owned(), "candidate_2".to_owned()], - }; - - messages.push(( - self.election_admin.clone(), - set_candidates_message, - self.election_admin_nonce, - )); - self.inc_nonce(); - - for voter in self.voters.clone() { - let add_voter_message = - sov_election::CallMessage::AddVoter(voter.pub_key().to_address()); - - messages.push(( - self.election_admin.clone(), - add_voter_message, - self.election_admin_nonce, - )); - - let vote_message = sov_election::CallMessage::Vote(1); - messages.push((voter, vote_message, 0)); - self.inc_nonce(); - } - - messages - } - - fn freeze_vote( - &mut self, - ) -> Vec<( - Rc, - sov_election::CallMessage, - u64, - )> { - let mut messages = Vec::default(); - - let freeze_message = sov_election::CallMessage::FreezeElection; - messages.push(( - self.election_admin.clone(), - freeze_message, - self.election_admin_nonce, - )); - self.inc_nonce(); - - messages - } - - fn all_messages( - &mut self, - ) -> Vec<( - Rc, - sov_election::CallMessage, - u64, - )> { - let mut messages = Vec::default(); - - messages.extend(self.create_voters_and_vote()); - messages.extend(self.freeze_vote()); - messages - } -} - -pub struct ElectionCallMessages { - election_admin: Rc, -} - -impl ElectionCallMessages { - pub fn new(election_admin: DefaultPrivateKey) -> Self { - Self { - election_admin: Rc::new(election_admin), - } - } -} - -impl MessageGenerator for ElectionCallMessages { - type Call = sov_election::CallMessage; - - fn create_messages(&self) -> Vec<(Rc, Self::Call, u64)> { - let call_generator = &mut CallGenerator::new(self.election_admin.clone()); - call_generator.all_messages() - } - - fn create_tx( - &self, - sender: &DefaultPrivateKey, - message: Self::Call, - nonce: u64, - _is_last: bool, - ) -> Transaction { - let message = Runtime::::encode_election_call(message); - Transaction::::new_signed_tx(sender, message, nonce) - } -} - -pub struct InvalidElectionCallMessages { - election_admin: Rc, -} - -impl InvalidElectionCallMessages { - 
pub fn new(election_admin: DefaultPrivateKey) -> Self { - Self { - election_admin: Rc::new(election_admin), - } - } -} - -impl MessageGenerator for InvalidElectionCallMessages { - type Call = sov_election::CallMessage; - - fn create_messages(&self) -> Vec<(Rc, Self::Call, u64)> { - let call_generator = &mut CallGenerator::new(self.election_admin.clone()); - - let mut messages = Vec::default(); - - messages.extend(call_generator.create_voters_and_vote()); - - // Additional invalid message: This voter already voted. - { - let voter = call_generator.voters[0].clone(); - let vote_message = sov_election::CallMessage::Vote(1); - messages.push((voter, vote_message, 1)); - } - - messages.extend(call_generator.freeze_vote()); - messages - } - - fn create_tx( - &self, - sender: &DefaultPrivateKey, - message: Self::Call, - nonce: u64, - _is_last: bool, - ) -> Transaction { - let message = Runtime::::encode_election_call(message); - Transaction::::new_signed_tx(sender, message, nonce) - } -} - -pub struct BadSigElectionCallMessages { - election_admin: Rc, -} - -impl BadSigElectionCallMessages { - pub fn new(election_admin: DefaultPrivateKey) -> Self { - Self { - election_admin: Rc::new(election_admin), - } - } -} - -impl MessageGenerator for BadSigElectionCallMessages { - type Call = sov_election::CallMessage; - - fn create_messages(&self) -> Vec<(Rc, Self::Call, u64)> { - let call_generator = &mut CallGenerator::new(self.election_admin.clone()); - call_generator.all_messages() - } - - fn create_tx( - &self, - sender: &DefaultPrivateKey, - message: Self::Call, - nonce: u64, - is_last: bool, - ) -> Transaction { - let message = Runtime::::encode_election_call(message); - - if is_last { - let tx = Transaction::::new_signed_tx(sender, message.clone(), nonce); - Transaction::new( - DefaultPrivateKey::generate().pub_key(), - message, - tx.signature().clone(), - nonce, - ) - } else { - Transaction::::new_signed_tx(sender, message, nonce) - } - } -} - -pub struct BadNonceElectionCallMessages { - election_admin: Rc, -} - -impl BadNonceElectionCallMessages { - pub fn new(election_admin: DefaultPrivateKey) -> Self { - Self { - election_admin: Rc::new(election_admin), - } - } -} - -impl MessageGenerator for BadNonceElectionCallMessages { - type Call = sov_election::CallMessage; - - fn create_messages(&self) -> Vec<(Rc, Self::Call, u64)> { - let call_generator = &mut CallGenerator::new(self.election_admin.clone()); - call_generator.all_messages() - } - - fn create_tx( - &self, - sender: &DefaultPrivateKey, - message: Self::Call, - nonce: u64, - flag: bool, - ) -> Transaction { - let nonce = if flag { nonce + 1 } else { nonce }; - - let message = Runtime::::encode_election_call(message); - Transaction::::new_signed_tx(sender, message, nonce) - } -} - -pub struct BadSerializationElectionCallMessages { - election_admin: Rc, -} - -impl BadSerializationElectionCallMessages { - pub fn new(election_admin: DefaultPrivateKey) -> Self { - Self { - election_admin: Rc::new(election_admin), - } - } -} - -impl MessageGenerator for BadSerializationElectionCallMessages { - type Call = sov_election::CallMessage; - - fn create_messages(&self) -> Vec<(Rc, Self::Call, u64)> { - let call_generator = &mut CallGenerator::new(self.election_admin.clone()); - call_generator.all_messages() - } - - fn create_tx( - &self, - sender: &DefaultPrivateKey, - message: Self::Call, - nonce: u64, - is_last: bool, - ) -> Transaction { - let call_data = if is_last { - vec![1, 2, 3] - } else { - Runtime::::encode_election_call(message) - }; - - 
Transaction::::new_signed_tx(sender, call_data, nonce) - } -} diff --git a/examples/demo-stf/src/tests/data_generation/mod.rs b/examples/demo-stf/src/tests/data_generation/mod.rs deleted file mode 100644 index 8f248551a..000000000 --- a/examples/demo-stf/src/tests/data_generation/mod.rs +++ /dev/null @@ -1,80 +0,0 @@ -use std::rc::Rc; - -use borsh::BorshSerialize; -use sov_modules_api::default_context::DefaultContext; -use sov_modules_api::default_signature::private_key::DefaultPrivateKey; -use sov_modules_api::transaction::Transaction; -use sov_modules_api::PublicKey; -use sov_modules_stf_template::RawTx; - -use crate::runtime::Runtime; - -mod election_data; -mod value_setter_data; - -pub fn simulate_da( - value_setter_admin: DefaultPrivateKey, - election_admin: DefaultPrivateKey, -) -> Vec { - let election = election_data::ElectionCallMessages::new(election_admin); - - let mut messages = Vec::default(); - messages.extend(election.create_raw_txs()); - - let value_setter = value_setter_data::ValueSetterMessages::new(value_setter_admin); - messages.extend(value_setter.create_raw_txs()); - - messages -} - -pub fn simulate_da_with_revert_msg(election_admin: DefaultPrivateKey) -> Vec { - let election = election_data::InvalidElectionCallMessages::new(election_admin); - election.create_raw_txs() -} - -pub fn simulate_da_with_bad_sig(election_admin: DefaultPrivateKey) -> Vec { - let election = election_data::BadSigElectionCallMessages::new(election_admin); - election.create_raw_txs() -} - -// TODO: Remove once we fix test with bad nonce -// https://github.com/Sovereign-Labs/sovereign-sdk/issues/235 -#[allow(unused)] -pub fn simulate_da_with_bad_nonce(election_admin: DefaultPrivateKey) -> Vec { - let election = election_data::BadNonceElectionCallMessages::new(election_admin); - election.create_raw_txs() -} - -pub fn simulate_da_with_bad_serialization(election_admin: DefaultPrivateKey) -> Vec { - let election = election_data::BadSerializationElectionCallMessages::new(election_admin); - election.create_raw_txs() -} - -trait MessageGenerator { - type Call; - - fn create_messages(&self) -> Vec<(Rc, Self::Call, u64)>; - - fn create_tx( - &self, - sender: &DefaultPrivateKey, - message: Self::Call, - nonce: u64, - is_last: bool, - ) -> Transaction; - - fn create_raw_txs(&self) -> Vec { - let mut messages_iter = self.create_messages().into_iter().peekable(); - let mut serialized_messages = Vec::default(); - while let Some((sender, m, nonce)) = messages_iter.next() { - let is_last = messages_iter.peek().is_none(); - - let tx = self.create_tx(&sender, m, nonce, is_last); - - serialized_messages.push(RawTx { - data: tx.try_to_vec().unwrap(), - }) - } - serialized_messages - } -} diff --git a/examples/demo-stf/src/tests/data_generation/value_setter_data.rs b/examples/demo-stf/src/tests/data_generation/value_setter_data.rs deleted file mode 100644 index a63796a50..000000000 --- a/examples/demo-stf/src/tests/data_generation/value_setter_data.rs +++ /dev/null @@ -1,52 +0,0 @@ -use sov_modules_api::default_context::DefaultContext; -use sov_modules_api::default_signature::private_key::DefaultPrivateKey; - -use super::*; - -pub struct ValueSetterMessages { - admin: Rc, -} - -impl ValueSetterMessages { - pub fn new(admin: DefaultPrivateKey) -> Self { - Self { - admin: Rc::new(admin), - } - } -} - -impl MessageGenerator for ValueSetterMessages { - type Call = sov_value_setter::CallMessage; - - fn create_messages(&self) -> Vec<(Rc, Self::Call, u64)> { - let admin = self.admin.clone(); - let mut 
value_setter_admin_nonce = 0; - let mut messages = Vec::default(); - - let new_value = 99; - - let set_value_msg_1: sov_value_setter::CallMessage = - sov_value_setter::CallMessage::SetValue(new_value); - - let new_value = 33; - let set_value_msg_2 = sov_value_setter::CallMessage::SetValue(new_value); - - messages.push((admin.clone(), set_value_msg_1, value_setter_admin_nonce)); - - value_setter_admin_nonce += 1; - messages.push((admin, set_value_msg_2, value_setter_admin_nonce)); - - messages - } - - fn create_tx( - &self, - sender: &DefaultPrivateKey, - message: Self::Call, - nonce: u64, - _is_last: bool, - ) -> Transaction { - let message = Runtime::::encode_value_setter_call(message); - Transaction::::new_signed_tx(sender, message, nonce) - } -} diff --git a/examples/demo-stf/src/tests/mod.rs b/examples/demo-stf/src/tests/mod.rs index b100992bf..0bb81faf7 100644 --- a/examples/demo-stf/src/tests/mod.rs +++ b/examples/demo-stf/src/tests/mod.rs @@ -1,15 +1,14 @@ use std::path::Path; -use borsh::BorshSerialize; use sov_modules_api::default_context::DefaultContext; use sov_modules_api::Address; -use sov_modules_stf_template::{AppTemplate, Batch, SequencerOutcome, TxEffect}; -use sov_rollup_interface::stf::BatchReceipt; +use sov_modules_stf_template::AppTemplate; +use sov_rollup_interface::mocks::TestValidityCond; use sov_state::ProverStorage; use crate::runtime::Runtime; -mod data_generation; +mod da_simulation; mod stf_tests; mod tx_revert_tests; pub(crate) type C = DefaultContext; @@ -20,26 +19,12 @@ pub fn create_new_demo( path: impl AsRef, ) -> AppTemplate< DefaultContext, - Runtime, + TestValidityCond, sov_rollup_interface::mocks::MockZkvm, + Runtime, TestBlob, > { let runtime = Runtime::default(); let storage = ProverStorage::with_path(path).unwrap(); AppTemplate::new(storage, runtime) } - -pub fn has_tx_events(apply_blob_outcome: &BatchReceipt) -> bool { - let events = apply_blob_outcome - .tx_receipts - .iter() - .flat_map(|receipts| receipts.events.iter()); - - events.peekable().peek().is_some() -} - -pub fn new_test_blob(batch: Batch, address: &[u8]) -> TestBlob { - let address = Address::try_from(address).unwrap(); - let data = batch.try_to_vec().unwrap(); - TestBlob::new(data, address, [0; 32]) -} diff --git a/examples/demo-stf/src/tests/stf_tests.rs b/examples/demo-stf/src/tests/stf_tests.rs index 340cd185f..5e2253e3f 100644 --- a/examples/demo-stf/src/tests/stf_tests.rs +++ b/examples/demo-stf/src/tests/stf_tests.rs @@ -1,17 +1,19 @@ #[cfg(test)] pub mod test { + + use sov_data_generators::{has_tx_events, new_test_blob_from_batch}; use sov_modules_api::default_context::DefaultContext; use sov_modules_api::default_signature::private_key::DefaultPrivateKey; use sov_modules_api::PrivateKey; use sov_modules_stf_template::{Batch, SequencerOutcome}; - use sov_rollup_interface::mocks::MockZkvm; + use sov_rollup_interface::mocks::{MockZkvm, TestBlock}; use sov_rollup_interface::stf::StateTransitionFunction; use sov_state::{ProverStorage, WorkingSet}; use crate::genesis_config::{create_demo_config, DEMO_SEQUENCER_DA_ADDRESS, LOCKED_AMOUNT}; use crate::runtime::Runtime; - use crate::tests::data_generation::simulate_da; - use crate::tests::{create_new_demo, has_tx_events, new_test_blob, TestBlob, C}; + use crate::tests::da_simulation::simulate_da; + use crate::tests::{create_new_demo, TestBlob, C}; #[test] fn test_demo_values_in_db() { @@ -31,13 +33,16 @@ pub mod test { StateTransitionFunction::::init_chain(&mut demo, config); let txs = simulate_da(value_setter_admin_private_key, 
election_admin_private_key); - let blob = new_test_blob(Batch { txs }, &DEMO_SEQUENCER_DA_ADDRESS); + let blob = new_test_blob_from_batch(Batch { txs }, &DEMO_SEQUENCER_DA_ADDRESS, [0; 32]); let mut blobs = [blob]; + let data = TestBlock::default(); + let result = StateTransitionFunction::::apply_slot( &mut demo, Default::default(), + &data, &mut blobs, ); @@ -92,12 +97,14 @@ pub mod test { let txs = simulate_da(value_setter_admin_private_key, election_admin_private_key); - let blob = new_test_blob(Batch { txs }, &DEMO_SEQUENCER_DA_ADDRESS); + let blob = new_test_blob_from_batch(Batch { txs }, &DEMO_SEQUENCER_DA_ADDRESS, [0; 32]); let mut blobs = [blob]; + let data = TestBlock::default(); let apply_block_result = StateTransitionFunction::::apply_slot( &mut demo, Default::default(), + &data, &mut blobs, ); @@ -150,12 +157,14 @@ pub mod test { StateTransitionFunction::::init_chain(&mut demo, config); let txs = simulate_da(value_setter_admin_private_key, election_admin_private_key); - let blob = new_test_blob(Batch { txs }, &DEMO_SEQUENCER_DA_ADDRESS); + let blob = new_test_blob_from_batch(Batch { txs }, &DEMO_SEQUENCER_DA_ADDRESS, [0; 32]); let mut blobs = [blob]; + let data = TestBlock::default(); let apply_block_result = StateTransitionFunction::::apply_slot( &mut demo, Default::default(), + &data, &mut blobs, ); @@ -208,12 +217,14 @@ pub mod test { let some_sequencer: [u8; 32] = [121; 32]; let txs = simulate_da(value_setter_admin_private_key, election_admin_private_key); - let blob = new_test_blob(Batch { txs }, &some_sequencer); + let blob = new_test_blob_from_batch(Batch { txs }, &some_sequencer, [0; 32]); let mut blobs = [blob]; + let data = TestBlock::default(); let apply_block_result = StateTransitionFunction::::apply_slot( &mut demo, Default::default(), + &data, &mut blobs, ); diff --git a/examples/demo-stf/src/tests/tx_revert_tests.rs b/examples/demo-stf/src/tests/tx_revert_tests.rs index e46cbe5bd..c4bd19119 100644 --- a/examples/demo-stf/src/tests/tx_revert_tests.rs +++ b/examples/demo-stf/src/tests/tx_revert_tests.rs @@ -1,21 +1,24 @@ use borsh::BorshSerialize; use const_rollup_config::SEQUENCER_DA_ADDRESS; use sov_accounts::Response; +use sov_data_generators::{has_tx_events, new_test_blob_from_batch}; +use sov_election::Election; use sov_modules_api::default_context::DefaultContext; use sov_modules_api::default_signature::private_key::DefaultPrivateKey; use sov_modules_api::transaction::Transaction; -use sov_modules_api::{PrivateKey, PublicKey}; +use sov_modules_api::{EncodeCall, PrivateKey, PublicKey}; use sov_modules_stf_template::{Batch, RawTx, SequencerOutcome, SlashingReason}; -use sov_rollup_interface::mocks::MockZkvm; +use sov_rollup_interface::mocks::{MockZkvm, TestBlock}; use sov_rollup_interface::stf::StateTransitionFunction; use sov_state::{ProverStorage, WorkingSet}; use super::create_new_demo; -use super::data_generation::{simulate_da_with_bad_sig, simulate_da_with_revert_msg}; use crate::genesis_config::{create_demo_config, DEMO_SEQUENCER_DA_ADDRESS, LOCKED_AMOUNT}; use crate::runtime::Runtime; -use crate::tests::data_generation::simulate_da_with_bad_serialization; -use crate::tests::{has_tx_events, new_test_blob, TestBlob}; +use crate::tests::da_simulation::{ + simulate_da_with_bad_serialization, simulate_da_with_bad_sig, simulate_da_with_revert_msg, +}; +use crate::tests::TestBlob; const SEQUENCER_BALANCE_DELTA: u64 = 1; const SEQUENCER_BALANCE: u64 = LOCKED_AMOUNT + SEQUENCER_BALANCE_DELTA; @@ -32,20 +35,24 @@ fn test_tx_revert() { 
&value_setter_admin_private_key, &election_admin_private_key, ); - let sequencer_rollup_address = config.sequencer_registry.seq_rollup_address.clone(); + let sequencer_rollup_address = config.sequencer_registry.seq_rollup_address; { let mut demo = create_new_demo(path); + // TODO: Maybe complete with actual block data + let _data = TestBlock::default(); StateTransitionFunction::::init_chain(&mut demo, config); let txs = simulate_da_with_revert_msg(election_admin_private_key); - let blob = new_test_blob(Batch { txs }, &DEMO_SEQUENCER_DA_ADDRESS); + let blob = new_test_blob_from_batch(Batch { txs }, &DEMO_SEQUENCER_DA_ADDRESS, [0; 32]); let mut blobs = [blob]; + let data = TestBlock::default(); let apply_block_result = StateTransitionFunction::::apply_slot( &mut demo, Default::default(), + &data, &mut blobs, ); @@ -111,14 +118,17 @@ fn test_nonce_incremented_on_revert() { ); { + // TODO: Maybe complete with actual block data + let _data = TestBlock::default(); let mut demo = create_new_demo(path); StateTransitionFunction::::init_chain(&mut demo, config); - let set_candidates_message = Runtime::::encode_election_call( - sov_election::CallMessage::SetCandidates { - names: vec!["candidate_1".to_owned(), "candidate_2".to_owned()], - }, - ); + let set_candidates_message = + as EncodeCall>>::encode_call( + sov_election::CallMessage::SetCandidates { + names: vec!["candidate_1".to_owned(), "candidate_2".to_owned()], + }, + ); let set_candidates_message = Transaction::::new_signed_tx( &election_admin_private_key, @@ -126,9 +136,10 @@ fn test_nonce_incremented_on_revert() { 0, ); - let add_voter_message = Runtime::::encode_election_call( - sov_election::CallMessage::AddVoter(voter.pub_key().to_address()), - ); + let add_voter_message = + as EncodeCall>>::encode_call( + sov_election::CallMessage::AddVoter(voter.pub_key().to_address()), + ); let add_voter_message = Transaction::::new_signed_tx( &election_admin_private_key, add_voter_message, @@ -137,7 +148,9 @@ fn test_nonce_incremented_on_revert() { // There's only 2 candidates let vote_message = - Runtime::::encode_election_call(sov_election::CallMessage::Vote(100)); + as EncodeCall>>::encode_call( + sov_election::CallMessage::Vote(100), + ); let vote_message = Transaction::::new_signed_tx(&voter, vote_message, original_nonce); @@ -149,12 +162,14 @@ fn test_nonce_incremented_on_revert() { }) .collect(); - let blob = new_test_blob(Batch { txs }, &DEMO_SEQUENCER_DA_ADDRESS); + let blob = new_test_blob_from_batch(Batch { txs }, &DEMO_SEQUENCER_DA_ADDRESS, [0; 32]); let mut blobs = [blob]; + let data = TestBlock::default(); let apply_block_result = StateTransitionFunction::::apply_slot( &mut demo, Default::default(), + &data, &mut blobs, ); @@ -207,17 +222,21 @@ fn test_tx_bad_sig() { { let mut demo = create_new_demo(path); + // TODO: Maybe complete with actual block data + let _data = TestBlock::default(); StateTransitionFunction::::init_chain(&mut demo, config); let txs = simulate_da_with_bad_sig(election_admin_private_key); - let blob = new_test_blob(Batch { txs }, &DEMO_SEQUENCER_DA_ADDRESS); + let blob = new_test_blob_from_batch(Batch { txs }, &DEMO_SEQUENCER_DA_ADDRESS, [0; 32]); let mut blobs = [blob]; + let data = TestBlock::default(); let apply_block_result = StateTransitionFunction::::apply_slot( &mut demo, Default::default(), + &data, &mut blobs, ); @@ -266,7 +285,7 @@ fn test_tx_bad_serialization() { &value_setter_admin_private_key, &election_admin_private_key, ); - let sequencer_rollup_address = 
config.sequencer_registry.seq_rollup_address.clone(); + let sequencer_rollup_address = config.sequencer_registry.seq_rollup_address; let sequencer_balance_before = { let mut demo = create_new_demo(path); StateTransitionFunction::::init_chain(&mut demo, config); @@ -280,7 +299,7 @@ fn test_tx_bad_serialization() { demo.runtime .bank .get_balance_of( - sequencer_rollup_address.clone(), + sequencer_rollup_address, coins.token_address, &mut working_set, ) @@ -288,15 +307,20 @@ fn test_tx_bad_serialization() { }; { + // TODO: Maybe complete with actual block data + let _data = TestBlock::default(); + let mut demo = create_new_demo(path); let txs = simulate_da_with_bad_serialization(election_admin_private_key); - let blob = new_test_blob(Batch { txs }, &DEMO_SEQUENCER_DA_ADDRESS); + let blob = new_test_blob_from_batch(Batch { txs }, &DEMO_SEQUENCER_DA_ADDRESS, [0; 32]); let mut blobs = [blob]; + let data = TestBlock::default(); let apply_block_result = StateTransitionFunction::::apply_slot( &mut demo, Default::default(), + &data, &mut blobs, ); diff --git a/full-node/sov-ethereum/src/lib.rs b/full-node/sov-ethereum/src/lib.rs index 9f6bfec23..07229bc5d 100644 --- a/full-node/sov-ethereum/src/lib.rs +++ b/full-node/sov-ethereum/src/lib.rs @@ -22,6 +22,7 @@ pub mod experimental { use sov_evm::evm::{EthAddress, EvmTransaction}; use sov_modules_api::transaction::Transaction; use sov_modules_api::utils::to_jsonrpsee_error_object; + use sov_modules_api::EncodeCall; const GAS_PER_BYTE: usize = 120; const ETH_RPC_ERROR: &str = "ETH_RPC_ERROR"; @@ -54,7 +55,10 @@ pub mod experimental { .or_insert(0); let tx = CallMessage { tx: evm_tx }; - let message = Runtime::::encode_evm_call(tx); + let message = + as EncodeCall>>::encode_call( + tx, + ); let tx = Transaction::::new_signed_tx( &self.tx_signer_prov_key, message, diff --git a/full-node/sov-stf-runner/src/batch_builder.rs b/full-node/sov-stf-runner/src/batch_builder.rs index 0af49152e..b43d0b771 100644 --- a/full-node/sov-stf-runner/src/batch_builder.rs +++ b/full-node/sov-stf-runner/src/batch_builder.rs @@ -141,10 +141,10 @@ mod tests { use sov_modules_api::default_signature::DefaultPublicKey; use sov_modules_api::macros::DefaultRuntime; use sov_modules_api::transaction::Transaction; - use sov_modules_api::{Context, DispatchCall, Genesis, MessageCodec, PrivateKey}; + use sov_modules_api::{Context, DispatchCall, EncodeCall, Genesis, MessageCodec, PrivateKey}; use sov_rollup_interface::services::batch_builder::BatchBuilder; use sov_state::{DefaultStorageSpec, ProverStorage, Storage}; - use sov_value_setter::{CallMessage, ValueSetterConfig}; + use sov_value_setter::{CallMessage, ValueSetter, ValueSetterConfig}; use tempfile::TempDir; use super::*; @@ -167,9 +167,9 @@ mod tests { fn generate_valid_tx(private_key: &DefaultPrivateKey, value: u32) -> Vec { let msg = CallMessage::SetValue(value); - let msg = TestRuntime::::encode_value_setter_call(msg); + let msg = as EncodeCall>>::encode_call(msg); - Transaction::new_signed_tx(private_key, msg, 1) + Transaction::::new_signed_tx(private_key, msg, 1) .try_to_vec() .unwrap() } @@ -184,7 +184,7 @@ mod tests { fn generate_signed_tx_with_invalid_payload(private_key: &DefaultPrivateKey) -> Vec { let msg = generate_random_bytes(); - Transaction::new_signed_tx(private_key, msg, 1) + Transaction::::new_signed_tx(private_key, msg, 1) .try_to_vec() .unwrap() } diff --git a/full-node/sov-stf-runner/src/lib.rs b/full-node/sov-stf-runner/src/lib.rs index 173e2cccf..f726db95e 100644 --- a/full-node/sov-stf-runner/src/lib.rs 
+++ b/full-node/sov-stf-runner/src/lib.rs @@ -34,7 +34,11 @@ pub struct StateTransitionRunner where DA: DaService, Vm: Zkvm, - ST: StateTransitionFunction::Spec as DaSpec>::BlobTransaction>, + ST: StateTransitionFunction< + Vm, + <::Spec as DaSpec>::BlobTransaction, + Condition = ::ValidityCondition, + >, { start_height: u64, da_service: DA, @@ -48,7 +52,11 @@ impl StateTransitionRunner where DA: DaService + Clone + Send + Sync + 'static, Vm: Zkvm, - ST: StateTransitionFunction::Spec as DaSpec>::BlobTransaction>, + ST: StateTransitionFunction< + Vm, + <::Spec as DaSpec>::BlobTransaction, + Condition = ::ValidityCondition, + >, { /// Creates a new `StateTransitionRunner` runner. pub fn new( @@ -65,16 +73,13 @@ where // Check if the rollup has previously been initialized if should_init_chain { info!("No history detected. Initializing chain..."); - app.init_chain(genesis_config); + let ret_hash = app.init_chain(genesis_config); info!("Chain initialization is done."); + ret_hash } else { debug!("Chain is already initialized. Skipping initialization."); + app.get_current_state_root()? } - - let res = app.apply_slot(Default::default(), []); - // HACK: Tell the rollup that you're running an empty DA layer block so that it will return the latest state root. - // This will be removed shortly. - res.state_root }; let listen_address = SocketAddr::new(rpc_config.bind_host.parse()?, rpc_config.bind_port); @@ -126,7 +131,9 @@ where let mut data_to_commit = SlotCommit::new(filtered_block.clone()); - let slot_result = self.app.apply_slot(Default::default(), &mut blobs); + let slot_result = self + .app + .apply_slot(Default::default(), &filtered_block, &mut blobs); for receipt in slot_result.batch_receipts { data_to_commit.add_batch(receipt); } diff --git a/module-system/module-implementations/examples/sov-election/src/tests.rs b/module-system/module-implementations/examples/sov-election/src/tests.rs index e0c71663b..283fa251b 100644 --- a/module-system/module-implementations/examples/sov-election/src/tests.rs +++ b/module-system/module-implementations/examples/sov-election/src/tests.rs @@ -17,12 +17,15 @@ fn test_election() { let native_storage = ProverStorage::with_path(tmpdir.path()).unwrap(); let mut native_working_set = WorkingSet::new(native_storage); - test_module::(admin.clone(), &mut native_working_set); + test_module::(admin, &mut native_working_set); - let (_log, witness) = native_working_set.checkpoint().freeze(); - let zk_storage = ZkStorage::new([0u8; 32]); - let mut zk_working_set = WorkingSet::with_witness(zk_storage, witness); - test_module::(admin, &mut zk_working_set); + // Zk context + { + let (_log, witness) = native_working_set.checkpoint().freeze(); + let zk_storage = ZkStorage::new([0u8; 32]); + let mut zk_working_set = WorkingSet::with_witness(zk_storage, witness); + test_module::(admin, &mut zk_working_set); + } } fn test_module(admin: C::Address, working_set: &mut WorkingSet) { diff --git a/module-system/module-implementations/examples/sov-value-setter/src/tests.rs b/module-system/module-implementations/examples/sov-value-setter/src/tests.rs index 6bb5ad002..c45a4e803 100644 --- a/module-system/module-implementations/examples/sov-value-setter/src/tests.rs +++ b/module-system/module-implementations/examples/sov-value-setter/src/tests.rs @@ -12,11 +12,10 @@ fn test_value_setter() { let mut working_set = WorkingSet::new(ProverStorage::with_path(tmpdir.path()).unwrap()); let admin = Address::from([1; 32]); // Test Native-Context + #[cfg(feature = "native")] { - let config = 
ValueSetterConfig { - admin: admin.clone(), - }; - let context = DefaultContext::new(admin.clone()); + let config = ValueSetterConfig { admin }; + let context = DefaultContext::new(admin); test_value_setter_helper(context, &config, &mut working_set); } @@ -24,9 +23,7 @@ fn test_value_setter() { // Test Zk-Context { - let config = ValueSetterConfig { - admin: admin.clone(), - }; + let config = ValueSetterConfig { admin }; let zk_context = ZkDefaultContext::new(admin); let mut zk_working_set = WorkingSet::with_witness(ZkStorage::new([0u8; 32]), witness); test_value_setter_helper(zk_context, &config, &mut zk_working_set); @@ -74,11 +71,12 @@ fn test_err_on_sender_is_not_admin() { let sender_not_admin = Address::from([2; 32]); // Test Native-Context + #[cfg(feature = "native")] { let config = ValueSetterConfig { - admin: sender_not_admin.clone(), + admin: sender_not_admin, }; - let context = DefaultContext::new(sender.clone()); + let context = DefaultContext::new(sender); test_err_on_sender_is_not_admin_helper(context, &config, &mut native_working_set); } let (_, witness) = native_working_set.checkpoint().freeze(); diff --git a/module-system/module-implementations/integration-tests/Cargo.toml b/module-system/module-implementations/integration-tests/Cargo.toml index 7c92d1b9c..28c52e9d6 100644 --- a/module-system/module-implementations/integration-tests/Cargo.toml +++ b/module-system/module-implementations/integration-tests/Cargo.toml @@ -8,8 +8,8 @@ repository = { workspace = true } rust-version = { workspace = true } version = { workspace = true } readme = "README.md" -resolver = "2" publish = false +resolver = "2" [dev-dependencies] anyhow = { workspace = true } @@ -20,3 +20,9 @@ sov-modules-api = { path = "../../sov-modules-api" } sov-state = { path = "../../sov-state" } sov-rollup-interface = { path = "../../../rollup-interface" } sov-schema-db = { path = "../../../full-node/db/sov-schema-db" } +sov-data-generators = { path = "../../utils/sov-data-generators" } +sov-modules-stf-template = { path = "../../sov-modules-stf-template" } +sov-modules-macros = { path = "../../sov-modules-macros" } + +sov-chain-state = { path = "../sov-chain-state" } +sov-value-setter = { path = "../examples/sov-value-setter" } diff --git a/module-system/module-implementations/integration-tests/src/chain_state/helpers.rs b/module-system/module-implementations/integration-tests/src/chain_state/helpers.rs new file mode 100644 index 000000000..35c260fa6 --- /dev/null +++ b/module-system/module-implementations/integration-tests/src/chain_state/helpers.rs @@ -0,0 +1,99 @@ +use sov_chain_state::{ChainState, ChainStateConfig}; +use sov_modules_api::default_context::DefaultContext; +use sov_modules_api::hooks::{ApplyBlobHooks, SlotHooks, TxHooks}; +use sov_modules_api::transaction::Transaction; +use sov_modules_api::{Context, PublicKey, Spec}; +use sov_modules_macros::{DefaultRuntime, DispatchCall, Genesis, MessageCodec}; +use sov_modules_stf_template::{AppTemplate, Runtime, SequencerOutcome}; +use sov_rollup_interface::da::BlobReaderTrait; +use sov_rollup_interface::mocks::{MockZkvm, TestBlob}; +use sov_rollup_interface::zk::ValidityCondition; +use sov_state::WorkingSet; +use sov_value_setter::{ValueSetter, ValueSetterConfig}; + +#[derive(Genesis, DispatchCall, MessageCodec, DefaultRuntime)] +#[serialization(borsh::BorshDeserialize, borsh::BorshSerialize)] +pub(crate) struct TestRuntime { + pub value_setter: ValueSetter, + pub chain_state: ChainState, +} + +impl TxHooks for TestRuntime { + type Context = C; + + fn 
pre_dispatch_tx_hook( + &self, + tx: &Transaction, + _working_set: &mut WorkingSet<::Storage>, + ) -> anyhow::Result<::Address> { + Ok(tx.pub_key().to_address()) + } + + fn post_dispatch_tx_hook( + &self, + _tx: &Transaction, + _working_set: &mut WorkingSet<::Storage>, + ) -> anyhow::Result<()> { + Ok(()) + } +} + +impl ApplyBlobHooks for TestRuntime { + type Context = C; + type BlobResult = SequencerOutcome; + + fn begin_blob_hook( + &self, + _blob: &mut impl BlobReaderTrait, + _working_set: &mut WorkingSet<::Storage>, + ) -> anyhow::Result<()> { + Ok(()) + } + + fn end_blob_hook( + &self, + _result: Self::BlobResult, + _working_set: &mut WorkingSet<::Storage>, + ) -> anyhow::Result<()> { + Ok(()) + } +} + +impl SlotHooks for TestRuntime { + type Context = C; + + fn begin_slot_hook( + &self, + slot_data: &impl sov_rollup_interface::services::da::SlotData, + working_set: &mut sov_state::WorkingSet<::Storage>, + ) { + self.chain_state.begin_slot_hook(slot_data, working_set) + } + + fn end_slot_hook(&self, _working_set: &mut WorkingSet<::Storage>) {} +} + +impl Runtime for TestRuntime {} + +pub(crate) fn create_demo_genesis_config( + admin: ::Address, +) -> GenesisConfig { + let value_setter_config = ValueSetterConfig { admin }; + let chain_state_config = ChainStateConfig { + initial_slot_height: 0, + }; + GenesisConfig::new(value_setter_config, chain_state_config) +} + +/// Clones the [`AppTemplate`]'s [`Storage`] and extract the underlying [`WorkingSet`] +pub(crate) fn get_working_set( + app_template: &AppTemplate< + C, + Cond, + MockZkvm, + TestRuntime, + TestBlob<::Address>, + >, +) -> WorkingSet<::Storage> { + WorkingSet::new(app_template.current_storage.clone()) +} diff --git a/module-system/module-implementations/integration-tests/src/chain_state/mod.rs b/module-system/module-implementations/integration-tests/src/chain_state/mod.rs new file mode 100644 index 000000000..8018c49a3 --- /dev/null +++ b/module-system/module-implementations/integration-tests/src/chain_state/mod.rs @@ -0,0 +1,3 @@ +pub mod helpers; + +pub mod tests; diff --git a/module-system/module-implementations/integration-tests/src/chain_state/tests.rs b/module-system/module-implementations/integration-tests/src/chain_state/tests.rs new file mode 100644 index 000000000..a27974aa6 --- /dev/null +++ b/module-system/module-implementations/integration-tests/src/chain_state/tests.rs @@ -0,0 +1,172 @@ +use sov_chain_state::{StateTransitionId, TransitionInProgress}; +use sov_data_generators::value_setter_data::ValueSetterMessages; +use sov_data_generators::{has_tx_events, new_test_blob_from_batch, MessageGenerator}; +use sov_modules_api::default_context::DefaultContext; +use sov_modules_api::Spec; +use sov_modules_stf_template::{AppTemplate, SequencerOutcome}; +use sov_rollup_interface::mocks::{ + MockZkvm, TestBlob, TestBlock, TestBlockHeader, TestHash, TestValidityCond, +}; +use sov_rollup_interface::stf::StateTransitionFunction; +use sov_state::{ProverStorage, Storage}; + +use crate::chain_state::helpers::{create_demo_genesis_config, get_working_set, TestRuntime}; + +type C = DefaultContext; + +/// This test generates a new mock rollup having a simple value setter module +/// with an associated chain state, and checks that the height, the genesis hash +/// and the state transitions are correctly stored and updated. 
+#[test] +fn test_simple_value_setter_with_chain_state() { + // Build an app template with the module configurations + let runtime = TestRuntime::default(); + + let tmpdir = tempfile::tempdir().unwrap(); + + let storage: ProverStorage = + ProverStorage::with_path(tmpdir.path()).unwrap(); + + let mut app_template = AppTemplate::< + C, + TestValidityCond, + MockZkvm, + TestRuntime, + TestBlob<::Address>, + >::new(storage, runtime); + + let value_setter_messages = ValueSetterMessages::default(); + let value_setter = value_setter_messages.create_raw_txs::>(); + + let admin_pub_key = value_setter_messages.messages[0].admin.default_address(); + + // Genesis + let init_root_hash = app_template.init_chain(create_demo_genesis_config(admin_pub_key)); + + const MOCK_SEQUENCER_DA_ADDRESS: [u8; 32] = [1_u8; 32]; + + let blob = new_test_blob_from_batch( + sov_modules_stf_template::Batch { txs: value_setter }, + &MOCK_SEQUENCER_DA_ADDRESS, + [2; 32], + ); + + let slot_data: TestBlock = TestBlock { + curr_hash: [10; 32], + header: TestBlockHeader { + prev_hash: TestHash([0; 32]), + }, + height: 0, + validity_cond: TestValidityCond::default(), + }; + + // Computes the initial working set + let mut working_set = get_working_set(&app_template); + + // Check the slot height before apply slot + let new_height_storage: u64 = app_template + .runtime + .chain_state + .get_slot_height(&mut working_set); + + assert_eq!(new_height_storage, 0, "The initial height was not computed"); + + let result = app_template.apply_slot(Default::default(), &slot_data, &mut [blob.clone()]); + + assert_eq!(1, result.batch_receipts.len()); + let apply_blob_outcome = result.batch_receipts[0].clone(); + assert_eq!( + SequencerOutcome::Rewarded(0), + apply_blob_outcome.inner, + "Sequencer execution should have succeeded but failed " + ); + + // Computes the new working set after slot application + let mut working_set = get_working_set(&app_template); + let chain_state_ref = &app_template.runtime.chain_state; + + // Get the new state root hash + let new_root_hash = app_template + .current_storage + .get_state_root(&Default::default()); + + // Check that the root hash has been stored correctly + let stored_root: [u8; 32] = chain_state_ref.get_genesis_hash(&mut working_set).unwrap(); + + assert_eq!(stored_root, init_root_hash.0, "Root hashes don't match"); + + // Check the slot height + let new_height_storage: u64 = chain_state_ref.get_slot_height(&mut working_set); + + assert_eq!(new_height_storage, 1, "The new height did not update"); + + // Check the tx in progress + let new_tx_in_progress: TransitionInProgress = chain_state_ref + .get_in_progress_transition(&mut working_set) + .unwrap(); + + assert_eq!( + new_tx_in_progress, + TransitionInProgress::::new([10; 32], TestValidityCond::default()), + "The new transition has not been correctly stored" + ); + + assert!(has_tx_events(&apply_blob_outcome),); + + // We apply a new transaction with the same values + let new_slot_data: TestBlock = TestBlock { + curr_hash: [20; 32], + header: TestBlockHeader { + prev_hash: TestHash([10; 32]), + }, + height: 1, + validity_cond: TestValidityCond::default(), + }; + + let result = app_template.apply_slot(Default::default(), &new_slot_data, &mut [blob]); + + assert_eq!(1, result.batch_receipts.len()); + let apply_blob_outcome = result.batch_receipts[0].clone(); + assert_eq!( + SequencerOutcome::Rewarded(0), + apply_blob_outcome.inner, + "Sequencer execution should have succeeded but failed " + ); + + // Computes the new working set after slot 
application + let mut working_set = get_working_set(&app_template); + let chain_state_ref = &app_template.runtime.chain_state; + + // Check that the root hash has been stored correctly + let stored_root: [u8; 32] = chain_state_ref.get_genesis_hash(&mut working_set).unwrap(); + + assert_eq!(stored_root, init_root_hash.0, "Root hashes don't match"); + + // Check the slot height + let new_height_storage: u64 = chain_state_ref.get_slot_height(&mut working_set); + assert_eq!(new_height_storage, 2, "The new height did not update"); + + // Check the tx in progress + let new_tx_in_progress: TransitionInProgress = chain_state_ref + .get_in_progress_transition(&mut working_set) + .unwrap(); + + assert_eq!( + new_tx_in_progress, + TransitionInProgress::::new([20; 32], TestValidityCond::default()), + "The new transition has not been correctly stored" + ); + + let last_tx_stored: StateTransitionId = chain_state_ref + .get_historical_transitions(1, &mut working_set) + .unwrap(); + + assert_eq!( + last_tx_stored, + StateTransitionId::new( + [10; 32], + new_root_hash.unwrap(), + TestValidityCond::default() + ) + ); +} diff --git a/module-system/module-implementations/integration-tests/src/lib.rs b/module-system/module-implementations/integration-tests/src/lib.rs new file mode 100644 index 000000000..43b899ec3 --- /dev/null +++ b/module-system/module-implementations/integration-tests/src/lib.rs @@ -0,0 +1,5 @@ +#[cfg(test)] +mod chain_state; + +#[cfg(test)] +mod nested_modules; diff --git a/module-system/module-implementations/integration-tests/src/nested_modules/helpers.rs b/module-system/module-implementations/integration-tests/src/nested_modules/helpers.rs new file mode 100644 index 000000000..70122ab06 --- /dev/null +++ b/module-system/module-implementations/integration-tests/src/nested_modules/helpers.rs @@ -0,0 +1,82 @@ +use sov_modules_api::{Context, ModuleInfo}; +use sov_state::{StateMap, StateValue, WorkingSet}; + +pub mod module_a { + use super::*; + + #[derive(ModuleInfo)] + pub(crate) struct ModuleA { + #[address] + pub address_module_a: C::Address, + + #[state] + pub(crate) state_1_a: StateMap, + + #[state] + pub(crate) state_2_a: StateValue, + } + + impl ModuleA { + pub fn update(&mut self, key: &str, value: &str, working_set: &mut WorkingSet) { + working_set.add_event("module A", "update"); + self.state_1_a + .set(&key.to_owned(), &value.to_owned(), working_set); + self.state_2_a.set(&value.to_owned(), working_set) + } + } +} + +pub mod module_b { + use super::*; + + #[derive(ModuleInfo)] + pub(crate) struct ModuleB { + #[address] + pub address_module_b: C::Address, + + #[state] + state_1_b: StateMap, + + #[module] + pub(crate) mod_1_a: module_a::ModuleA, + } + + impl ModuleB { + pub fn update(&mut self, key: &str, value: &str, working_set: &mut WorkingSet) { + working_set.add_event("module B", "update"); + self.state_1_b + .set(&key.to_owned(), &value.to_owned(), working_set); + self.mod_1_a.update("key_from_b", value, working_set); + } + } +} + +pub(crate) mod module_c { + use super::*; + + #[derive(ModuleInfo)] + pub(crate) struct ModuleC { + #[address] + pub address: C::Address, + + #[module] + pub(crate) mod_1_a: module_a::ModuleA, + + #[module] + mod_1_b: module_b::ModuleB, + } + + impl ModuleC { + pub fn execute( + &mut self, + key: &str, + value: &str, + working_set: &mut WorkingSet, + ) { + working_set.add_event("module C", "execute"); + self.mod_1_a.update(key, value, working_set); + self.mod_1_b.update(key, value, working_set); + self.mod_1_a.update(key, value, working_set); + } + 
} +} diff --git a/module-system/module-implementations/integration-tests/src/nested_modules/mod.rs b/module-system/module-implementations/integration-tests/src/nested_modules/mod.rs new file mode 100644 index 000000000..e68e16bdf --- /dev/null +++ b/module-system/module-implementations/integration-tests/src/nested_modules/mod.rs @@ -0,0 +1,4 @@ +#[allow(dead_code)] +pub mod helpers; + +pub mod tests; diff --git a/module-system/module-implementations/integration-tests/src/nested_modules/tests.rs b/module-system/module-implementations/integration-tests/src/nested_modules/tests.rs new file mode 100644 index 000000000..5c5efc8f7 --- /dev/null +++ b/module-system/module-implementations/integration-tests/src/nested_modules/tests.rs @@ -0,0 +1,94 @@ +use sov_modules_api::default_context::{DefaultContext, ZkDefaultContext}; +use sov_modules_api::{Context, Prefix}; +use sov_rollup_interface::stf::Event; +use sov_state::{ProverStorage, StateMap, Storage, WorkingSet, ZkStorage}; + +use super::helpers::module_c; + +#[test] +fn nested_module_call_test() { + let tmpdir = tempfile::tempdir().unwrap(); + let native_storage = ProverStorage::with_path(tmpdir.path()).unwrap(); + let mut working_set = WorkingSet::new(native_storage.clone()); + + // Test the `native` execution. + { + execute_module_logic::(&mut working_set); + test_state_update::(&mut working_set); + } + assert_eq!( + working_set.events(), + &vec![ + Event::new("module C", "execute"), + Event::new("module A", "update"), + Event::new("module B", "update"), + Event::new("module A", "update"), + Event::new("module A", "update"), + ] + ); + + let (log, witness) = working_set.checkpoint().freeze(); + native_storage + .validate_and_commit(log, &witness) + .expect("State update is valid"); + + // Test the `zk` execution. 
+ { + let zk_storage = ZkStorage::new([0u8; 32]); + let working_set = &mut WorkingSet::with_witness(zk_storage, witness); + execute_module_logic::(working_set); + test_state_update::(working_set); + } +} + +fn execute_module_logic(working_set: &mut WorkingSet) { + let module = &mut module_c::ModuleC::::default(); + module.execute("some_key", "some_value", working_set); +} + +fn test_state_update(working_set: &mut WorkingSet) { + let module = as Default>::default(); + + let expected_value = "some_value".to_owned(); + + { + let prefix = Prefix::new_storage( + "integration_tests::nested_modules::helpers::module_a", + "ModuleA", + "state_1_a", + ); + let state_map = StateMap::::new(prefix.into()); + let value = state_map.get(&"some_key".to_owned(), working_set).unwrap(); + + assert_eq!(expected_value, value); + } + + { + let prefix = Prefix::new_storage( + "integration_tests::nested_modules::helpers::module_b", + "ModuleB", + "state_1_b", + ); + let state_map = StateMap::::new(prefix.into()); + let value = state_map.get(&"some_key".to_owned(), working_set).unwrap(); + + assert_eq!(expected_value, value); + } + + { + let prefix = Prefix::new_storage( + "integration_tests::nested_modules::helpers::module_a", + "ModuleA", + "state_1_a", + ); + let state_map = StateMap::::new(prefix.into()); + let value = state_map.get(&"some_key".to_owned(), working_set).unwrap(); + + assert_eq!(expected_value, value); + } + + { + let value = module.mod_1_a.state_2_a.get(working_set).unwrap(); + assert_eq!(expected_value, value); + } +} diff --git a/module-system/module-implementations/integration-tests/tests/nested_modules_tests.rs b/module-system/module-implementations/integration-tests/tests/nested_modules_tests.rs deleted file mode 100644 index f8774ed03..000000000 --- a/module-system/module-implementations/integration-tests/tests/nested_modules_tests.rs +++ /dev/null @@ -1,160 +0,0 @@ -use sov_modules_api::default_context::{DefaultContext, ZkDefaultContext}; -use sov_modules_api::{Context, ModuleInfo, Prefix}; -use sov_rollup_interface::stf::Event; -use sov_state::{ProverStorage, StateMap, StateValue, Storage, WorkingSet, ZkStorage}; - -pub mod module_a { - use super::*; - - #[derive(ModuleInfo)] - pub(crate) struct ModuleA { - #[address] - pub address_module_a: C::Address, - - #[state] - pub(crate) state_1_a: StateMap, - - #[state] - pub(crate) state_2_a: StateValue, - } - - impl ModuleA { - pub fn update(&mut self, key: &str, value: &str, working_set: &mut WorkingSet) { - working_set.add_event("module A", "update"); - self.state_1_a - .set(&key.to_owned(), &value.to_owned(), working_set); - self.state_2_a.set(&value.to_owned(), working_set) - } - } -} - -pub mod module_b { - use super::*; - - #[derive(ModuleInfo)] - pub(crate) struct ModuleB { - #[address] - pub address_module_b: C::Address, - - #[state] - state_1_b: StateMap, - - #[module] - pub(crate) mod_1_a: module_a::ModuleA, - } - - impl ModuleB { - pub fn update(&mut self, key: &str, value: &str, working_set: &mut WorkingSet) { - working_set.add_event("module B", "update"); - self.state_1_b - .set(&key.to_owned(), &value.to_owned(), working_set); - self.mod_1_a.update("key_from_b", value, working_set); - } - } -} - -mod module_c { - use super::*; - - #[derive(ModuleInfo)] - pub(crate) struct ModuleC { - #[address] - pub address: C::Address, - - #[module] - pub(crate) mod_1_a: module_a::ModuleA, - - #[module] - mod_1_b: module_b::ModuleB, - } - - impl ModuleC { - pub fn execute( - &mut self, - key: &str, - value: &str, - working_set: &mut WorkingSet, 
- ) { - working_set.add_event("module C", "execute"); - self.mod_1_a.update(key, value, working_set); - self.mod_1_b.update(key, value, working_set); - self.mod_1_a.update(key, value, working_set); - } - } -} - -#[test] -fn nested_module_call_test() { - let tmpdir = tempfile::tempdir().unwrap(); - let native_storage = ProverStorage::with_path(tmpdir.path()).unwrap(); - let mut working_set = WorkingSet::new(native_storage.clone()); - - // Test the `native` execution. - { - execute_module_logic::(&mut working_set); - test_state_update::(&mut working_set); - } - assert_eq!( - working_set.events(), - &vec![ - Event::new("module C", "execute"), - Event::new("module A", "update"), - Event::new("module B", "update"), - Event::new("module A", "update"), - Event::new("module A", "update"), - ] - ); - - let (log, witness) = working_set.checkpoint().freeze(); - native_storage - .validate_and_commit(log, &witness) - .expect("State update is valid"); - - // Test the `zk` execution. - { - let zk_storage = ZkStorage::new([0u8; 32]); - let working_set = &mut WorkingSet::with_witness(zk_storage, witness); - execute_module_logic::(working_set); - test_state_update::(working_set); - } -} - -fn execute_module_logic(working_set: &mut WorkingSet) { - let module = &mut module_c::ModuleC::::default(); - module.execute("some_key", "some_value", working_set); -} - -fn test_state_update(working_set: &mut WorkingSet) { - let module = as Default>::default(); - - let expected_value = "some_value".to_owned(); - - { - let prefix = Prefix::new_storage("nested_modules_tests::module_a", "ModuleA", "state_1_a"); - let state_map = StateMap::::new(prefix.into()); - let value = state_map.get(&"some_key".to_owned(), working_set).unwrap(); - - assert_eq!(expected_value, value); - } - - { - let prefix = Prefix::new_storage("nested_modules_tests::module_b", "ModuleB", "state_1_b"); - let state_map = StateMap::::new(prefix.into()); - let value = state_map.get(&"some_key".to_owned(), working_set).unwrap(); - - assert_eq!(expected_value, value); - } - - { - let prefix = Prefix::new_storage("nested_modules_tests::module_a", "ModuleA", "state_1_a"); - let state_map = StateMap::::new(prefix.into()); - let value = state_map.get(&"some_key".to_owned(), working_set).unwrap(); - - assert_eq!(expected_value, value); - } - - { - let value = module.mod_1_a.state_2_a.get(working_set).unwrap(); - assert_eq!(expected_value, value); - } -} diff --git a/module-system/module-implementations/module-template/src/tests.rs b/module-system/module-implementations/module-template/src/tests.rs index e58241478..ac5a16d22 100644 --- a/module-system/module-implementations/module-template/src/tests.rs +++ b/module-system/module-implementations/module-template/src/tests.rs @@ -12,9 +12,10 @@ fn test_value_setter() { let mut working_set = WorkingSet::new(ProverStorage::with_path(tmpdir.path()).unwrap()); let admin = Address::from([1; 32]); // Test Native-Context + #[cfg(feature = "native")] { let config = ExampleModuleConfig {}; - let context = DefaultContext::new(admin.clone()); + let context = DefaultContext::new(admin); test_value_setter_helper(context, &config, &mut working_set); } diff --git a/module-system/module-implementations/sov-accounts/src/tests.rs b/module-system/module-implementations/sov-accounts/src/tests.rs index 31471f096..c9b236b83 100644 --- a/module-system/module-implementations/sov-accounts/src/tests.rs +++ b/module-system/module-implementations/sov-accounts/src/tests.rs @@ -49,7 +49,7 @@ fn test_update_account() { let sender = 
priv_key.pub_key(); let sender_addr = sender.to_address::<::Address>(); - let sender_context = C::new(sender_addr.clone()); + let sender_context = C::new(sender_addr); // Test new account creation { @@ -142,7 +142,7 @@ fn test_get_account_after_pub_key_update() { let sender_1 = DefaultPrivateKey::generate().pub_key(); let sender_1_addr = sender_1.to_address::<::Address>(); - let sender_context_1 = C::new(sender_1_addr.clone()); + let sender_context_1 = C::new(sender_1_addr); accounts .create_default_account(sender_1, native_working_set) diff --git a/module-system/module-implementations/sov-bank/tests/burn_test.rs b/module-system/module-implementations/sov-bank/tests/burn_test.rs index 9f13d9373..757734759 100644 --- a/module-system/module-implementations/sov-bank/tests/burn_test.rs +++ b/module-system/module-implementations/sov-bank/tests/burn_test.rs @@ -21,9 +21,9 @@ fn burn_deployed_tokens() { bank.genesis(&empty_bank_config, &mut working_set).unwrap(); let sender_address = generate_address("just_sender"); - let sender_context = C::new(sender_address.clone()); + let sender_context = C::new(sender_address); let minter_address = generate_address("minter"); - let minter_context = C::new(minter_address.clone()); + let minter_context = C::new(minter_address); let salt = 0; let token_name = "Token1".to_owned(); @@ -36,8 +36,8 @@ fn burn_deployed_tokens() { salt, token_name, initial_balance, - minter_address: minter_address.clone(), - authorized_minters: vec![minter_address.clone()], + minter_address, + authorized_minters: vec![minter_address], }; bank.call(mint_message, &minter_context, &mut working_set) .expect("Failed to mint token"); @@ -45,14 +45,13 @@ fn burn_deployed_tokens() { assert!(working_set.events().is_empty()); let query_total_supply = |working_set: &mut WorkingSet| -> Option { - let total_supply: TotalSupplyResponse = - bank.supply_of(token_address.clone(), working_set).unwrap(); + let total_supply: TotalSupplyResponse = bank.supply_of(token_address, working_set).unwrap(); total_supply.amount }; let query_user_balance = |user_address: Address, working_set: &mut WorkingSet| -> Option { - bank.get_balance_of(user_address, token_address.clone(), working_set) + bank.get_balance_of(user_address, token_address, working_set) }; let previous_total_supply = query_total_supply(&mut working_set); @@ -64,7 +63,7 @@ fn burn_deployed_tokens() { let burn_message = CallMessage::Burn { coins: Coins { amount: burn_amount, - token_address: token_address.clone(), + token_address, }, }; @@ -74,7 +73,7 @@ fn burn_deployed_tokens() { let current_total_supply = query_total_supply(&mut working_set); assert_eq!(Some(initial_balance - burn_amount), current_total_supply); - let minter_balance = query_user_balance(minter_address.clone(), &mut working_set); + let minter_balance = query_user_balance(minter_address, &mut working_set); assert_eq!(Some(initial_balance - burn_amount), minter_balance); let previous_total_supply = current_total_supply; @@ -110,14 +109,14 @@ fn burn_deployed_tokens() { let burn_zero_message = CallMessage::Burn { coins: Coins { amount: 0, - token_address: token_address.clone(), + token_address, }, }; bank.call(burn_zero_message, &minter_context, &mut working_set) .expect("Failed to burn token"); assert!(working_set.events().is_empty()); - let minter_balance_after = query_user_balance(minter_address.clone(), &mut working_set); + let minter_balance_after = query_user_balance(minter_address, &mut working_set); assert_eq!(minter_balance, minter_balance_after); // --- @@ -125,7 +124,7 @@ 
fn burn_deployed_tokens() { let burn_message = CallMessage::Burn { coins: Coins { amount: initial_balance + 10, - token_address: token_address.clone(), + token_address, }, }; @@ -156,7 +155,7 @@ fn burn_deployed_tokens() { let burn_message = CallMessage::Burn { coins: Coins { amount: 1, - token_address: token_address.clone(), + token_address, }, }; @@ -193,25 +192,25 @@ fn burn_initial_tokens() { &bank_config.tokens[0].token_name, bank_config.tokens[0].salt, ); - let sender_address = bank_config.tokens[0].address_and_balances[0].0.clone(); + let sender_address = bank_config.tokens[0].address_and_balances[0].0; let query_user_balance = |user_address: Address, working_set: &mut WorkingSet| -> Option { - bank.get_balance_of(user_address, token_address.clone(), working_set) + bank.get_balance_of(user_address, token_address, working_set) }; - let balance_before = query_user_balance(sender_address.clone(), &mut working_set); + let balance_before = query_user_balance(sender_address, &mut working_set); assert_eq!(Some(initial_balance), balance_before); let burn_amount = 10; let burn_message = CallMessage::Burn { coins: Coins { amount: burn_amount, - token_address: token_address.clone(), + token_address, }, }; - let context = C::new(sender_address.clone()); + let context = C::new(sender_address); bank.call(burn_message, &context, &mut working_set) .expect("Failed to burn token"); assert!(working_set.events().is_empty()); diff --git a/module-system/module-implementations/sov-bank/tests/create_token_test.rs b/module-system/module-implementations/sov-bank/tests/create_token_test.rs index 0f7838595..b75abcf54 100644 --- a/module-system/module-implementations/sov-bank/tests/create_token_test.rs +++ b/module-system/module-implementations/sov-bank/tests/create_token_test.rs @@ -1,5 +1,5 @@ use sov_bank::{get_token_address, Bank, CallMessage}; -use sov_modules_api::test_utils::generate_address; +use sov_modules_api::utils::generate_address; use sov_modules_api::{Context, Module}; use sov_state::{ProverStorage, WorkingSet}; @@ -16,7 +16,7 @@ fn initial_and_deployed_token() { bank.genesis(&bank_config, &mut working_set).unwrap(); let sender_address = generate_address::("sender"); - let sender_context = C::new(sender_address.clone()); + let sender_context = C::new(sender_address); let minter_address = generate_address::("minter"); let initial_balance = 500; let token_name = "Token1".to_owned(); @@ -26,8 +26,8 @@ fn initial_and_deployed_token() { salt, token_name, initial_balance, - minter_address: minter_address.clone(), - authorized_minters: vec![minter_address.clone()], + minter_address, + authorized_minters: vec![minter_address], }; bank.call(create_token_message, &sender_context, &mut working_set) @@ -35,8 +35,7 @@ fn initial_and_deployed_token() { assert!(working_set.events().is_empty()); - let sender_balance = - bank.get_balance_of(sender_address, token_address.clone(), &mut working_set); + let sender_balance = bank.get_balance_of(sender_address, token_address, &mut working_set); assert!(sender_balance.is_none()); let minter_balance = bank.get_balance_of(minter_address, token_address, &mut working_set); diff --git a/module-system/module-implementations/sov-bank/tests/freeze_test.rs b/module-system/module-implementations/sov-bank/tests/freeze_test.rs index 030b5b433..a2a42b454 100644 --- a/module-system/module-implementations/sov-bank/tests/freeze_test.rs +++ b/module-system/module-implementations/sov-bank/tests/freeze_test.rs @@ -1,7 +1,7 @@ use helpers::C; use sov_bank::{get_token_address, Bank, 
BankConfig, CallMessage, Coins, TotalSupplyResponse}; use sov_modules_api::default_context::DefaultContext; -use sov_modules_api::test_utils::generate_address; +use sov_modules_api::utils::generate_address; use sov_modules_api::{Address, Context, Error, Module}; use sov_state::{DefaultStorageSpec, ProverStorage, WorkingSet}; @@ -18,7 +18,7 @@ fn freeze_token() { bank.genesis(&empty_bank_config, &mut working_set).unwrap(); let minter_address = generate_address::("minter"); - let minter_context = C::new(minter_address.clone()); + let minter_context = C::new(minter_address); let salt = 0; let token_name = "Token1".to_owned(); @@ -31,8 +31,8 @@ fn freeze_token() { salt, token_name: token_name.clone(), initial_balance, - minter_address: minter_address.clone(), - authorized_minters: vec![minter_address.clone()], + minter_address, + authorized_minters: vec![minter_address], }; let _minted = bank .call(mint_message, &minter_context, &mut working_set) @@ -42,9 +42,7 @@ fn freeze_token() { // ----- // Freeze - let freeze_message = CallMessage::Freeze { - token_address: token_address.clone(), - }; + let freeze_message = CallMessage::Freeze { token_address }; let _freeze = bank .call(freeze_message, &minter_context, &mut working_set) @@ -53,9 +51,7 @@ fn freeze_token() { // ---- // Try to freeze an already frozen token - let freeze_message = CallMessage::Freeze { - token_address: token_address.clone(), - }; + let freeze_message = CallMessage::Freeze { token_address }; let freeze = bank.call(freeze_message, &minter_context, &mut working_set); assert!(freeze.is_err()); @@ -84,8 +80,8 @@ fn freeze_token() { salt, token_name: token_name_2.clone(), initial_balance, - minter_address: minter_address.clone(), - authorized_minters: vec![minter_address.clone()], + minter_address, + authorized_minters: vec![minter_address], }; let _minted = bank .call(mint_message, &minter_context, &mut working_set) @@ -95,9 +91,9 @@ fn freeze_token() { // Try to freeze with a non authorized minter let unauthorized_address = generate_address::("unauthorized_address"); - let unauthorized_context = C::new(unauthorized_address.clone()); + let unauthorized_context = C::new(unauthorized_address); let freeze_message = CallMessage::Freeze { - token_address: token_address_2.clone(), + token_address: token_address_2, }; let freeze = bank.call(freeze_message, &unauthorized_context, &mut working_set); @@ -128,9 +124,9 @@ fn freeze_token() { let mint_message = CallMessage::Mint { coins: Coins { amount: mint_amount, - token_address: token_address.clone(), + token_address, }, - minter_address: new_holder.clone(), + minter_address: new_holder, }; let query_total_supply = |token_address: Address, @@ -166,9 +162,9 @@ fn freeze_token() { let mint_message = CallMessage::Mint { coins: Coins { amount: mint_amount, - token_address: token_address_2.clone(), + token_address: token_address_2, }, - minter_address: minter_address.clone(), + minter_address, }; let _minted = bank @@ -176,7 +172,7 @@ fn freeze_token() { .expect("Failed to mint token"); assert!(working_set.events().is_empty()); - let total_supply = query_total_supply(token_address_2.clone(), &mut working_set); + let total_supply = query_total_supply(token_address_2, &mut working_set); assert_eq!(Some(initial_balance + mint_amount), total_supply); let query_user_balance = diff --git a/module-system/module-implementations/sov-bank/tests/helpers/mod.rs b/module-system/module-implementations/sov-bank/tests/helpers/mod.rs index 485fc9138..d6de1abb4 100644 --- 
a/module-system/module-implementations/sov-bank/tests/helpers/mod.rs +++ b/module-system/module-implementations/sov-bank/tests/helpers/mod.rs @@ -1,6 +1,6 @@ use sov_bank::{BankConfig, TokenConfig}; use sov_modules_api::default_context::DefaultContext; -use sov_modules_api::test_utils::generate_address as gen_address_generic; +use sov_modules_api::utils::generate_address as gen_address_generic; use sov_modules_api::Address; pub type C = DefaultContext; diff --git a/module-system/module-implementations/sov-bank/tests/mint_test.rs b/module-system/module-implementations/sov-bank/tests/mint_test.rs index 7a38e1d58..c4ae01e1c 100644 --- a/module-system/module-implementations/sov-bank/tests/mint_test.rs +++ b/module-system/module-implementations/sov-bank/tests/mint_test.rs @@ -1,6 +1,6 @@ use helpers::C; use sov_bank::{get_token_address, Bank, BankConfig, CallMessage, Coins, TotalSupplyResponse}; -use sov_modules_api::test_utils::generate_address; +use sov_modules_api::utils::generate_address; use sov_modules_api::{Address, Context, Error, Module}; use sov_state::{DefaultStorageSpec, ProverStorage, WorkingSet}; @@ -17,7 +17,7 @@ fn mint_token() { bank.genesis(&empty_bank_config, &mut working_set).unwrap(); let minter_address = generate_address::("minter"); - let minter_context = C::new(minter_address.clone()); + let minter_context = C::new(minter_address); let salt = 0; let token_name = "Token1".to_owned(); @@ -30,8 +30,8 @@ fn mint_token() { salt, token_name: token_name.clone(), initial_balance, - minter_address: minter_address.clone(), - authorized_minters: vec![minter_address.clone()], + minter_address, + authorized_minters: vec![minter_address], }; let _minted = bank .call(mint_message, &minter_context, &mut working_set) @@ -48,10 +48,10 @@ fn mint_token() { let query_user_balance = |user_address: Address, working_set: &mut WorkingSet| -> Option { - bank.get_balance_of(user_address, token_address.clone(), working_set) + bank.get_balance_of(user_address, token_address, working_set) }; - let previous_total_supply = query_total_supply(token_address.clone(), &mut working_set); + let previous_total_supply = query_total_supply(token_address, &mut working_set); assert_eq!(Some(initial_balance), previous_total_supply); // ----- @@ -61,9 +61,9 @@ fn mint_token() { let mint_message = CallMessage::Mint { coins: Coins { amount: mint_amount, - token_address: token_address.clone(), + token_address, }, - minter_address: new_holder.clone(), + minter_address: new_holder, }; let _minted = bank @@ -71,20 +71,20 @@ fn mint_token() { .expect("Failed to mint token"); assert!(working_set.events().is_empty()); - let total_supply = query_total_supply(token_address.clone(), &mut working_set); + let total_supply = query_total_supply(token_address, &mut working_set); assert_eq!(Some(initial_balance + mint_amount), total_supply); // check user balance after minting - let balance = query_user_balance(new_holder.clone(), &mut working_set); + let balance = query_user_balance(new_holder, &mut working_set); assert_eq!(Some(10), balance); // check original token creation balance - let bal = query_user_balance(minter_address.clone(), &mut working_set); + let bal = query_user_balance(minter_address, &mut working_set); assert_eq!(Some(100), bal); // Mint with an un-authorized user let unauthorized_address = generate_address::("unauthorized_address"); - let unauthorized_context = C::new(unauthorized_address.clone()); + let unauthorized_context = C::new(unauthorized_address); let unauthorized_mint = bank.call(mint_message, 
&unauthorized_context, &mut working_set); assert!(unauthorized_mint.is_err()); @@ -124,11 +124,8 @@ fn mint_token() { salt, token_name: token_name.clone(), initial_balance, - minter_address: minter_address.clone(), - authorized_minters: vec![ - authorized_minter_address_1.clone(), - authorized_minter_address_2.clone(), - ], + minter_address, + authorized_minters: vec![authorized_minter_address_1, authorized_minter_address_2], }; let _minted = bank .call(mint_message, &minter_context, &mut working_set) @@ -142,9 +139,9 @@ fn mint_token() { let mint_message = CallMessage::Mint { coins: Coins { amount: mint_amount, - token_address: token_address.clone(), + token_address, }, - minter_address: new_holder.clone(), + minter_address: new_holder, }; let minted = bank.call(mint_message, &minter_context, &mut working_set); @@ -174,32 +171,32 @@ fn mint_token() { let mint_message = CallMessage::Mint { coins: Coins { amount: mint_amount, - token_address: token_address.clone(), + token_address, }, - minter_address: new_holder.clone(), + minter_address: new_holder, }; let _minted = bank .call(mint_message, &authorized_minter_2_context, &mut working_set) .expect("Failed to mint token"); - let supply = query_total_supply(token_address.clone(), &mut working_set); + let supply = query_total_supply(token_address, &mut working_set); assert!(working_set.events().is_empty()); assert_eq!(Some(110), supply); // Try to mint new token with authorized sender 1 - let authorized_minter_1_context = C::new(authorized_minter_address_1.clone()); + let authorized_minter_1_context = C::new(authorized_minter_address_1); let mint_message = CallMessage::Mint { coins: Coins { amount: mint_amount, - token_address: token_address.clone(), + token_address, }, - minter_address: new_holder.clone(), + minter_address: new_holder, }; let _minted = bank .call(mint_message, &authorized_minter_1_context, &mut working_set) .expect("Failed to mint token"); - let supply = query_total_supply(token_address.clone(), &mut working_set); + let supply = query_total_supply(token_address, &mut working_set); assert!(working_set.events().is_empty()); assert_eq!(Some(120), supply); @@ -207,9 +204,9 @@ fn mint_token() { let overflow_mint_message = CallMessage::Mint { coins: Coins { amount: u64::MAX, - token_address: token_address.clone(), + token_address, }, - minter_address: new_holder.clone(), + minter_address: new_holder, }; let minted = bank.call( @@ -238,7 +235,7 @@ fn mint_token() { message_2, ); // assert that the supply is unchanged after the overflow mint - let supply = query_total_supply(token_address.clone(), &mut working_set); + let supply = query_total_supply(token_address, &mut working_set); assert_eq!(Some(120), supply); // Overflow test 2 - total supply @@ -246,9 +243,9 @@ fn mint_token() { let overflow_mint_message = CallMessage::Mint { coins: Coins { amount: u64::MAX - 1, - token_address: token_address.clone(), + token_address, }, - minter_address: new_holder.clone(), + minter_address: new_holder, }; let minted = bank.call( diff --git a/module-system/module-implementations/sov-bank/tests/transfer_test.rs b/module-system/module-implementations/sov-bank/tests/transfer_test.rs index 662804e6e..c89fd9cc0 100644 --- a/module-system/module-implementations/sov-bank/tests/transfer_test.rs +++ b/module-system/module-implementations/sov-bank/tests/transfer_test.rs @@ -5,7 +5,7 @@ use sov_bank::{ get_genesis_token_address, get_token_address, Bank, BankConfig, CallMessage, Coins, TotalSupplyResponse, }; -use 
sov_modules_api::test_utils::generate_address; +use sov_modules_api::utils::generate_address; use sov_modules_api::{Address, Context, Error, Module}; use sov_state::{DefaultStorageSpec, ProverStorage, WorkingSet}; @@ -26,38 +26,37 @@ fn transfer_initial_token() { &bank_config.tokens[0].token_name, bank_config.tokens[0].salt, ); - let sender_address = bank_config.tokens[0].address_and_balances[0].0.clone(); - let receiver_address = bank_config.tokens[0].address_and_balances[1].0.clone(); + let sender_address = bank_config.tokens[0].address_and_balances[0].0; + let receiver_address = bank_config.tokens[0].address_and_balances[1].0; assert_ne!(sender_address, receiver_address); // Preparation let query_user_balance = |user_address: Address, working_set: &mut WorkingSet| -> Option { - bank.get_balance_of(user_address, token_address.clone(), working_set) + bank.get_balance_of(user_address, token_address, working_set) }; let query_total_supply = |working_set: &mut WorkingSet| -> Option { - let total_supply: TotalSupplyResponse = - bank.supply_of(token_address.clone(), working_set).unwrap(); + let total_supply: TotalSupplyResponse = bank.supply_of(token_address, working_set).unwrap(); total_supply.amount }; - let sender_balance_before = query_user_balance(sender_address.clone(), &mut working_set); - let receiver_balance_before = query_user_balance(receiver_address.clone(), &mut working_set); + let sender_balance_before = query_user_balance(sender_address, &mut working_set); + let receiver_balance_before = query_user_balance(receiver_address, &mut working_set); let total_supply_before = query_total_supply(&mut working_set); assert!(total_supply_before.is_some()); assert_eq!(Some(initial_balance), sender_balance_before); assert_eq!(sender_balance_before, receiver_balance_before); - let sender_context = C::new(sender_address.clone()); + let sender_context = C::new(sender_address); // Transfer happy test { let transfer_message = CallMessage::Transfer { - to: receiver_address.clone(), + to: receiver_address, coins: Coins { amount: transfer_amount, - token_address: token_address.clone(), + token_address, }, }; @@ -65,8 +64,8 @@ fn transfer_initial_token() { .expect("Transfer call failed"); assert!(working_set.events().is_empty()); - let sender_balance_after = query_user_balance(sender_address.clone(), &mut working_set); - let receiver_balance_after = query_user_balance(receiver_address.clone(), &mut working_set); + let sender_balance_after = query_user_balance(sender_address, &mut working_set); + let receiver_balance_after = query_user_balance(receiver_address, &mut working_set); assert_eq!( Some(initial_balance - transfer_amount), @@ -83,10 +82,10 @@ fn transfer_initial_token() { // Not enough balance { let transfer_message = CallMessage::Transfer { - to: receiver_address.clone(), + to: receiver_address, coins: Coins { amount: initial_balance + 1, - token_address: token_address.clone(), + token_address, }, }; @@ -128,10 +127,10 @@ fn transfer_initial_token() { let token_address = get_token_address::(&token_name, sender_address.as_ref(), salt); let transfer_message = CallMessage::Transfer { - to: receiver_address.clone(), + to: receiver_address, coins: Coins { amount: 1, - token_address: token_address.clone(), + token_address, }, }; @@ -156,19 +155,18 @@ fn transfer_initial_token() { // Sender does not exist { let unknown_sender = generate_address::("non_existing_sender"); - let unknown_sender_context = C::new(unknown_sender.clone()); + let unknown_sender_context = C::new(unknown_sender); - let 
sender_balance = query_user_balance(unknown_sender.clone(), &mut working_set); + let sender_balance = query_user_balance(unknown_sender, &mut working_set); assert!(sender_balance.is_none()); - let receiver_balance_before = - query_user_balance(receiver_address.clone(), &mut working_set); + let receiver_balance_before = query_user_balance(receiver_address, &mut working_set); let transfer_message = CallMessage::Transfer { - to: receiver_address.clone(), + to: receiver_address, coins: Coins { amount: 1, - token_address: token_address.clone(), + token_address, }, }; @@ -211,15 +209,14 @@ fn transfer_initial_token() { { let unknown_receiver = generate_address::("non_existing_receiver"); - let receiver_balance_before = - query_user_balance(unknown_receiver.clone(), &mut working_set); + let receiver_balance_before = query_user_balance(unknown_receiver, &mut working_set); assert!(receiver_balance_before.is_none()); let transfer_message = CallMessage::Transfer { - to: unknown_receiver.clone(), + to: unknown_receiver, coins: Coins { amount: 1, - token_address: token_address.clone(), + token_address, }, }; @@ -234,14 +231,14 @@ fn transfer_initial_token() { // Sender equals receiver { let total_supply_before = query_total_supply(&mut working_set); - let sender_balance_before = query_user_balance(sender_address.clone(), &mut working_set); + let sender_balance_before = query_user_balance(sender_address, &mut working_set); assert!(sender_balance_before.is_some()); let transfer_message = CallMessage::Transfer { - to: sender_address.clone(), + to: sender_address, coins: Coins { amount: 1, - token_address: token_address.clone(), + token_address, }, }; bank.call(transfer_message, &sender_context, &mut working_set) @@ -276,30 +273,29 @@ fn transfer_deployed_token() { // Preparation let query_user_balance = |user_address: Address, working_set: &mut WorkingSet| -> Option { - bank.get_balance_of(user_address, token_address.clone(), working_set) + bank.get_balance_of(user_address, token_address, working_set) }; let query_total_supply = |working_set: &mut WorkingSet| -> Option { - let total_supply: TotalSupplyResponse = - bank.supply_of(token_address.clone(), working_set).unwrap(); + let total_supply: TotalSupplyResponse = bank.supply_of(token_address, working_set).unwrap(); total_supply.amount }; - let sender_balance_before = query_user_balance(sender_address.clone(), &mut working_set); - let receiver_balance_before = query_user_balance(receiver_address.clone(), &mut working_set); + let sender_balance_before = query_user_balance(sender_address, &mut working_set); + let receiver_balance_before = query_user_balance(receiver_address, &mut working_set); let total_supply_before = query_total_supply(&mut working_set); assert!(total_supply_before.is_none()); assert!(sender_balance_before.is_none()); assert!(receiver_balance_before.is_none()); - let sender_context = C::new(sender_address.clone()); + let sender_context = C::new(sender_address); let mint_message = CallMessage::CreateToken { salt, token_name, initial_balance, - minter_address: sender_address.clone(), - authorized_minters: vec![sender_address.clone()], + minter_address: sender_address, + authorized_minters: vec![sender_address], }; bank.call(mint_message, &sender_context, &mut working_set) .expect("Failed to mint token"); @@ -308,18 +304,18 @@ fn transfer_deployed_token() { let total_supply_before = query_total_supply(&mut working_set); assert!(total_supply_before.is_some()); - let sender_balance_before = query_user_balance(sender_address.clone(), &mut 
working_set); - let receiver_balance_before = query_user_balance(receiver_address.clone(), &mut working_set); + let sender_balance_before = query_user_balance(sender_address, &mut working_set); + let receiver_balance_before = query_user_balance(receiver_address, &mut working_set); assert_eq!(Some(initial_balance), sender_balance_before); assert!(receiver_balance_before.is_none()); let transfer_amount = 15; let transfer_message = CallMessage::Transfer { - to: receiver_address.clone(), + to: receiver_address, coins: Coins { amount: transfer_amount, - token_address: token_address.clone(), + token_address, }, }; diff --git a/module-system/module-implementations/sov-chain-state/Cargo.toml b/module-system/module-implementations/sov-chain-state/Cargo.toml new file mode 100644 index 000000000..748cea796 --- /dev/null +++ b/module-system/module-implementations/sov-chain-state/Cargo.toml @@ -0,0 +1,38 @@ +[package] +name = "sov-chain-state" +authors = { workspace = true } +edition = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +repository = { workspace = true } +rust-version = { workspace = true } +version = { workspace = true } +readme = "README.md" +publish = false +resolver = "2" + +[dependencies] +anyhow = { workspace = true } +sov-modules-api = { path = "../../sov-modules-api", default-features = false } +sov-modules-macros = { path = "../../sov-modules-macros" } +sov-state = { path = "../../sov-state", default-features = false } +sov-rollup-interface = { path = "../../../rollup-interface" } +serde = { workspace = true, optional = true } +serde_json = { workspace = true, optional = true } +borsh = { workspace = true, features = ["rc"] } + +jsonrpsee = { workspace = true, features = ["macros", "client-core", "server"], optional = true } + +[dev-dependencies] +sov-modules-api = { path = "../../sov-modules-api" } +sov-bank = { path = "../sov-bank", default-features = false } +sov-value-setter = { path = "../examples/sov-value-setter" } +sov-state = { path = "../../sov-state", default-features = false } +sov-modules-stf-template = { path = "../../sov-modules-stf-template" } +sov-data-generators = { path = "../../utils/sov-data-generators" } +tempfile = { workspace = true } + +[features] +default = ["native"] +serde = ["dep:serde", "dep:serde_json"] +native = ["serde", "sov-modules-api/native", "dep:jsonrpsee"] diff --git a/module-system/module-implementations/sov-chain-state/README.md b/module-system/module-implementations/sov-chain-state/README.md new file mode 100644 index 000000000..a4cbbb2fb --- /dev/null +++ b/module-system/module-implementations/sov-chain-state/README.md @@ -0,0 +1,3 @@ +# Sov Chain State + +This module provides access to the current chain state (block height, block hash, etc.) 
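As a quick illustration of this API, a minimal sketch of reading the slot height right after genesis (assuming the DefaultContext/ProverStorage pairing, the TestValidityCond mock, and the tempfile dev-dependency used by the module's own tests further down; the helper name read_slot_height is hypothetical):

use sov_chain_state::{ChainState, ChainStateConfig};
use sov_modules_api::default_context::DefaultContext;
use sov_modules_api::Genesis;
use sov_rollup_interface::mocks::TestValidityCond;
use sov_state::{DefaultStorageSpec, ProverStorage, WorkingSet};

fn read_slot_height() -> u64 {
    // Back the working set with a throwaway ProverStorage, as the module's tests do.
    let tmpdir = tempfile::tempdir().unwrap();
    let storage: ProverStorage<DefaultStorageSpec> =
        ProverStorage::with_path(tmpdir.path()).unwrap();
    let mut working_set = WorkingSet::new(storage);

    // Initialize the module at an arbitrary starting height.
    let chain_state = ChainState::<DefaultContext, TestValidityCond>::default();
    let config = ChainStateConfig {
        initial_slot_height: 0,
    };
    chain_state.genesis(&config, &mut working_set).unwrap();

    // Query the current height through the module's native query API.
    chain_state.get_slot_height(&mut working_set)
}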
diff --git a/module-system/module-implementations/sov-chain-state/src/call.rs b/module-system/module-implementations/sov-chain-state/src/call.rs new file mode 100644 index 000000000..38e312664 --- /dev/null +++ b/module-system/module-implementations/sov-chain-state/src/call.rs @@ -0,0 +1,31 @@ +use borsh::{BorshDeserialize, BorshSerialize}; +use sov_rollup_interface::zk::ValidityCondition; +use sov_state::WorkingSet; + +use crate::{ChainState, StateTransitionId}; + +impl< + Ctx: sov_modules_api::Context, + Cond: ValidityCondition + BorshSerialize + BorshDeserialize, + > ChainState +{ + /// Increment the current slot height + pub(crate) fn increment_slot_height(&self, working_set: &mut WorkingSet) { + let current_height = self + .slot_height + .get(working_set) + .expect("Block height must be initialized"); + self.slot_height.set(&(current_height + 1), working_set); + } + + /// Store the previous state transition + pub(crate) fn store_state_transition( + &self, + height: u64, + transition: StateTransitionId, + working_set: &mut WorkingSet, + ) { + self.historical_transitions + .set(&height, &transition, working_set); + } +} diff --git a/module-system/module-implementations/sov-chain-state/src/genesis.rs b/module-system/module-implementations/sov-chain-state/src/genesis.rs new file mode 100644 index 000000000..1262079c1 --- /dev/null +++ b/module-system/module-implementations/sov-chain-state/src/genesis.rs @@ -0,0 +1,17 @@ +use anyhow::Result; +use sov_rollup_interface::zk::ValidityCondition; +use sov_state::WorkingSet; + +use crate::ChainState; + +impl ChainState { + pub(crate) fn init_module( + &self, + config: &::Config, + working_set: &mut WorkingSet, + ) -> Result<()> { + self.slot_height + .set(&config.initial_slot_height, working_set); + Ok(()) + } +} diff --git a/module-system/module-implementations/sov-chain-state/src/hooks.rs b/module-system/module-implementations/sov-chain-state/src/hooks.rs new file mode 100644 index 000000000..39b442916 --- /dev/null +++ b/module-system/module-implementations/sov-chain-state/src/hooks.rs @@ -0,0 +1,67 @@ +use sov_modules_api::hooks::SlotHooks; +use sov_modules_api::{Context, Spec}; +use sov_rollup_interface::services::da::SlotData; +use sov_rollup_interface::zk::ValidityCondition; +use sov_state::{Storage, WorkingSet}; + +use super::ChainState; +use crate::{StateTransitionId, TransitionInProgress}; + +impl SlotHooks for ChainState { + type Context = Ctx; + + fn begin_slot_hook( + &self, + slot: &impl SlotData, + working_set: &mut WorkingSet<::Storage>, + ) { + if self.genesis_hash.get(working_set).is_none() { + // The genesis hash is not set, hence this is the + // first transition right after the genesis block + self.genesis_hash.set( + &working_set + .backing() + .get_state_root(&Default::default()) + .expect("Should have a state root"), + working_set, + ) + } else { + let transition: StateTransitionId = { + let last_transition_in_progress = self + .in_progress_transition + .get(working_set) + .expect("There should always be a transition in progress"); + + StateTransitionId { + da_block_hash: last_transition_in_progress.da_block_hash, + post_state_root: working_set + .backing() + .get_state_root(&Default::default()) + .expect("Should have a state root"), + validity_condition: last_transition_in_progress.validity_condition, + } + }; + + self.store_state_transition( + self.slot_height + .get(working_set) + .expect("Block height must be set"), + transition, + working_set, + ); + } + + self.increment_slot_height(working_set); + let 
validity_condition = slot.validity_condition(); + + self.in_progress_transition.set( + &TransitionInProgress { + da_block_hash: slot.hash(), + validity_condition, + }, + working_set, + ); + } + + fn end_slot_hook(&self, _working_set: &mut WorkingSet<::Storage>) {} +} diff --git a/module-system/module-implementations/sov-chain-state/src/lib.rs b/module-system/module-implementations/sov-chain-state/src/lib.rs new file mode 100644 index 000000000..563e31eea --- /dev/null +++ b/module-system/module-implementations/sov-chain-state/src/lib.rs @@ -0,0 +1,146 @@ +#![deny(missing_docs)] +#![doc = include_str!("../README.md")] + +/// Contains the call methods used by the module +pub mod call; + +/// Genesis state configuration +pub mod genesis; + +/// Hook implementation for the module +pub mod hooks; + +#[cfg(test)] +pub mod tests; + +/// The query interface with the module +#[cfg(feature = "native")] +pub mod query; + +use borsh::{BorshDeserialize, BorshSerialize}; +use sov_modules_api::Error; +use sov_modules_macros::ModuleInfo; +use sov_rollup_interface::zk::{ValidityCondition, ValidityConditionChecker}; +use sov_state::WorkingSet; + +#[derive(BorshDeserialize, BorshSerialize, Clone, Debug, PartialEq, Eq)] +/// Structure that contains the information needed to represent a single state transition. +pub struct StateTransitionId { + da_block_hash: [u8; 32], + post_state_root: [u8; 32], + validity_condition: Cond, +} + +impl StateTransitionId { + /// Creates a new state transition. Only available for testing as we only want to create + /// new state transitions from existing [`TransitionInProgress`]. + pub fn new( + da_block_hash: [u8; 32], + post_state_root: [u8; 32], + validity_condition: Cond, + ) -> Self { + Self { + da_block_hash, + post_state_root, + validity_condition, + } + } +} + +impl StateTransitionId { + /// Compare the transition block hash and state root with the provided input couple. If + /// the pairs are equal, return [`true`]. + pub fn compare_hashes(&self, da_block_hash: &[u8; 32], post_state_root: &[u8; 32]) -> bool { + self.da_block_hash == *da_block_hash && self.post_state_root == *post_state_root + } + + /// Returns the post state root of a state transition + pub fn post_state_root(&self) -> [u8; 32] { + self.post_state_root + } + + /// Returns the da block hash of a state transition + pub fn da_block_hash(&self) -> [u8; 32] { + self.da_block_hash + } + + /// Checks the validity condition of a state transition + pub fn validity_condition_check>( + &self, + checker: &mut Checker, + ) -> Result<(), >::Error> { + checker.check(&self.validity_condition) + } +} + +#[derive(BorshDeserialize, BorshSerialize, Clone, Debug, PartialEq, Eq)] +/// Represents a transition in progress for the rollup. +pub struct TransitionInProgress { + da_block_hash: [u8; 32], + validity_condition: Cond, +} + +impl TransitionInProgress { + /// Creates a new transition in progress + pub fn new(da_block_hash: [u8; 32], validity_condition: Cond) -> Self { + Self { + da_block_hash, + validity_condition, + } + } +} + +/// A new module: +/// - Must derive `ModuleInfo` +/// - Must contain `[address]` field +/// - Can contain any number of ` #[state]` or `[module]` fields +#[derive(ModuleInfo)] +pub struct ChainState { + /// Address of the module. + #[address] + pub address: Ctx::Address, + + /// The current block height + #[state] + pub slot_height: sov_state::StateValue, + + /// A record of all previous state transitions which are available to the VM. 
+ /// Currently, this includes *all* historical state transitions, but that may change in the future. + /// This state map is delayed by one transition. In other words, the transition that happens at time i + /// is stored during transition i+1. This is mainly because this structure depends on the + /// rollup's root hash, which is only stored once the transition has completed. + #[state] + pub historical_transitions: sov_state::StateMap<u64, StateTransitionId<Cond>>, + + /// The transition that is currently being processed + #[state] + pub in_progress_transition: sov_state::StateValue<TransitionInProgress<Cond>>, + + /// The genesis root hash. + /// Set after the first transaction of the rollup is executed, using the `begin_slot` hook. + #[state] + pub genesis_hash: sov_state::StateValue<[u8; 32]>, +} + +/// Initial configuration of the chain state +pub struct ChainStateConfig { + /// Initial slot height + pub initial_slot_height: u64, +} + +impl<Ctx: sov_modules_api::Context, Cond: ValidityCondition> sov_modules_api::Module + for ChainState<Ctx, Cond> +{ + type Context = Ctx; + + type Config = ChainStateConfig; + + fn genesis( + &self, + config: &Self::Config, + working_set: &mut WorkingSet<Ctx::Storage>, + ) -> Result<(), Error> { + // The initialization logic + Ok(self.init_module(config, working_set)?) + } +} diff --git a/module-system/module-implementations/sov-chain-state/src/query.rs b/module-system/module-implementations/sov-chain-state/src/query.rs new file mode 100644 index 000000000..d84b93c2d --- /dev/null +++ b/module-system/module-implementations/sov-chain-state/src/query.rs @@ -0,0 +1,45 @@ +use sov_rollup_interface::zk::ValidityCondition; +use sov_state::WorkingSet; + +use super::ChainState; +use crate::{StateTransitionId, TransitionInProgress}; + +#[derive(serde::Serialize, serde::Deserialize, Debug, Eq, PartialEq)] +/// Structure returned by the query methods. +pub struct Response { + /// Value returned by the queries + pub value: u64, +} + +impl<Ctx: sov_modules_api::Context, Cond: ValidityCondition> ChainState<Ctx, Cond> { + /// Get the height of the current slot. + /// Panics if the slot height is not set + pub fn get_slot_height(&self, working_set: &mut WorkingSet<Ctx::Storage>) -> u64 { + self.slot_height + .get(working_set) + .expect("Slot height should be set at initialization") + } + + /// Return the genesis hash of the module. + pub fn get_genesis_hash(&self, working_set: &mut WorkingSet<Ctx::Storage>) -> Option<[u8; 32]> { + self.genesis_hash.get(working_set) + } + + /// Returns the transition in progress of the module. + pub fn get_in_progress_transition( + &self, + working_set: &mut WorkingSet<Ctx::Storage>, + ) -> Option<TransitionInProgress<Cond>> { + self.in_progress_transition.get(working_set) + } + + /// Returns the completed transition associated with the provided `transition_num`.
+ pub fn get_historical_transitions( + &self, + transition_num: u64, + working_set: &mut WorkingSet, + ) -> Option> { + self.historical_transitions + .get(&transition_num, working_set) + } +} diff --git a/module-system/module-implementations/sov-chain-state/src/tests.rs b/module-system/module-implementations/sov-chain-state/src/tests.rs new file mode 100644 index 000000000..073792c4a --- /dev/null +++ b/module-system/module-implementations/sov-chain-state/src/tests.rs @@ -0,0 +1,137 @@ +use sov_modules_api::default_context::DefaultContext; +use sov_modules_api::hooks::SlotHooks; +use sov_modules_api::Genesis; +use sov_rollup_interface::mocks::{TestBlock, TestBlockHeader, TestHash, TestValidityCond}; +use sov_state::{ProverStorage, Storage, WorkingSet}; + +use crate::{ChainState, ChainStateConfig, StateTransitionId, TransitionInProgress}; + +/// This simply tests that the chain_state reacts properly with the invocation of the `begin_slot` +/// hook. For more complete integration tests, feel free to have a look at the integration tests folder. +#[test] +fn test_simple_chain_state() { + // The initial height can be any value. + const INIT_HEIGHT: u64 = 10; + // Initialize the module. + let tmpdir = tempfile::tempdir().unwrap(); + + let storage: ProverStorage = + ProverStorage::with_path(tmpdir.path()).unwrap(); + + let mut working_set = WorkingSet::new(storage.clone()); + + let chain_state = ChainState::::default(); + let config = ChainStateConfig { + initial_slot_height: INIT_HEIGHT, + }; + + // Genesis, initialize and then commit the state + chain_state.genesis(&config, &mut working_set).unwrap(); + let (reads_writes, witness) = working_set.checkpoint().freeze(); + storage.validate_and_commit(reads_writes, &witness).unwrap(); + + // Computes the initial, post genesis, working set + let mut working_set = WorkingSet::new(storage.clone()); + + // Check the slot height before any changes to the state. + let initial_height: u64 = chain_state.get_slot_height(&mut working_set); + + assert_eq!( + initial_height, INIT_HEIGHT, + "The initial height was not computed" + ); + + // Then simulate a transaction execution: call the begin_slot hook on a mock slot_data. 
+ let slot_data = TestBlock { + curr_hash: [1; 32], + header: TestBlockHeader { + prev_hash: TestHash([0; 32]), + }, + height: INIT_HEIGHT, + validity_cond: TestValidityCond { is_valid: true }, + }; + + chain_state.begin_slot_hook(&slot_data, &mut working_set); + + // Check that the root hash has been stored correctly + let stored_root: [u8; 32] = chain_state.get_genesis_hash(&mut working_set).unwrap(); + let init_root_hash = storage.get_state_root(&Default::default()).unwrap(); + + assert_eq!(stored_root, init_root_hash, "Genesis hashes don't match"); + + // Check that the slot height have been updated + let new_height_storage: u64 = chain_state.get_slot_height(&mut working_set); + + assert_eq!( + new_height_storage, + INIT_HEIGHT + 1, + "The new height did not update" + ); + + // Check that the new state transition is being stored + let new_tx_in_progress: TransitionInProgress = chain_state + .get_in_progress_transition(&mut working_set) + .unwrap(); + + assert_eq!( + new_tx_in_progress, + TransitionInProgress::::new([1; 32], TestValidityCond { is_valid: true }), + "The new transition has not been correctly stored" + ); + + // We now commit the new state (which updates the root hash) + let (reads_writes, witness) = working_set.checkpoint().freeze(); + storage.validate_and_commit(reads_writes, &witness).unwrap(); + let new_root_hash = storage.get_state_root(&Default::default()); + + // Computes the new working set + let mut working_set = WorkingSet::new(storage); + + // And we simulate a new slot application by calling the `begin_slot` hook. + let new_slot_data = TestBlock { + curr_hash: [2; 32], + header: TestBlockHeader { + prev_hash: TestHash([1; 32]), + }, + height: INIT_HEIGHT, + validity_cond: TestValidityCond { is_valid: false }, + }; + + chain_state.begin_slot_hook(&new_slot_data, &mut working_set); + + // Check that the slot height have been updated correctly + let new_height_storage: u64 = chain_state.get_slot_height(&mut working_set); + assert_eq!( + new_height_storage, + INIT_HEIGHT + 2, + "The new height did not update" + ); + + // Check the transition in progress + let new_tx_in_progress: TransitionInProgress = chain_state + .get_in_progress_transition(&mut working_set) + .unwrap(); + + assert_eq!( + new_tx_in_progress, + TransitionInProgress::::new( + [2; 32], + TestValidityCond { is_valid: false } + ), + "The new transition has not been correctly stored" + ); + + // Check the transition stored + let last_tx_stored: StateTransitionId = chain_state + .get_historical_transitions(INIT_HEIGHT + 1, &mut working_set) + .unwrap(); + + assert_eq!( + last_tx_stored, + StateTransitionId::new( + [1; 32], + new_root_hash.unwrap(), + TestValidityCond { is_valid: true } + ) + ); +} diff --git a/module-system/module-implementations/sov-prover-incentives/src/tests.rs b/module-system/module-implementations/sov-prover-incentives/src/tests.rs index 8e68cf74f..c6118247d 100644 --- a/module-system/module-implementations/sov-prover-incentives/src/tests.rs +++ b/module-system/module-implementations/sov-prover-incentives/src/tests.rs @@ -21,8 +21,8 @@ fn create_bank_config() -> (sov_bank::BankConfig, ::Address) { let token_config = sov_bank::TokenConfig { token_name: "InitialToken".to_owned(), - address_and_balances: vec![(prover_address.clone(), BOND_AMOUNT * 5)], - authorized_minters: vec![prover_address.clone()], + address_and_balances: vec![(prover_address, BOND_AMOUNT * 5)], + authorized_minters: vec![prover_address], salt: 2, }; @@ -54,7 +54,7 @@ fn setup( bonding_token_address: 
token_address, minimum_bond: BOND_AMOUNT, commitment_of_allowed_verifier_method: MockCodeCommitment([0u8; 32]), - initial_provers: vec![(prover_address.clone(), BOND_AMOUNT)], + initial_provers: vec![(prover_address, BOND_AMOUNT)], }; module @@ -72,7 +72,7 @@ fn test_burn_on_invalid_proof() { // Assert that the prover has the correct bond amount before processing the proof assert_eq!( module - .get_bond_amount(prover_address.clone(), &mut working_set) + .get_bond_amount(prover_address, &mut working_set) .value, BOND_AMOUNT ); @@ -80,7 +80,7 @@ fn test_burn_on_invalid_proof() { // Process an invalid proof { let context = DefaultContext { - sender: prover_address.clone(), + sender: prover_address, }; let proof = MockProof { program_id: MOCK_CODE_COMMITMENT, @@ -110,7 +110,7 @@ fn test_valid_proof() { // Assert that the prover has the correct bond amount before processing the proof assert_eq!( module - .get_bond_amount(prover_address.clone(), &mut working_set) + .get_bond_amount(prover_address, &mut working_set) .value, BOND_AMOUNT ); @@ -118,7 +118,7 @@ fn test_valid_proof() { // Process a valid proof { let context = DefaultContext { - sender: prover_address.clone(), + sender: prover_address, }; let proof = MockProof { program_id: MOCK_CODE_COMMITMENT, @@ -145,7 +145,7 @@ fn test_unbonding() { let mut working_set = WorkingSet::new(ProverStorage::with_path(tmpdir.path()).unwrap()); let (module, prover_address) = setup(&mut working_set); let context = DefaultContext { - sender: prover_address.clone(), + sender: prover_address, }; let token_address = module .bonding_token_address @@ -155,7 +155,7 @@ fn test_unbonding() { // Assert that the prover has bonded tokens assert_eq!( module - .get_bond_amount(prover_address.clone(), &mut working_set) + .get_bond_amount(prover_address, &mut working_set) .value, BOND_AMOUNT ); @@ -164,11 +164,7 @@ fn test_unbonding() { let initial_unlocked_balance = { module .bank - .get_balance_of( - prover_address.clone(), - token_address.clone(), - &mut working_set, - ) + .get_balance_of(prover_address, token_address, &mut working_set) .unwrap_or_default() }; @@ -180,7 +176,7 @@ fn test_unbonding() { // Assert that the prover no longer has bonded tokens assert_eq!( module - .get_bond_amount(prover_address.clone(), &mut working_set) + .get_bond_amount(prover_address, &mut working_set) .value, 0 ); @@ -202,7 +198,7 @@ fn test_prover_not_bonded() { let mut working_set = WorkingSet::new(ProverStorage::with_path(tmpdir.path()).unwrap()); let (module, prover_address) = setup(&mut working_set); let context = DefaultContext { - sender: prover_address.clone(), + sender: prover_address, }; // Unbond the prover diff --git a/module-system/module-implementations/sov-sequencer-registry/tests/helpers/mod.rs b/module-system/module-implementations/sov-sequencer-registry/tests/helpers/mod.rs index 9072f9ace..27b7fd827 100644 --- a/module-system/module-implementations/sov-sequencer-registry/tests/helpers/mod.rs +++ b/module-system/module-implementations/sov-sequencer-registry/tests/helpers/mod.rs @@ -42,8 +42,8 @@ impl TestSequencer { working_set: &mut WorkingSet<::Storage>, ) -> RpcResult { self.bank.balance_of( - self.sequencer_config.seq_rollup_address.clone(), - self.sequencer_config.coins_to_lock.token_address.clone(), + self.sequencer_config.seq_rollup_address, + self.sequencer_config.coins_to_lock.token_address, working_set, ) } @@ -56,7 +56,7 @@ impl TestSequencer { ) -> RpcResult { self.bank.balance_of( user_address, - self.sequencer_config.coins_to_lock.token_address.clone(), 
+ self.sequencer_config.coins_to_lock.token_address, working_set, ) } @@ -68,7 +68,7 @@ pub fn create_bank_config() -> (sov_bank::BankConfig, ::Address) { let token_config = sov_bank::TokenConfig { token_name: "InitialToken".to_owned(), address_and_balances: vec![ - (seq_address.clone(), INITIAL_BALANCE), + (seq_address, INITIAL_BALANCE), (generate_address(ANOTHER_SEQUENCER_KEY), INITIAL_BALANCE), (generate_address(UNKNOWN_SEQUENCER_KEY), INITIAL_BALANCE), (generate_address(LOW_FUND_KEY), 3), diff --git a/module-system/module-implementations/sov-sequencer-registry/tests/sequencer_registry_test.rs b/module-system/module-implementations/sov-sequencer-registry/tests/sequencer_registry_test.rs index 15662d088..a8ccd1513 100644 --- a/module-system/module-implementations/sov-sequencer-registry/tests/sequencer_registry_test.rs +++ b/module-system/module-implementations/sov-sequencer-registry/tests/sequencer_registry_test.rs @@ -34,10 +34,10 @@ fn test_registration_lifecycle() { let da_address = ANOTHER_SEQUENCER_DA_ADDRESS.to_vec(); let sequencer_address = generate_address(ANOTHER_SEQUENCER_KEY); - let sender_context = C::new(sequencer_address.clone()); + let sender_context = C::new(sequencer_address); let balance_before = test_sequencer - .query_balance(sequencer_address.clone(), working_set) + .query_balance(sequencer_address, working_set) .unwrap() .amount .unwrap(); @@ -57,7 +57,7 @@ fn test_registration_lifecycle() { .expect("Sequencer registration has failed"); let balance_after_registration = test_sequencer - .query_balance(sequencer_address.clone(), working_set) + .query_balance(sequencer_address, working_set) .unwrap() .amount .unwrap(); @@ -68,7 +68,7 @@ fn test_registration_lifecycle() { .sequencer_address(da_address.clone(), working_set) .unwrap(); assert_eq!( - Some(sequencer_address.clone()), + Some(sequencer_address), registry_response_after_registration.address ); @@ -104,7 +104,7 @@ fn test_registration_not_enough_funds() { let da_address = ANOTHER_SEQUENCER_DA_ADDRESS.to_vec(); let sequencer_address = generate_address(LOW_FUND_KEY); - let sender_context = C::new(sequencer_address.clone()); + let sender_context = C::new(sequencer_address); let register_message = CallMessage::Register { da_address }; let response = test_sequencer @@ -155,7 +155,7 @@ fn test_registration_second_time() { let da_address = GENESIS_SEQUENCER_DA_ADDRESS.to_vec(); let sequencer_address = generate_address(GENESIS_SEQUENCER_KEY); - let sender_context = C::new(sequencer_address.clone()); + let sender_context = C::new(sequencer_address); let register_message = CallMessage::Register { da_address }; let response = test_sequencer diff --git a/module-system/sov-modules-api/src/dispatch.rs b/module-system/sov-modules-api/src/dispatch.rs index 8432615c8..c9a2886cf 100644 --- a/module-system/sov-modules-api/src/dispatch.rs +++ b/module-system/sov-modules-api/src/dispatch.rs @@ -1,6 +1,6 @@ use sov_state::WorkingSet; -use crate::{CallResponse, Context, Error, Spec}; +use crate::{CallResponse, Context, Error, Module, Spec}; /// Methods from this trait should be called only once during the rollup deployment. 
pub trait Genesis { @@ -37,6 +37,12 @@ pub trait DispatchCall { fn module_address(&self, message: &Self::Decodable) -> &::Address; } +/// A trait that specifies how a runtime should encode the data for each module +pub trait EncodeCall { + /// The encoding function + fn encode_call(data: M::CallMessage) -> Vec; +} + /// A trait that needs to be implemented for a *runtime* to be used with the CLI wallet #[cfg(feature = "native")] pub trait CliWallet: DispatchCall { diff --git a/module-system/sov-modules-api/src/hooks.rs b/module-system/sov-modules-api/src/hooks.rs index 4e1e55695..299692059 100644 --- a/module-system/sov-modules-api/src/hooks.rs +++ b/module-system/sov-modules-api/src/hooks.rs @@ -1,4 +1,6 @@ use sov_rollup_interface::da::BlobReaderTrait; +use sov_rollup_interface::services::da::SlotData; +use sov_rollup_interface::zk::ValidityCondition; use sov_state::WorkingSet; use crate::transaction::Transaction; @@ -50,3 +52,16 @@ pub trait ApplyBlobHooks { working_set: &mut WorkingSet<::Storage>, ) -> anyhow::Result<()>; } + +/// Hooks that execute during the `StateTransitionFunction::begin_slot` and `end_slot` functions. +pub trait SlotHooks { + type Context: Context; + + fn begin_slot_hook( + &self, + slot_data: &impl SlotData, + working_set: &mut WorkingSet<::Storage>, + ); + + fn end_slot_hook(&self, working_set: &mut WorkingSet<::Storage>); +} diff --git a/module-system/sov-modules-api/src/lib.rs b/module-system/sov-modules-api/src/lib.rs index 806235ccf..d96fce88d 100644 --- a/module-system/sov-modules-api/src/lib.rs +++ b/module-system/sov-modules-api/src/lib.rs @@ -10,7 +10,6 @@ pub mod hooks; mod prefix; mod response; mod serde_address; -pub mod test_utils; #[cfg(test)] mod tests; pub mod transaction; @@ -46,7 +45,7 @@ use borsh::{BorshDeserialize, BorshSerialize}; pub use clap; #[cfg(feature = "native")] pub use dispatch::CliWallet; -pub use dispatch::{DispatchCall, Genesis}; +pub use dispatch::{DispatchCall, EncodeCall, Genesis}; pub use error::Error; pub use prefix::Prefix; pub use response::CallResponse; @@ -66,7 +65,7 @@ impl AsRef<[u8]> for Address { impl AddressTrait for Address {} #[cfg_attr(feature = "native", derive(schemars::JsonSchema))] -#[derive(PartialEq, Clone, Eq, borsh::BorshDeserialize, borsh::BorshSerialize, Hash)] +#[derive(PartialEq, Clone, Copy, Eq, borsh::BorshDeserialize, borsh::BorshSerialize, Hash)] pub struct Address { addr: [u8; 32], } diff --git a/module-system/sov-modules-api/src/test_utils.rs b/module-system/sov-modules-api/src/test_utils.rs deleted file mode 100644 index 199ef0c5e..000000000 --- a/module-system/sov-modules-api/src/test_utils.rs +++ /dev/null @@ -1,6 +0,0 @@ -use crate::{Context, Digest, Spec}; - -pub fn generate_address(key: &str) -> ::Address { - let hash: [u8; 32] = ::Hasher::digest(key.as_bytes()).into(); - C::Address::from(hash) -} diff --git a/module-system/sov-modules-api/src/tests.rs b/module-system/sov-modules-api/src/tests.rs index 3b1517783..84085f5c0 100644 --- a/module-system/sov-modules-api/src/tests.rs +++ b/module-system/sov-modules-api/src/tests.rs @@ -71,11 +71,11 @@ fn test_sorting_modules() { }; let module_b = Module { address: Address::from([2; 32]), - dependencies: vec![module_a.address.clone()], + dependencies: vec![module_a.address], }; let module_c = Module { address: Address::from([3; 32]), - dependencies: vec![module_a.address.clone(), module_b.address.clone()], + dependencies: vec![module_a.address, module_b.address], }; let modules: Vec<(&dyn ModuleInfo, i32)> = @@ -91,11 +91,11 @@ fn 
test_sorting_modules_missing_module() { let module_a_address = Address::from([1; 32]); let module_b = Module { address: Address::from([2; 32]), - dependencies: vec![module_a_address.clone()], + dependencies: vec![module_a_address], }; let module_c = Module { address: Address::from([3; 32]), - dependencies: vec![module_a_address, module_b.address.clone()], + dependencies: vec![module_a_address, module_b.address], }; let modules: Vec<(&dyn ModuleInfo, i32)> = @@ -117,15 +117,15 @@ fn test_sorting_modules_cycle() { }; let module_b = Module { address: Address::from([2; 32]), - dependencies: vec![module_a.address.clone()], + dependencies: vec![module_a.address], }; let module_d = Module { address: Address::from([4; 32]), - dependencies: vec![module_e_address.clone()], + dependencies: vec![module_e_address], }; let module_e = Module { address: module_e_address, - dependencies: vec![module_a.address.clone(), module_d.address.clone()], + dependencies: vec![module_a.address, module_d.address], }; let modules: Vec<(&dyn ModuleInfo, i32)> = vec![ @@ -150,7 +150,7 @@ fn test_sorting_modules_duplicate() { }; let module_b = Module { address: Address::from([2; 32]), - dependencies: vec![module_a.address.clone()], + dependencies: vec![module_a.address], }; let module_a2 = Module { address: Address::from([1; 32]), diff --git a/module-system/sov-modules-api/src/transaction.rs b/module-system/sov-modules-api/src/transaction.rs index 011a0d501..d94b8cdd0 100644 --- a/module-system/sov-modules-api/src/transaction.rs +++ b/module-system/sov-modules-api/src/transaction.rs @@ -1,10 +1,6 @@ #[cfg(feature = "native")] -use crate::default_context::DefaultContext; -#[cfg(feature = "native")] -use crate::default_signature::private_key::DefaultPrivateKey; +use crate::PrivateKey; use crate::{Context, Signature}; -#[cfg(feature = "native")] -use crate::{PrivateKey, Spec}; /// A Transaction object that is compatible with the module-system/sov-default-stf. #[derive(Debug, PartialEq, Eq, Clone, borsh::BorshDeserialize, borsh::BorshSerialize)] @@ -45,9 +41,9 @@ impl Transaction { } #[cfg(feature = "native")] -impl Transaction { +impl Transaction { /// New signed transaction. - pub fn new_signed_tx(priv_key: &DefaultPrivateKey, mut message: Vec, nonce: u64) -> Self { + pub fn new_signed_tx(priv_key: &C::PrivateKey, mut message: Vec, nonce: u64) -> Self { // Since we own the message already, try to add the serialized nonce in-place. // This lets us avoid a copy if the message vec has at least 8 bytes of extra capacity. let orignal_length = message.len(); @@ -69,9 +65,9 @@ impl Transaction { /// New transaction. 
pub fn new( - pub_key: ::PublicKey, + pub_key: C::PublicKey, message: Vec, - signature: ::Signature, + signature: C::Signature, nonce: u64, ) -> Self { Self { diff --git a/module-system/sov-modules-api/src/utils.rs b/module-system/sov-modules-api/src/utils.rs index bb5d969f2..7635c5001 100644 --- a/module-system/sov-modules-api/src/utils.rs +++ b/module-system/sov-modules-api/src/utils.rs @@ -1,5 +1,7 @@ use jsonrpsee::types::ErrorObjectOwned; +use crate::{Context, Digest, Spec}; + /// Creates an jsonrpsee ErrorObject pub fn to_jsonrpsee_error_object(err: impl ToString, message: &str) -> ErrorObjectOwned { ErrorObjectOwned::owned( @@ -8,3 +10,8 @@ pub fn to_jsonrpsee_error_object(err: impl ToString, message: &str) -> ErrorObje Some(err.to_string()), ) } + +pub fn generate_address(key: &str) -> ::Address { + let hash: [u8; 32] = ::Hasher::digest(key.as_bytes()).into(); + C::Address::from(hash) +} diff --git a/module-system/sov-modules-macros/src/cli_parser.rs b/module-system/sov-modules-macros/src/cli_parser.rs index 320a86d10..2c70e6009 100644 --- a/module-system/sov-modules-macros/src/cli_parser.rs +++ b/module-system/sov-modules-macros/src/cli_parser.rs @@ -56,9 +56,6 @@ impl CliParserMacro { #field_name(<<#module_path as ::sov_modules_api::Module>::CallMessage as ::sov_modules_api::CliWalletArg>::CliStringRepr) }); - let field_name_string = field_name.to_string(); - let encode_function_name = format_ident!("encode_{}_call", field_name_string); - let type_name_string = match &field.ty { Type::Path(type_path) => extract_ident(type_path).to_string(), _ => { @@ -73,7 +70,7 @@ impl CliParserMacro { parse_match_arms.push(quote! { CliTransactionParser::#field_name(mod_args) => { let command_as_call_message: <#module_path as ::sov_modules_api::Module>::CallMessage = mod_args.into(); - #ident:: #ty_generics ::#encode_function_name( + <#ident:: #ty_generics as ::sov_modules_api::EncodeCall<#module_path>> ::encode_call( command_as_call_message ) }, @@ -105,8 +102,10 @@ impl CliParserMacro { // Build the `match` arms for the CLI's json parser match_arms.push(quote! { #type_name_string => Ok({ - #ident:: #ty_generics ::#encode_function_name( - ::serde_json::from_str::<<#module_path as ::sov_modules_api::Module>::CallMessage>(&call_data)? 
+ let _data: <#module_path as ::sov_modules_api::Module>::CallMessage = + ::serde_json::from_str::<<#module_path as ::sov_modules_api::Module>::CallMessage>(&call_data)?; + <#ident:: #ty_generics as ::sov_modules_api::EncodeCall<#module_path>> ::encode_call( + _data ) }), }); diff --git a/module-system/sov-modules-macros/src/dispatch/dispatch_call.rs b/module-system/sov-modules-macros/src/dispatch/dispatch_call.rs index 5da10e0a1..29336109e 100644 --- a/module-system/sov-modules-macros/src/dispatch/dispatch_call.rs +++ b/module-system/sov-modules-macros/src/dispatch/dispatch_call.rs @@ -57,7 +57,6 @@ impl<'a> StructDef<'a> { type Context = #generic_param; type Decodable = #call_enum #ty_generics; - fn decode_call(serialized_message: &[u8]) -> core::result::Result { let mut data = std::io::Cursor::new(serialized_message); <#call_enum #ty_generics as ::borsh::BorshDeserialize>::deserialize_reader(&mut data) diff --git a/module-system/sov-modules-macros/src/dispatch/message_codec.rs b/module-system/sov-modules-macros/src/dispatch/message_codec.rs index 54a13a395..57f27e915 100644 --- a/module-system/sov-modules-macros/src/dispatch/message_codec.rs +++ b/module-system/sov-modules-macros/src/dispatch/message_codec.rs @@ -1,45 +1,40 @@ use proc_macro2::{Span, TokenStream}; -use quote::format_ident; use syn::DeriveInput; use crate::common::{get_generics_type_param, StructDef, StructFieldExtractor, CALL}; impl<'a> StructDef<'a> { fn create_message_codec(&self) -> TokenStream { + let original_ident = &self.ident; let call_enum = self.enum_ident(CALL); let ty_generics = &self.type_generics; + let impl_generics = &self.impl_generics; + let where_clause = &self.where_clause; let fns = self.fields.iter().map(|field| { let variant = &field.ident; let ty = &field.ty; - let fn_call_name = format_ident!("encode_{}_call", &field.ident); - - - let call_doc = format!("Encodes {} call message.",field.ident); + let call_doc = format!("Encodes {} call message.", field.ident); // Creates functions like: // encode_*module_name*_call(data: ..) -> Vec // encode_*module_name*_query(data: ..) -> Vec quote::quote! { + impl #impl_generics sov_modules_api::EncodeCall<#ty> for #original_ident #ty_generics #where_clause { #[doc = #call_doc] - pub fn #fn_call_name(data: <#ty as sov_modules_api::Module>::CallMessage) -> std::vec::Vec { + fn encode_call(data: <#ty as sov_modules_api::Module>::CallMessage) -> std::vec::Vec { let call = #call_enum:: #ty_generics ::#variant(data); ::borsh::BorshSerialize::try_to_vec(&call).unwrap() } } + } }); - let original_ident = &self.ident; - let impl_generics = &self.impl_generics; - let where_clause = self.where_clause; - // Adds decoding functionality to the underlying type and // hides auto generated types behind impl DispatchCall. quote::quote! 
{ - impl #impl_generics #original_ident #ty_generics #where_clause { - #(#fns)* - } + #(#fns)* } } } diff --git a/module-system/sov-modules-macros/tests/dispatch/derive_dispatch.rs b/module-system/sov-modules-macros/tests/dispatch/derive_dispatch.rs index 41c977d76..38952914d 100644 --- a/module-system/sov-modules-macros/tests/dispatch/derive_dispatch.rs +++ b/module-system/sov-modules-macros/tests/dispatch/derive_dispatch.rs @@ -3,7 +3,9 @@ use modules::third_test_module::{self, ModuleThreeStorable}; use modules::{first_test_module, second_test_module}; use sov_modules_api::default_context::ZkDefaultContext; use sov_modules_api::macros::DefaultRuntime; -use sov_modules_api::{Address, Context, DispatchCall, Genesis, MessageCodec, ModuleInfo}; +use sov_modules_api::{ + Address, Context, DispatchCall, EncodeCall, Genesis, MessageCodec, ModuleInfo, +}; use sov_state::ZkStorage; #[derive(Genesis, DispatchCall, MessageCodec, DefaultRuntime)] @@ -31,7 +33,9 @@ fn main() { let value = 11; { let message = value; - let serialized_message = RT::encode_first_call(message); + let serialized_message = , + >>::encode_call(message); let module = RT::decode_call(&serialized_message).unwrap(); assert_eq!(runtime.module_address(&module), runtime.first.address()); @@ -48,7 +52,9 @@ fn main() { let value = 22; { let message = value; - let serialized_message = RT::encode_second_call(message); + let serialized_message = , + >>::encode_call(message); let module = RT::decode_call(&serialized_message).unwrap(); assert_eq!(runtime.module_address(&module), runtime.second.address()); diff --git a/module-system/sov-modules-stf-template/Cargo.toml b/module-system/sov-modules-stf-template/Cargo.toml index 577470e7a..7b8697c64 100644 --- a/module-system/sov-modules-stf-template/Cargo.toml +++ b/module-system/sov-modules-stf-template/Cargo.toml @@ -13,6 +13,7 @@ resolver = "2" [dependencies] anyhow = { workspace = true } +thiserror = { workspace = true } borsh = { workspace = true } serde = { workspace = true, features = ["derive"] } tracing = { workspace = true } diff --git a/module-system/sov-modules-stf-template/src/app_template.rs b/module-system/sov-modules-stf-template/src/app_template.rs index 8f2d828e5..f056188ea 100644 --- a/module-system/sov-modules-stf-template/src/app_template.rs +++ b/module-system/sov-modules-stf-template/src/app_template.rs @@ -4,6 +4,7 @@ use borsh::BorshDeserialize; use sov_modules_api::{Context, DispatchCall}; use sov_rollup_interface::da::{BlobReaderTrait, CountedBufReader}; use sov_rollup_interface::stf::{BatchReceipt, TransactionReceipt}; +use sov_rollup_interface::zk::ValidityCondition; use sov_rollup_interface::Buf; use sov_state::StateCheckpoint; use tracing::{debug, error}; @@ -16,13 +17,14 @@ type ApplyBatchResult = Result; /// An implementation of the /// [`StateTransitionFunction`](sov_rollup_interface::stf::StateTransitionFunction) /// that is specifically designed to work with the module-system. -pub struct AppTemplate { +pub struct AppTemplate, B> { /// State storage used by the rollup. pub current_storage: C::Storage, /// The runtime includes all the modules that the rollup supports. pub runtime: RT, pub(crate) checkpoint: Option>, phantom_vm: PhantomData, + phantom_cond: PhantomData, phantom_blob: PhantomData, } @@ -61,9 +63,10 @@ impl From for BatchReceipt { } } -impl AppTemplate +impl, B: BlobReaderTrait> + AppTemplate where - RT: Runtime, + RT: Runtime, { /// [`AppTemplate`] constructor. 
pub fn new(storage: C::Storage, runtime: RT) -> Self { @@ -72,6 +75,7 @@ where current_storage: storage, checkpoint: None, phantom_vm: PhantomData, + phantom_cond: PhantomData, phantom_blob: PhantomData, } } diff --git a/module-system/sov-modules-stf-template/src/lib.rs b/module-system/sov-modules-stf-template/src/lib.rs index cf5c1f3cd..2a29592f5 100644 --- a/module-system/sov-modules-stf-template/src/lib.rs +++ b/module-system/sov-modules-stf-template/src/lib.rs @@ -6,20 +6,22 @@ mod tx_verifier; pub use app_template::AppTemplate; pub use batch::Batch; -use sov_modules_api::hooks::{ApplyBlobHooks, TxHooks}; +use sov_modules_api::hooks::{ApplyBlobHooks, SlotHooks, TxHooks}; use sov_modules_api::{Context, DispatchCall, Genesis, Spec}; use sov_rollup_interface::da::BlobReaderTrait; +use sov_rollup_interface::services::da::SlotData; use sov_rollup_interface::stf::{SlotResult, StateTransitionFunction}; -use sov_rollup_interface::zk::Zkvm; -use sov_state::{StateCheckpoint, Storage}; +use sov_rollup_interface::zk::{ValidityCondition, Zkvm}; +use sov_state::{StateCheckpoint, Storage, WorkingSet}; use tracing::info; pub use tx_verifier::RawTx; /// This trait has to be implemented by a runtime in order to be used in `AppTemplate`. -pub trait Runtime: +pub trait Runtime: DispatchCall + Genesis + TxHooks + + SlotHooks + ApplyBlobHooks { } @@ -64,12 +66,23 @@ pub enum SlashingReason { InvalidTransactionEncoding, } -impl AppTemplate { - fn begin_slot(&mut self, witness: <::Storage as Storage>::Witness) { - self.checkpoint = Some(StateCheckpoint::with_witness( - self.current_storage.clone(), - witness, - )); +impl + AppTemplate +where + RT: Runtime, +{ + fn begin_slot( + &mut self, + slot_data: &impl SlotData, + witness: >::Witness, + ) { + let state_checkpoint = StateCheckpoint::with_witness(self.current_storage.clone(), witness); + + let mut working_set = state_checkpoint.to_revertable(); + + self.runtime.begin_slot_hook(slot_data, &mut working_set); + + self.checkpoint = Some(working_set.checkpoint()); } fn end_slot(&mut self) -> (jmt::RootHash, <::Storage as Storage>::Witness) { @@ -78,14 +91,19 @@ impl AppTemplate { .current_storage .validate_and_commit(cache_log, &witness) .expect("jellyfish merkle tree update must succeed"); + + let mut working_set = WorkingSet::new(self.current_storage.clone()); + + self.runtime.end_slot_hook(&mut working_set); + (jmt::RootHash(root_hash), witness) } } -impl StateTransitionFunction - for AppTemplate +impl + StateTransitionFunction for AppTemplate where - RT: Runtime, + RT: Runtime, { type StateRoot = jmt::RootHash; @@ -97,7 +115,9 @@ where type Witness = <::Storage as Storage>::Witness; - fn init_chain(&mut self, params: Self::InitialState) { + type Condition = Cond; + + fn init_chain(&mut self, params: Self::InitialState) -> jmt::RootHash { let mut working_set = StateCheckpoint::new(self.current_storage.clone()).to_revertable(); self.runtime @@ -105,14 +125,18 @@ where .expect("module initialization must succeed"); let (log, witness) = working_set.checkpoint().freeze(); - self.current_storage + let genesis_hash = self + .current_storage .validate_and_commit(log, &witness) .expect("Storage update must succeed"); + + jmt::RootHash(genesis_hash) } - fn apply_slot<'a, I>( + fn apply_slot<'a, I, Data>( &mut self, witness: Self::Witness, + slot_data: &Data, blobs: I, ) -> SlotResult< Self::StateRoot, @@ -122,8 +146,9 @@ where > where I: IntoIterator, + Data: SlotData, { - self.begin_slot(witness); + self.begin_slot(slot_data, witness); let mut batch_receipts = 
vec![]; for (blob_idx, blob) in blobs.into_iter().enumerate() { @@ -154,4 +179,10 @@ where witness, } } + + fn get_current_state_root(&self) -> anyhow::Result { + self.current_storage + .get_state_root(&Default::default()) + .map(jmt::RootHash) + } } diff --git a/module-system/sov-state/src/internal_cache.rs b/module-system/sov-state/src/internal_cache.rs index 34428225b..790dfd636 100644 --- a/module-system/sov-state/src/internal_cache.rs +++ b/module-system/sov-state/src/internal_cache.rs @@ -50,9 +50,7 @@ impl StorageInternalCache { let cache_value = self.get_value_from_cache(cache_key.clone()); match cache_value { - cache::ValueExists::Yes(cache_value_exists) => { - cache_value_exists.map(StorageValue::new_from_cache_value) - } + cache::ValueExists::Yes(cache_value_exists) => cache_value_exists.map(Into::into), // If the value does not exist in the cache, then fetch it from an external source. cache::ValueExists::No => { let storage_value = value_reader.get(key, witness); diff --git a/module-system/sov-state/src/lib.rs b/module-system/sov-state/src/lib.rs index 0002eea8d..e70c3aeaf 100644 --- a/module-system/sov-state/src/lib.rs +++ b/module-system/sov-state/src/lib.rs @@ -1,16 +1,26 @@ mod internal_cache; mod map; + #[cfg(feature = "native")] mod prover_storage; -mod scratchpad; -pub mod storage; + #[cfg(feature = "native")] mod tree_db; + +mod scratchpad; + +pub mod storage; + mod utils; mod value; mod witness; + +pub use value::SingletonKey; + mod zk_storage; +pub use zk_storage::ZkStorage; + pub mod config; #[cfg(test)] mod state_tests; @@ -23,10 +33,10 @@ pub use map::StateMap; pub use prover_storage::{delete_storage, ProverStorage}; pub use scratchpad::*; pub use sov_first_read_last_write_cache::cache::CacheLog; +use sov_rollup_interface::digest::Digest; pub use storage::Storage; use utils::AlignedVec; pub use value::StateValue; -pub use zk_storage::ZkStorage; pub use crate::witness::{ArrayWitness, TreeWitnessReader, Witness}; @@ -84,7 +94,7 @@ pub trait MerkleProofSpec { type Hasher: Digest; } -use sha2::{Digest, Sha256}; +use sha2::Sha256; #[derive(Clone)] pub struct DefaultStorageSpec; diff --git a/module-system/sov-state/src/prover_storage.rs b/module-system/sov-state/src/prover_storage.rs index 6bfaffc05..6c4a039d0 100644 --- a/module-system/sov-state/src/prover_storage.rs +++ b/module-system/sov-state/src/prover_storage.rs @@ -4,12 +4,12 @@ use std::path::Path; use std::sync::Arc; use jmt::storage::TreeWriter; -use jmt::{JellyfishMerkleTree, KeyHash}; +use jmt::{JellyfishMerkleTree, KeyHash, RootHash, Version}; use sov_db::state_db::StateDB; use crate::config::Config; use crate::internal_cache::OrderedReadsAndWrites; -use crate::storage::{StorageKey, StorageValue}; +use crate::storage::{NativeStorage, StorageKey, StorageProof, StorageValue}; use crate::tree_db::TreeReadLogger; use crate::witness::Witness; use crate::{MerkleProofSpec, Storage}; @@ -46,16 +46,23 @@ impl ProverStorage { .db .get_value_option_by_key(self.db.get_next_version(), key.as_ref()) { - Ok(value) => value.map(StorageValue::new_from_bytes), + Ok(value) => value.map(Into::into), // It is ok to panic here, we assume the db is available and consistent. 
Err(e) => panic!("Unable to read value from db: {e}"), } } + + fn get_root_hash(&self, version: Version) -> Result { + let temp_merkle: JellyfishMerkleTree<'_, StateDB, S::Hasher> = + JellyfishMerkleTree::new(&self.db); + temp_merkle.get_root_hash(version) + } } impl Storage for ProverStorage { type Witness = S::Witness; type RuntimeConfig = Config; + type Proof = jmt::proof::SparseMerkleProof; fn with_config(config: Self::RuntimeConfig) -> Result { Self::with_path(config.path.as_path()) @@ -67,6 +74,11 @@ impl Storage for ProverStorage { val } + fn get_state_root(&self, _witness: &Self::Witness) -> anyhow::Result<[u8; 32]> { + self.get_root_hash(self.db.get_next_version() - 1) + .map(|root| root.0) + } + fn validate_and_commit( &self, state_accesses: OrderedReadsAndWrites, @@ -138,6 +150,37 @@ impl Storage for ProverStorage { fn is_empty(&self) -> bool { self.db.get_next_version() <= 1 } + + fn open_proof( + &self, + state_root: [u8; 32], + state_proof: StorageProof, + ) -> Result<(StorageKey, Option), anyhow::Error> { + let StorageProof { key, value, proof } = state_proof; + let key_hash = KeyHash::with::(key.as_ref()); + + proof.verify( + jmt::RootHash(state_root), + key_hash, + value.as_ref().map(|v| v.value()), + )?; + Ok((key, value)) + } +} + +impl NativeStorage for ProverStorage { + type ValueWithProof = (Option, Self::Proof); + + fn get_with_proof(&self, key: StorageKey, _witness: &Self::Witness) -> Self::ValueWithProof { + let merkle = JellyfishMerkleTree::::new(&self.db); + let (val_opt, proof) = merkle + .get_with_proof( + KeyHash::with::(key.as_ref()), + self.db.get_next_version() - 1, + ) + .unwrap(); + (val_opt.as_ref().map(StorageValue::new), proof) + } } pub fn delete_storage(path: impl AsRef) { diff --git a/module-system/sov-state/src/scratchpad.rs b/module-system/sov-state/src/scratchpad.rs index f28f537e6..1773e3a16 100644 --- a/module-system/sov-state/src/scratchpad.rs +++ b/module-system/sov-state/src/scratchpad.rs @@ -53,6 +53,10 @@ impl StateCheckpoint { } } + pub fn get(&mut self, key: StorageKey) -> Option { + self.delta.get(key) + } + pub fn with_witness(inner: S, witness: S::Witness) -> Self { Self { delta: Delta::with_witness(inner, witness), @@ -125,7 +129,6 @@ impl WorkingSet { &self.events } - #[cfg(test)] pub fn backing(&self) -> &S { &self.delta.inner.inner } @@ -183,7 +186,7 @@ impl RevertableDelta { fn get(&mut self, key: StorageKey) -> Option { let key = key.as_cache_key(); if let Some(value) = self.writes.get(&key) { - return value.clone().map(StorageValue::new_from_cache_value); + return value.clone().map(Into::into); } self.inner.get(key.into()) } @@ -204,7 +207,7 @@ impl RevertableDelta { for (k, v) in self.writes.into_iter() { if let Some(v) = v { - inner.set(k.into(), StorageValue::new_from_cache_value(v)); + inner.set(k.into(), v.into()); } else { inner.delete(k.into()); } diff --git a/module-system/sov-state/src/state_tests.rs b/module-system/sov-state/src/state_tests.rs index a0e77cf9d..0e7ece162 100644 --- a/module-system/sov-state/src/state_tests.rs +++ b/module-system/sov-state/src/state_tests.rs @@ -1,7 +1,7 @@ use std::path::Path; use super::*; -use crate::{ArrayWitness, DefaultStorageSpec, ProverStorage}; +use crate::{DefaultStorageSpec, ProverStorage}; enum Operation { Merge, diff --git a/module-system/sov-state/src/storage.rs b/module-system/sov-state/src/storage.rs index 60dada67d..58e0617fc 100644 --- a/module-system/sov-state/src/storage.rs +++ b/module-system/sov-state/src/storage.rs @@ -3,6 +3,7 @@ use std::sync::Arc; use 
borsh::{BorshDeserialize, BorshSerialize}; use hex; +use serde::de::DeserializeOwned; use serde::{Deserialize, Serialize}; use sov_first_read_last_write_cache::{CacheKey, CacheValue}; @@ -12,7 +13,7 @@ use crate::witness::Witness; use crate::Prefix; // `Key` type for the `Storage` -#[derive(Clone, PartialEq, Eq, Debug)] +#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize, BorshDeserialize, BorshSerialize)] pub struct StorageKey { key: Arc>, } @@ -62,52 +63,85 @@ impl StorageKey { } } -// `Value` type for the `Storage` -#[derive(Clone, Debug, PartialEq, Eq, BorshSerialize, BorshDeserialize, Serialize, Deserialize)] +/// A serialized value suitable for storing. Internally uses an Arc> for cheap cloning. +#[derive( + Clone, Debug, PartialEq, Eq, BorshSerialize, BorshDeserialize, Serialize, Deserialize, Default, +)] pub struct StorageValue { value: Arc>, } +impl From for StorageValue { + fn from(cache_value: CacheValue) -> Self { + Self { + value: cache_value.value, + } + } +} + +impl From> for StorageValue { + fn from(value: Vec) -> Self { + Self { + value: Arc::new(value), + } + } +} + impl StorageValue { - pub fn new(value: &V) -> Self { + /// Create a new storage value by serializing the input + pub fn new(value: &impl BorshSerialize) -> Self { let encoded_value = value.try_to_vec().unwrap(); Self { value: Arc::new(encoded_value), } } + /// Get the bytes of this value. pub fn value(&self) -> &[u8] { &self.value } + /// Convert this value into a `CacheValue`. pub fn as_cache_value(self) -> CacheValue { CacheValue { value: self.value } } +} - pub fn new_from_cache_value(cache_value: CacheValue) -> Self { - Self { - value: cache_value.value, - } - } - - pub fn new_from_bytes(value: Vec) -> Self { - Self { - value: Arc::new(value), - } - } +#[derive(Debug, Clone, Serialize, Deserialize, BorshDeserialize, BorshSerialize)] +/// A proof that a particular storage key has a particular value, or is absent. +pub struct StorageProof
<P>
{ + /// The key which is proven + pub key: StorageKey, + /// The value, if any, which is proven + pub value: Option<StorageValue>, + /// The cryptographic proof + pub proof: P, } /// An interface for storing and retrieving values in the storage. pub trait Storage: Clone { + /// The witness type for this storage instance. type Witness: Witness; + /// The runtime config for this storage instance. type RuntimeConfig; + /// A cryptographic proof that a particular key has a particular value, or is absent. + type Proof: Serialize + + DeserializeOwned + + core::fmt::Debug + + Clone + + BorshSerialize + + BorshDeserialize; + fn with_config(config: Self::RuntimeConfig) -> Result<Self, anyhow::Error>; /// Returns the value corresponding to the key or None if key is absent. fn get(&self, key: StorageKey, witness: &Self::Witness) -> Option<StorageValue>; + /// Returns the latest state root hash from the storage. + fn get_state_root(&self, witness: &Self::Witness) -> anyhow::Result<[u8; 32]>; /// Validate all of the storage accesses in a particular cache log, /// returning the new state root after applying all writes fn validate_and_commit( @@ -116,6 +150,14 @@ pub trait Storage: Clone { witness: &Self::Witness, ) -> Result<[u8; 32], anyhow::Error>; + /// Opens a storage access proof and validates it against a state root. + /// It returns a result with the opened leaf (key, value) pair in case of success. + fn open_proof( + &self, + state_root: [u8; 32], + proof: StorageProof<Self::Proof>, + ) -> Result<(StorageKey, Option<StorageValue>), anyhow::Error>; + /// Indicates if storage is empty or not. /// Useful during initialization fn is_empty(&self) -> bool; @@ -140,3 +182,12 @@ impl From<&'static str> for StorageValue { } } + +pub trait NativeStorage: Storage { + /// The object returned by `get_with_proof`. Should contain the returned value and the associated proof + type ValueWithProof; + + /// Returns the value corresponding to the key or None if key is absent and a proof to + /// get the value. Panics if [`get_with_proof_opt`] returns `None` in place of the proof.
+ fn get_with_proof(&self, key: StorageKey, witness: &Self::Witness) -> Self::ValueWithProof; +} diff --git a/module-system/sov-state/src/zk_storage.rs b/module-system/sov-state/src/zk_storage.rs index 6f6021325..216ab5408 100644 --- a/module-system/sov-state/src/zk_storage.rs +++ b/module-system/sov-state/src/zk_storage.rs @@ -4,7 +4,7 @@ use std::sync::Arc; use jmt::{JellyfishMerkleTree, KeyHash, Version}; use crate::internal_cache::OrderedReadsAndWrites; -use crate::storage::{StorageKey, StorageValue}; +use crate::storage::{StorageKey, StorageProof, StorageValue}; use crate::witness::{TreeWitnessReader, Witness}; use crate::{MerkleProofSpec, Storage}; @@ -36,14 +36,20 @@ impl Storage for ZkStorage { type RuntimeConfig = [u8; 32]; + type Proof = jmt::proof::SparseMerkleProof; + fn with_config(config: Self::RuntimeConfig) -> Result { Ok(Self::new(config)) } - fn get(&self, _key: StorageKey, witness: &S::Witness) -> Option { + fn get(&self, _key: StorageKey, witness: &Self::Witness) -> Option { witness.get_hint() } + fn get_state_root(&self, witness: &Self::Witness) -> anyhow::Result<[u8; 32]> { + Ok(witness.get_hint()) + } + fn validate_and_commit( &self, state_accesses: OrderedReadsAndWrites, @@ -94,4 +100,20 @@ impl Storage for ZkStorage { fn is_empty(&self) -> bool { unimplemented!("Needs simplification in JellyfishMerkleTree: https://github.com/Sovereign-Labs/sovereign-sdk/issues/362") } + + fn open_proof( + &self, + state_root: [u8; 32], + state_proof: StorageProof, + ) -> Result<(StorageKey, Option), anyhow::Error> { + let StorageProof { key, value, proof } = state_proof; + let key_hash = KeyHash::with::(key.as_ref()); + + proof.verify( + jmt::RootHash(state_root), + key_hash, + value.as_ref().map(|v| v.value()), + )?; + Ok((key, value)) + } } diff --git a/module-system/utils/sov-data-generators/Cargo.toml b/module-system/utils/sov-data-generators/Cargo.toml new file mode 100644 index 000000000..95a7c0a8e --- /dev/null +++ b/module-system/utils/sov-data-generators/Cargo.toml @@ -0,0 +1,38 @@ +[package] +name = "sov-data-generators" +description = "A set of generator utils used to automatically produce and serialize transaction data" +authors = { workspace = true } +edition = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +repository = { workspace = true } +rust-version = { workspace = true } +version = { workspace = true } +readme = "README.md" +resolver = "2" + + +[dependencies] +sov-modules-api = { path = "../../sov-modules-api", default-features = false } +sov-modules-stf-template = { path = "../../sov-modules-stf-template" } +sov-value-setter = { path = "../../module-implementations/examples/sov-value-setter", default-features = false } +sov-election = { path = "../../module-implementations/examples/sov-election", default-features = false } +sov-bank = { path = "../../module-implementations/sov-bank", default-features = false } +sov-state = { path = "../../sov-state", default-features = false } +sov-rollup-interface = { path = "../../../rollup-interface", features = ["mocks"] } + +borsh = { workspace = true } + +[dev-dependencies] +proptest = { workspace = true } + +[features] +default = ["mocks", "native"] +mocks = [] +native = [ + "sov-modules-api/native", + "sov-state/native", + "sov-bank/native", + "sov-election/native", + "sov-value-setter/native", +] \ No newline at end of file diff --git a/module-system/utils/sov-data-generators/src/bank_data.rs b/module-system/utils/sov-data-generators/src/bank_data.rs new file mode 100644 index 
000000000..7dd14a5d7 --- /dev/null +++ b/module-system/utils/sov-data-generators/src/bank_data.rs @@ -0,0 +1,122 @@ +use std::rc::Rc; + +use sov_bank::{get_token_address, Bank, CallMessage, Coins}; +use sov_modules_api::default_context::DefaultContext; +use sov_modules_api::default_signature::private_key::DefaultPrivateKey; +use sov_modules_api::transaction::Transaction; +use sov_modules_api::utils::generate_address; +use sov_modules_api::{Context, EncodeCall, Module, PrivateKey, Spec}; + +use crate::{Message, MessageGenerator}; + +pub struct TransferData { + pub sender_pkey: Rc, + pub receiver_address: ::Address, + pub token_address: ::Address, + pub transfer_amount: u64, +} + +pub struct MintData { + pub token_name: String, + pub salt: u64, + pub initial_balance: u64, + pub minter_address: ::Address, + pub minter_pkey: Rc, + pub authorized_minters: Vec<::Address>, +} + +pub struct BankMessageGenerator { + pub token_mint_txs: Vec>, + pub transfer_txs: Vec>, +} + +impl Default for BankMessageGenerator { + fn default() -> Self { + let minter_address = generate_address::("just_sender"); + let salt = 10; + let token_name = "Token1".to_owned(); + let mint_data = MintData { + token_name: token_name.clone(), + salt, + initial_balance: 1000, + minter_address, + minter_pkey: Rc::new(DefaultPrivateKey::generate()), + authorized_minters: Vec::from([minter_address]), + }; + Self { + token_mint_txs: Vec::from([mint_data]), + transfer_txs: Vec::from([TransferData { + sender_pkey: Rc::new(DefaultPrivateKey::generate()), + transfer_amount: 15, + receiver_address: generate_address::("just_receiver"), + token_address: get_token_address::( + &token_name, + minter_address.as_ref(), + salt, + ), + }]), + } + } +} + +pub(crate) fn mint_token_tx(mint_data: &MintData) -> CallMessage { + CallMessage::CreateToken { + salt: mint_data.salt, + token_name: mint_data.token_name.clone(), + initial_balance: mint_data.initial_balance, + minter_address: mint_data.minter_address.clone(), + authorized_minters: mint_data.authorized_minters.clone(), + } +} + +pub(crate) fn transfer_token_tx(transfer_data: &TransferData) -> CallMessage { + CallMessage::Transfer { + to: transfer_data.receiver_address.clone(), + coins: Coins { + amount: transfer_data.transfer_amount, + token_address: transfer_data.token_address.clone(), + }, + } +} + +impl MessageGenerator for BankMessageGenerator { + type Module = Bank; + type Context = C; + + fn create_messages(&self) -> Vec> { + let mut messages = Vec::>>::new(); + + let mut nonce = 0; + + for mint_message in &self.token_mint_txs { + messages.push(Message::new( + mint_message.minter_pkey.clone(), + mint_token_tx::(mint_message), + nonce, + )); + nonce += 1; + } + + for transfer_message in &self.transfer_txs { + messages.push(Message::new( + transfer_message.sender_pkey.clone(), + transfer_token_tx::(transfer_message), + nonce, + )); + nonce += 1; + } + + messages + } + + fn create_tx>( + &self, + sender: &::PrivateKey, + message: ::CallMessage, + nonce: u64, + _is_last: bool, + ) -> sov_modules_api::transaction::Transaction { + let message = Encoder::encode_call(message); + Transaction::::new_signed_tx(sender, message, nonce) + } +} diff --git a/module-system/utils/sov-data-generators/src/election_data.rs b/module-system/utils/sov-data-generators/src/election_data.rs new file mode 100644 index 000000000..751a1cf98 --- /dev/null +++ b/module-system/utils/sov-data-generators/src/election_data.rs @@ -0,0 +1,296 @@ +use std::marker::PhantomData; +use std::rc::Rc; + +use sov_election::Election; 
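// Illustrative usage sketch for the bank generator above: build a Borsh-serialized test blob
// from the default mint + transfer messages. It assumes `Batch` exposes a public
// `txs: Vec<RawTx>` field and that `Default` is implemented for
// `BankMessageGenerator<DefaultContext>`, as in the impl above; the function name and the
// 32-byte test address are arbitrary.
use sov_bank::Bank;
use sov_data_generators::bank_data::BankMessageGenerator;
use sov_data_generators::{new_test_blob_from_batch, MessageGenerator};
use sov_modules_api::default_context::DefaultContext;
use sov_modules_api::EncodeCall;
use sov_modules_stf_template::Batch;

fn default_bank_blob<RT: EncodeCall<Bank<DefaultContext>>>() {
    let generator = BankMessageGenerator::<DefaultContext>::default();
    // One `CreateToken` message followed by one `Transfer`, signed and Borsh-encoded.
    let txs = generator.create_raw_txs::<RT>();
    // Any 32-byte sequencer address and blob hash are good enough for tests.
    let _blob = new_test_blob_from_batch(Batch { txs }, &[11u8; 32], [0u8; 32]);
}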
+use sov_modules_api::{EncodeCall, PrivateKey, PublicKey}; + +use super::*; + +struct CallGenerator { + election_admin_nonce: u64, + election_admin: Rc, + voters: Vec>, + phantom_context: PhantomData, +} + +impl CallGenerator { + fn new(election_admin: Rc) -> Self { + let voters = vec![ + Rc::new(C::PrivateKey::generate()), + Rc::new(C::PrivateKey::generate()), + Rc::new(C::PrivateKey::generate()), + ]; + Self { + election_admin_nonce: 0, + election_admin, + voters, + phantom_context: Default::default(), + } + } + + fn inc_nonce(&mut self) { + self.election_admin_nonce += 1; + } + + fn create_voters_and_vote(&mut self) -> Vec>> { + let mut messages = Vec::default(); + + let set_candidates_message = sov_election::CallMessage::SetCandidates { + names: vec!["candidate_1".to_owned(), "candidate_2".to_owned()], + }; + + messages.push(Message::new( + self.election_admin.clone(), + set_candidates_message, + self.election_admin_nonce, + )); + self.inc_nonce(); + + for voter in self.voters.clone() { + let add_voter_message = + sov_election::CallMessage::AddVoter(voter.pub_key().to_address()); + + messages.push(Message::new( + self.election_admin.clone(), + add_voter_message, + self.election_admin_nonce, + )); + + let vote_message = sov_election::CallMessage::Vote(1); + messages.push(Message::new(voter, vote_message, 0)); + self.inc_nonce(); + } + + messages + } + + fn freeze_vote(&mut self) -> Vec>> { + let mut messages = Vec::default(); + + let freeze_message = sov_election::CallMessage::FreezeElection; + messages.push(Message::new( + self.election_admin.clone(), + freeze_message, + self.election_admin_nonce, + )); + self.inc_nonce(); + + messages + } + + fn all_messages(&mut self) -> Vec>> { + let mut messages = Vec::default(); + + messages.extend(self.create_voters_and_vote()); + messages.extend(self.freeze_vote()); + messages + } +} + +pub struct ElectionCallMessages { + election_admin: Rc, + phantom_context: PhantomData, +} + +impl ElectionCallMessages { + pub fn new(election_admin: C::PrivateKey) -> Self { + Self { + election_admin: Rc::new(election_admin), + phantom_context: Default::default(), + } + } +} + +impl MessageGenerator for ElectionCallMessages { + type Module = Election; + type Context = C; + + fn create_messages(&self) -> Vec>> { + let call_generator = &mut CallGenerator::new(self.election_admin.clone()); + call_generator.all_messages() + } + + fn create_tx>( + &self, + sender: &C::PrivateKey, + message: ::CallMessage, + nonce: u64, + _is_last: bool, + ) -> Transaction { + let message = Encoder::encode_call(message); + Transaction::::new_signed_tx(sender, message, nonce) + } +} + +pub struct InvalidElectionCallMessages { + election_admin: Rc, + phantom_context: PhantomData, +} + +impl InvalidElectionCallMessages { + pub fn new(election_admin: C::PrivateKey) -> Self { + Self { + election_admin: Rc::new(election_admin), + phantom_context: Default::default(), + } + } +} + +impl MessageGenerator for InvalidElectionCallMessages { + type Module = Election; + type Context = C; + + fn create_messages(&self) -> Vec>> { + let call_generator = &mut CallGenerator::new(self.election_admin.clone()); + + let mut messages = Vec::default(); + + messages.extend(call_generator.create_voters_and_vote()); + + // Additional invalid message: This voter already voted. 
+ { + // Need to do the cloning in two steps because type inference doesn't work otherwise + let voter_ref: &Rc<::PrivateKey> = &call_generator.voters[0]; + let voter = voter_ref.clone(); + let vote_message = sov_election::CallMessage::Vote(1); + messages.push(Message::new(voter, vote_message, 1)); + } + + messages.extend(call_generator.freeze_vote()); + messages + } + + fn create_tx>( + &self, + sender: &C::PrivateKey, + message: as Module>::CallMessage, + nonce: u64, + _is_last: bool, + ) -> Transaction { + let message = Encoder::encode_call(message); + Transaction::::new_signed_tx(sender, message, nonce) + } +} + +pub struct BadSigElectionCallMessages { + election_admin: Rc, + phantom_context: PhantomData, +} + +impl BadSigElectionCallMessages { + pub fn new(election_admin: C::PrivateKey) -> Self { + Self { + election_admin: Rc::new(election_admin), + phantom_context: Default::default(), + } + } +} + +impl MessageGenerator for BadSigElectionCallMessages { + type Module = Election; + type Context = C; + + fn create_messages(&self) -> Vec>> { + let call_generator = &mut CallGenerator::new(self.election_admin.clone()); + call_generator.all_messages() + } + + fn create_tx>( + &self, + sender: &C::PrivateKey, + message: as Module>::CallMessage, + nonce: u64, + is_last: bool, + ) -> Transaction { + let message = Encoder::encode_call(message); + + if is_last { + let tx = Transaction::::new_signed_tx(sender, message.clone(), nonce); + Transaction::new( + C::PrivateKey::generate().pub_key(), + message, + tx.signature().clone(), + nonce, + ) + } else { + Transaction::::new_signed_tx(sender, message, nonce) + } + } +} + +pub struct BadNonceElectionCallMessages { + election_admin: Rc, + phantom_context: PhantomData, +} + +impl BadNonceElectionCallMessages { + pub fn new(election_admin: C::PrivateKey) -> Self { + Self { + election_admin: Rc::new(election_admin), + phantom_context: Default::default(), + } + } +} + +impl MessageGenerator for BadNonceElectionCallMessages { + type Module = Election; + type Context = C; + + fn create_messages(&self) -> Vec>> { + let call_generator = &mut CallGenerator::new(self.election_admin.clone()); + call_generator.all_messages() + } + + fn create_tx>( + &self, + sender: &C::PrivateKey, + message: as Module>::CallMessage, + nonce: u64, + flag: bool, + ) -> Transaction { + let nonce = if flag { nonce + 1 } else { nonce }; + + let message = Encoder::encode_call(message); + Transaction::::new_signed_tx(sender, message, nonce) + } +} + +pub struct BadSerializationElectionCallMessages { + election_admin: Rc, + phantom_context: PhantomData, +} + +impl BadSerializationElectionCallMessages { + pub fn new(election_admin: C::PrivateKey) -> Self { + Self { + election_admin: Rc::new(election_admin), + phantom_context: Default::default(), + } + } +} + +impl MessageGenerator for BadSerializationElectionCallMessages { + type Module = Election; + type Context = C; + + fn create_messages(&self) -> Vec>> { + let call_generator = &mut CallGenerator::new(self.election_admin.clone()); + call_generator.all_messages() + } + + fn create_tx>( + &self, + sender: &C::PrivateKey, + message: as Module>::CallMessage, + nonce: u64, + is_last: bool, + ) -> Transaction { + let call_data = if is_last { + vec![1, 2, 3] + } else { + Encoder::encode_call(message) + }; + + Transaction::::new_signed_tx(sender, call_data, nonce) + } +} diff --git a/module-system/utils/sov-data-generators/src/lib.rs b/module-system/utils/sov-data-generators/src/lib.rs new file mode 100644 index 000000000..2942a5601 --- 
/dev/null +++ b/module-system/utils/sov-data-generators/src/lib.rs @@ -0,0 +1,107 @@ +#[cfg(feature = "native")] +use std::rc::Rc; + +use borsh::ser::BorshSerialize; +#[cfg(feature = "native")] +use sov_modules_api::transaction::Transaction; +use sov_modules_api::Address; +pub use sov_modules_api::EncodeCall; +#[cfg(feature = "native")] +use sov_modules_api::{Context, Module, Spec}; +#[cfg(feature = "native")] +use sov_modules_stf_template::RawTx; +use sov_modules_stf_template::{Batch, SequencerOutcome, TxEffect}; +use sov_rollup_interface::mocks::TestBlob; +use sov_rollup_interface::stf::BatchReceipt; + +#[cfg(feature = "native")] +pub mod bank_data; +#[cfg(feature = "native")] +pub mod election_data; +#[cfg(feature = "native")] +pub mod value_setter_data; + +pub fn new_test_blob_from_batch(batch: Batch, address: &[u8], hash: [u8; 32]) -> TestBlob

{ + let address = Address::try_from(address).unwrap(); + let data = batch.try_to_vec().unwrap(); + TestBlob::new(data, address, hash) +} + +pub fn has_tx_events(apply_blob_outcome: &BatchReceipt) -> bool { + let events = apply_blob_outcome + .tx_receipts + .iter() + .flat_map(|receipts| receipts.events.iter()); + + events.peekable().peek().is_some() +} + +#[cfg(feature = "native")] +/// A generic message object used to create transactions. +pub struct Message { + /// The sender's private key. + pub sender_key: Rc<::PrivateKey>, + /// The message content. + pub content: Mod::CallMessage, + /// The message nonce. + pub nonce: u64, +} + +#[cfg(feature = "native")] +impl Message { + fn new(sender_key: Rc<::PrivateKey>, content: Mod::CallMessage, nonce: u64) -> Self { + Self { + sender_key, + content, + nonce, + } + } +} + +#[cfg(feature = "native")] +/// Trait used to generate messages from the DA layer to automate module testing +pub trait MessageGenerator { + /// Module where the messages originate from. + type Module: Module; + + /// Module context + type Context: Context; + + /// Generates a list of messages originating from the module. + fn create_messages(&self) -> Vec>; + + /// Creates a transaction object associated with a call message, for a given module. + fn create_tx>( + &self, + // Private key of the sender + sender: &::PrivateKey, + // The message itself + message: ::CallMessage, + // The message nonce + nonce: u64, + // A boolean that indicates whether this message is the last one to be sent. + // Useful to perform some operations specifically on the last message. + is_last: bool, + ) -> Transaction; + + /// Creates a vector of raw transactions from the module. + fn create_raw_txs>(&self) -> Vec { + let mut messages_iter = self.create_messages().into_iter().peekable(); + let mut serialized_messages = Vec::default(); + while let Some(message) = messages_iter.next() { + let is_last = messages_iter.peek().is_none(); + + let tx = self.create_tx::( + &message.sender_key, + message.content, + message.nonce, + is_last, + ); + + serialized_messages.push(RawTx { + data: tx.try_to_vec().unwrap(), + }) + } + serialized_messages + } +} diff --git a/module-system/utils/sov-data-generators/src/value_setter_data.rs b/module-system/utils/sov-data-generators/src/value_setter_data.rs new file mode 100644 index 000000000..9bd80195c --- /dev/null +++ b/module-system/utils/sov-data-generators/src/value_setter_data.rs @@ -0,0 +1,71 @@ +use std::vec; + +use sov_modules_api::default_context::DefaultContext; +use sov_modules_api::default_signature::private_key::DefaultPrivateKey; +use sov_modules_api::PrivateKey; +use sov_value_setter::ValueSetter; + +use super::*; +use crate::EncodeCall; + +pub struct ValueSetterMessage { + pub admin: Rc, + pub messages: Vec, +} + +pub struct ValueSetterMessages { + pub messages: Vec>, +} + +impl ValueSetterMessages { + pub fn new(messages: Vec>) -> Self { + Self { messages } + } +} + +impl Default for ValueSetterMessages { + /// This function will return a dummy value setter message containing one admin and two value setter messages. 
+ fn default() -> Self { + Self::new(vec![ValueSetterMessage { + admin: Rc::new(DefaultPrivateKey::generate()), + messages: vec![99, 33], + }]) + } +} + +impl MessageGenerator for ValueSetterMessages { + type Module = ValueSetter; + type Context = C; + + fn create_messages(&self) -> Vec> { + let mut messages = Vec::default(); + for value_setter_message in &self.messages { + let admin = value_setter_message.admin.clone(); + + for (value_setter_admin_nonce, new_value) in + value_setter_message.messages.iter().enumerate() + { + let set_value_msg: sov_value_setter::CallMessage = + sov_value_setter::CallMessage::SetValue(*new_value); + + messages.push(Message::new( + admin.clone(), + set_value_msg, + value_setter_admin_nonce.try_into().unwrap(), + )); + } + } + messages + } + + fn create_tx>( + &self, + sender: &C::PrivateKey, + message: ::CallMessage, + nonce: u64, + _is_last: bool, + ) -> Transaction { + let message = Encoder::encode_call(message); + Transaction::::new_signed_tx(sender, message, nonce) + } +} diff --git a/rollup-interface/Cargo.toml b/rollup-interface/Cargo.toml index e971e4d99..6c9f752bd 100644 --- a/rollup-interface/Cargo.toml +++ b/rollup-interface/Cargo.toml @@ -24,7 +24,6 @@ serde = { workspace = true } bytes = { workspace = true } hex = { workspace = true, features = ["serde"] } digest = { workspace = true } - sha2 = { workspace = true, optional = true } anyhow = { workspace = true } diff --git a/rollup-interface/src/node/services/da.rs b/rollup-interface/src/node/services/da.rs index 39221f0f1..e783b9181 100644 --- a/rollup-interface/src/node/services/da.rs +++ b/rollup-interface/src/node/services/da.rs @@ -6,6 +6,7 @@ use serde::de::DeserializeOwned; use serde::Serialize; use crate::da::{BlockHeaderTrait, DaSpec}; +use crate::zk::ValidityCondition; /// A DaService is the local side of an RPC connection talking to a node of the DA layer /// It is *not* part of the logic that is zk-proven. @@ -21,7 +22,10 @@ pub trait DaService { type Spec: DaSpec; /// A DA layer block, possibly excluding some irrelevant information. - type FilteredBlock: SlotData::BlockHeader>; + type FilteredBlock: SlotData< + BlockHeader = ::BlockHeader, + Cond = ::ValidityCondition, + >; /// The error type for fallible methods. type Error: fmt::Debug + Send + Sync; @@ -100,8 +104,14 @@ pub trait SlotData: /// For these fields, we only ever store their *serialized* representation in memory or on disk. Only a few special /// fields like `data_root` are stored in decoded form in the `CelestiaHeader` struct. type BlockHeader: BlockHeaderTrait; + + /// The validity condition associated with the slot data. + type Cond: ValidityCondition; + /// The canonical hash of the DA layer block. fn hash(&self) -> [u8; 32]; /// The header of the DA layer block. fn header(&self) -> &Self::BlockHeader; + /// Get the validity condition set associated with the slot + fn validity_condition(&self) -> Self::Cond; } diff --git a/rollup-interface/src/state_machine/da.rs b/rollup-interface/src/state_machine/da.rs index 68386c115..4a9f51853 100644 --- a/rollup-interface/src/state_machine/da.rs +++ b/rollup-interface/src/state_machine/da.rs @@ -24,6 +24,9 @@ pub trait DaSpec { /// The transaction type used by the DA layer. type BlobTransaction: BlobReaderTrait; + /// Any conditions imposed by the DA layer which need to be checked outside of the SNARK + type ValidityCondition: ValidityCondition; + /// A proof that each tx in a set of blob transactions is included in a given block. 
type InclusionMultiProof: Serialize + DeserializeOwned; @@ -52,9 +55,6 @@ pub trait DaVerifier { /// TODO: Should we add `std::Error` bound so it can be `()?` ? type Error: Debug; - /// Any conditions imposed by the DA layer which need to be checked outside of the SNARK - type ValidityCondition: ValidityCondition; - /// Create a new da verifier with the given chain parameters fn new(params: ::ChainParams) -> Self; @@ -65,7 +65,7 @@ pub trait DaVerifier { txs: &[::BlobTransaction], inclusion_proof: ::InclusionMultiProof, completeness_proof: ::CompletenessProof, - ) -> Result; + ) -> Result<::ValidityCondition, Self::Error>; } #[derive(Debug, Clone, Serialize, Deserialize, BorshDeserialize, BorshSerialize, PartialEq)] @@ -107,6 +107,11 @@ impl CountedBufReader { pub fn acc(&self) -> &Vec { &self.reading_acc } + + /// Contains the total length of the data (length already read + length remaining) + pub fn total_len(&self) -> usize { + self.inner.remaining() + self.counter + } } impl Read for CountedBufReader { @@ -144,10 +149,8 @@ pub trait BlobReaderTrait: Serialize + DeserializeOwned + Send + Sync + 'static /// This function returns a mutable reference to the blob data fn data_mut(&mut self) -> &mut CountedBufReader; - /// The raw data of the blob. For example, the "calldata" of an Ethereum rollup transaction - /// This function clones the data of the blob to an external BufWithCounter - /// - /// This function returns a simple reference to the blob data + /// Returns a reference to a `CountedBufReader`, which allows the caller to re-read + /// any data read so far, but not to advance the buffer fn data(&self) -> &CountedBufReader; /// Returns the hash of the blob. If not provided with a hint, it is computed by hashing the blob data diff --git a/rollup-interface/src/state_machine/mocks.rs b/rollup-interface/src/state_machine/mocks.rs index 9962e86b5..ebdbf7926 100644 --- a/rollup-interface/src/state_machine/mocks.rs +++ b/rollup-interface/src/state_machine/mocks.rs @@ -1,7 +1,9 @@ //! Defines mock instantiations of many important traits, which are useful //! for testing, fuzzing, and benchmarking. + use std::fmt::Display; use std::io::Write; +use std::marker::PhantomData; use anyhow::{ensure, Error}; use borsh::{BorshDeserialize, BorshSerialize}; @@ -11,7 +13,7 @@ use sha2::Digest; use crate::da::{BlobReaderTrait, BlockHashTrait, BlockHeaderTrait, CountedBufReader, DaSpec}; use crate::services::da::SlotData; -use crate::zk::{Matches, Zkvm}; +use crate::zk::{Matches, ValidityCondition, ValidityConditionChecker, Zkvm}; use crate::AddressTrait; /// A mock commitment to a particular zkVM program. @@ -24,6 +26,29 @@ impl Matches for MockCodeCommitment { } } +/// A mock validity condition that is always valid, used as a genesis validity condition for now. +#[derive(Clone, Copy, Debug, BorshDeserialize, BorshSerialize, Serialize, Deserialize, Default)] +pub struct MockValidityCond; + +impl ValidityCondition for MockValidityCond { + type Error = anyhow::Error; + + fn combine(&self, _rhs: Self) -> Result { + Ok(MockValidityCond) + } +} +/// A mock validity condition checker that always return true. +#[derive(Debug, BorshDeserialize, BorshSerialize, Serialize, Deserialize)] +pub struct MockValidityCondChecker; + +impl ValidityConditionChecker for MockValidityCondChecker { + type Error = anyhow::Error; + + fn check(&mut self, _condition: &MockValidityCond) -> Result<(), Self::Error> { + Ok(()) + } +} + /// A mock proof generated by a zkVM. 
#[derive(Debug, Clone, PartialEq, Eq, BorshDeserialize, BorshSerialize, Serialize, Deserialize)] pub struct MockProof<'a> { @@ -85,6 +110,13 @@ impl Zkvm for MockZkvm { anyhow::ensure!(proof.is_valid, "Proof is not valid"); Ok(proof.log) } + + fn verify_and_extract_output( + _serialized_proof: &[u8], + _code_commitment: &Self::CodeCommitment, + ) -> Result, Self::Error> { + todo!("Need to specify an output format for the proof logs") + } } #[test] @@ -156,6 +188,42 @@ impl Display for MockAddress { impl AddressTrait for MockAddress {} +/// A trivial test validity condition structure that only contains a boolean +#[derive( + Debug, BorshDeserialize, BorshSerialize, Serialize, Deserialize, PartialEq, Clone, Copy, Default, +)] +pub struct TestValidityCond { + /// The associated validity condition field. If it is true, the validity condition is verified + pub is_valid: bool, +} + +impl ValidityCondition for TestValidityCond { + type Error = Error; + fn combine(&self, rhs: Self) -> Result { + Ok(TestValidityCond { + is_valid: self.is_valid & rhs.is_valid, + }) + } +} + +#[derive(Debug, BorshDeserialize, BorshSerialize)] +/// A mock validity condition checker that always evaluate to cond +pub struct TestValidityCondChecker { + phantom: PhantomData, +} + +impl ValidityConditionChecker for TestValidityCondChecker { + type Error = Error; + + fn check(&mut self, condition: &TestValidityCond) -> Result<(), Self::Error> { + if condition.is_valid { + Ok(()) + } else { + Err(anyhow::format_err!("Invalid mock validity condition")) + } + } +} + #[derive( Debug, Clone, @@ -218,7 +286,7 @@ impl AsRef<[u8]> for TestHash { impl BlockHashTrait for TestHash {} /// A mock block header used for testing. -#[derive(Serialize, Deserialize, PartialEq, core::fmt::Debug, Clone)] +#[derive(Serialize, Deserialize, PartialEq, core::fmt::Debug, Clone, Copy)] pub struct TestBlockHeader { /// The hash of the previous block. pub prev_hash: TestHash, @@ -237,7 +305,7 @@ impl BlockHeaderTrait for TestBlockHeader { } /// A mock block type used for testing. -#[derive(Serialize, Deserialize, PartialEq, core::fmt::Debug, Clone)] +#[derive(Serialize, Deserialize, PartialEq, core::fmt::Debug, Clone, Copy)] pub struct TestBlock { /// The hash of this block. pub curr_hash: [u8; 32], @@ -245,10 +313,27 @@ pub struct TestBlock { pub header: TestBlockHeader, /// The height of this block pub height: u64, + /// Validity condition + pub validity_cond: TestValidityCond, +} + +impl Default for TestBlock { + fn default() -> Self { + Self { + curr_hash: [0; 32], + header: TestBlockHeader { + prev_hash: TestHash([0; 32]), + }, + height: 0, + validity_cond: TestValidityCond::default(), + } + } } impl SlotData for TestBlock { type BlockHeader = TestBlockHeader; + type Cond = TestValidityCond; + fn hash(&self) -> [u8; 32] { self.curr_hash } @@ -256,6 +341,10 @@ impl SlotData for TestBlock { fn header(&self) -> &Self::BlockHeader { &self.header } + + fn validity_condition(&self) -> TestValidityCond { + self.validity_cond + } } /// A [`DaSpec`] suitable for testing. 
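// Illustrative sketch (assumes the `mocks` feature): fold the per-slot validity conditions
// exposed by the new `SlotData::validity_condition` into a single condition with
// `ValidityCondition::combine`, as a recursive prover aggregating several slots would.
// The helper name `aggregate_conditions` is ours; the types are the mocks defined above.
use sov_rollup_interface::mocks::{TestBlock, TestValidityCond};
use sov_rollup_interface::services::da::SlotData;
use sov_rollup_interface::zk::ValidityCondition;

fn aggregate_conditions(slots: &[TestBlock]) -> anyhow::Result<TestValidityCond> {
    slots
        .iter()
        .map(|slot| slot.validity_condition())
        // `is_valid: true` is the identity element for `combine` on the mock condition.
        .try_fold(TestValidityCond { is_valid: true }, |acc, cond| {
            acc.combine(cond)
        })
}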
@@ -263,6 +352,7 @@ pub struct MockDaSpec; impl DaSpec for MockDaSpec { type SlotHash = TestHash; + type ValidityCondition = TestValidityCond; type BlockHeader = TestBlockHeader; type BlobTransaction = TestBlob; type InclusionMultiProof = [u8; 32]; diff --git a/rollup-interface/src/state_machine/stf.rs b/rollup-interface/src/state_machine/stf.rs index 744b456f3..06fcc747b 100644 --- a/rollup-interface/src/state_machine/stf.rs +++ b/rollup-interface/src/state_machine/stf.rs @@ -8,7 +8,8 @@ use serde::de::DeserializeOwned; use serde::{Deserialize, Serialize}; use crate::da::BlobReaderTrait; -use crate::zk::Zkvm; +use crate::services::da::SlotData; +use crate::zk::{ValidityCondition, Zkvm}; #[cfg(any(test, feature = "fuzzing"))] pub mod fuzzing; @@ -42,6 +43,7 @@ mod sealed { /// and may be queried via RPC. Receipts are generic over a type `R` which the rollup can use to /// store additional data, such as the status code of the transaction or the amount of gas used. #[derive(Debug, Clone, Serialize, Deserialize)] +/// A receipt showing the result of a transaction pub struct TransactionReceipt { /// The canonical hash of this transaction pub tx_hash: [u8; 32], @@ -60,6 +62,7 @@ pub struct TransactionReceipt { /// can use to store arbitrary typed data, like the gas used by the batch. They are also generic over a type `TxReceiptContents`, /// since they contain a vector of [`TransactionReceipt`]s. #[derive(Debug, Clone, Serialize, Deserialize)] +/// A receipt giving the outcome of a batch of transactions pub struct BatchReceipt { /// The canonical hash of this batch pub batch_hash: [u8; 32], @@ -99,6 +102,7 @@ pub trait StateTransitionFunction { /// The contents of a transaction receipt. This is the data that is persisted in the database type TxReceiptContents: Serialize + DeserializeOwned + Clone; + /// The contents of a batch receipt. This is the data that is persisted in the database type BatchReceiptContents: Serialize + DeserializeOwned + Clone; @@ -106,8 +110,12 @@ pub trait StateTransitionFunction { /// or validated together with proof during verification type Witness: Default + Serialize; - /// Perform one-time initialization for the genesis block. - fn init_chain(&mut self, params: Self::InitialState); + /// The validity condition that must be verified outside of the zkVM + type Condition: ValidityCondition; + + /// Perform one-time initialization for the genesis block and return the resulting genesis root hash. + /// Panics if chain initialization fails. + fn init_chain(&mut self, params: Self::InitialState) -> Self::StateRoot; /// Called at each **DA-layer block** - whether or not that block contains any /// data relevant to the rollup. @@ -116,13 +124,16 @@ pub trait StateTransitionFunction { /// /// Applies batches of transactions to the rollup, /// slashing the sequencer who proposed the blob on failure. + /// The blobs belong to a slot whose data is passed in via the `slot_data` parameter, + /// which is consumed mainly by the `begin_slot` hook. /// The concrete blob type is defined by the DA layer implementation, /// which is why we use a generic here instead of an associated type.
/// /// Commits state changes to the database - fn apply_slot<'a, I>( + fn apply_slot<'a, I, Data>( &mut self, witness: Self::Witness, + slot_data: &Data, blobs: I, ) -> SlotResult< Self::StateRoot, @@ -131,7 +142,12 @@ pub trait StateTransitionFunction { Self::Witness, > where - I: IntoIterator; + I: IntoIterator, + Data: SlotData; + + /// Gets the state root from the associated state. Returns an error if the root is not available, + /// for example because the chain has not been initialized yet. + fn get_current_state_root(&self) -> anyhow::Result; } /// A key-value pair representing a change to the rollup state diff --git a/rollup-interface/src/state_machine/zk/mod.rs b/rollup-interface/src/state_machine/zk/mod.rs index 04a7a9c5d..bb2672415 100644 --- a/rollup-interface/src/state_machine/zk/mod.rs +++ b/rollup-interface/src/state_machine/zk/mod.rs @@ -8,10 +8,13 @@ //! maintained by the Sovereign Labs team. use core::fmt::Debug; +use borsh::{BorshDeserialize, BorshSerialize}; use digest::Digest; use serde::de::DeserializeOwned; use serde::{Deserialize, Serialize}; +use crate::AddressTrait; + /// A trait implemented by the prover ("host") of a zkVM program. pub trait ZkvmHost: Zkvm { /// Give the guest a piece of advice non-deterministically @@ -37,6 +40,13 @@ pub trait Zkvm { serialized_proof: &'a [u8], code_commitment: &Self::CodeCommitment, ) -> Result<&'a [u8], Self::Error>; + + /// Same as [`verify`], except that instead of returning the output as a serialized byte array, + /// it returns the output deserialized into a [`StateTransition`] structure. + fn verify_and_extract_output( + serialized_proof: &[u8], + code_commitment: &Self::CodeCommitment, + ) -> Result, Self::Error>; } /// A trait which is accessible from within a zkVM program. @@ -48,7 +58,9 @@ pub trait ZkvmGuest: Zkvm { } /// This trait is implemented on the struct/enum which expresses the validity condition -pub trait ValidityCondition: Serialize + DeserializeOwned { +pub trait ValidityCondition: + Serialize + DeserializeOwned + BorshDeserialize + BorshSerialize + Debug + Clone + Copy +{ /// The error type returned when two [`ValidityCondition`]s cannot be combined. type Error: Into; /// Combine two conditions into one (typically run inside a recursive proof). @@ -61,12 +73,18 @@ pub trait ValidityCondition: Serialize + DeserializeOwned { /// if and only if the condition `validity_condition` is satisfied. /// /// The period of time covered by a state transition proof may be a single slot, or a range of slots on the DA layer. -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct StateTransition { +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct StateTransition { /// The state of the rollup before the transition pub initial_state_root: [u8; 32], /// The state of the rollup after the transition pub final_state_root: [u8; 32], + /// The slot hash of the state transition + pub slot_hash: [u8; 32], + + /// The rewarded address: the account that produced the transition proof. + pub rewarded_address: Address, + /// An additional validity condition for the state transition which needs /// to be checked outside of the zkVM circuit. This typically corresponds to /// some claim about the DA layer history, such as (X) is a valid block on the DA layer @@ -74,7 +92,9 @@ pub struct StateTransition { } /// This trait expresses that a type can check a validity condition. -pub trait ValidityConditionChecker { +pub trait ValidityConditionChecker: + BorshDeserialize + BorshSerialize + Debug +{ /// The error type returned when a [`ValidityCondition`] is invalid.
type Error: Into; /// Check a validity condition
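// Minimal sketch of a concrete type satisfying the strengthened `ValidityCondition` bounds
// (Borsh + Debug + Clone + Copy). `ContinuityCondition` and its chaining rule are illustrative
// assumptions, not part of the SDK: two conditions combine only if the second slot builds on
// the first.
use anyhow::ensure;
use borsh::{BorshDeserialize, BorshSerialize};
use serde::{Deserialize, Serialize};
use sov_rollup_interface::zk::ValidityCondition;

#[derive(
    Debug, Clone, Copy, PartialEq, Serialize, Deserialize, BorshDeserialize, BorshSerialize,
)]
pub struct ContinuityCondition {
    pub prev_hash: [u8; 32],
    pub block_hash: [u8; 32],
}

impl ValidityCondition for ContinuityCondition {
    type Error = anyhow::Error;

    // Two adjacent conditions merge into one covering the whole range of slots.
    fn combine(&self, rhs: Self) -> Result<Self, Self::Error> {
        ensure!(
            rhs.prev_hash == self.block_hash,
            "cannot combine conditions for non-contiguous slots"
        );
        Ok(Self {
            prev_hash: self.prev_hash,
            block_hash: rhs.block_hash,
        })
    }
}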