diff --git a/.cargo/config.toml b/.cargo/config.toml new file mode 100644 index 00000000..bff29e6e --- /dev/null +++ b/.cargo/config.toml @@ -0,0 +1,2 @@ +[build] +rustflags = ["--cfg", "tokio_unstable"] diff --git a/.github/dependabot.yaml b/.github/dependabot.yaml new file mode 100644 index 00000000..9ac75914 --- /dev/null +++ b/.github/dependabot.yaml @@ -0,0 +1,6 @@ +version: 2 +updates: + - package-ecosystem: "gitsubmodule" + directory: "/" + schedule: + interval: "daily" \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock index 74c9b8ed..ab0d7883 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1818,9 +1818,11 @@ dependencies = [ "color-eyre", "dotenvy", "hex", + "log", "mockall 0.13.0", "once_cell", "settlement-client-interface", + "test-log", "tokio", "utils", ] @@ -1904,7 +1906,7 @@ dependencies = [ [[package]] name = "aptos-testcontainer" version = "0.1.0" -source = "git+https://github.com/sota-zk-labs/aptos-testcontainer?branch=master#a963e4fd43ad6d21f42c42170b59c01d10c52573" +source = "git+https://github.com/sota-zk-labs/aptos-testcontainer?branch=master#ba2cfc2a0656e11204b40fe29f7c6b781ee7ab45" dependencies = [ "anyhow", "base64 0.22.1", @@ -8341,6 +8343,18 @@ dependencies = [ "syn 2.0.72", ] +[[package]] +name = "mockall_double" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1ca96e5ac35256ae3e13536edd39b172b88f41615e1d7b653c8ad24524113e8" +dependencies = [ + "cfg-if", + "proc-macro2", + "quote", + "syn 2.0.72", +] + [[package]] name = "mongodb" version = "2.8.2" @@ -9493,10 +9507,12 @@ version = "0.1.0" dependencies = [ "alloy 0.2.1", "arc-swap", + "assert_matches", "async-std", "async-trait", "aws-config", "aws-sdk-s3", + "aws-sdk-sqs", "axum 0.7.5", "axum-macros", "bincode 1.3.3", @@ -9516,7 +9532,8 @@ dependencies = [ "log", "majin-blob-core", "majin-blob-types", - "mockall 0.12.1", + "mockall 0.13.0", + "mockall_double", "mongodb", "num", "num-bigint 0.4.6", @@ -13241,6 +13258,28 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76" +[[package]] +name = "test-log" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3dffced63c2b5c7be278154d76b479f9f9920ed34e7574201407f0b14e2bbb93" +dependencies = [ + "env_logger", + "test-log-macros", + "tracing-subscriber 0.3.18", +] + +[[package]] +name = "test-log-macros" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5999e24eaa32083191ba4e425deb75cdf25efefabe5aaccb7446dd0d4122a3f5" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.72", +] + [[package]] name = "testcontainers" version = "0.21.1" diff --git a/Cargo.toml b/Cargo.toml index 38eb10d1..4a46d9cf 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -23,24 +23,37 @@ authors = ["Apoorv Sadana <@apoorvsadana>"] # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [workspace.dependencies] -num = { version = "0.4.1" } -async-trait = { version = "0.1.77" } alloy = { version = "0.2.1", features = ["full"] } +arc-swap = { version = "1.7.1" } +async-trait = { version = "0.1.77" } axum = { version = "0.7.4" } axum-macros = "0.4.1" bincode = "1.3.3" -color-eyre = "0.6.2" c-kzg = "1.0.0" +color-eyre = "0.6.2" dotenvy = "0.15.7" futures = "0.3.30" +hex = "0.4.3" +httpmock = { version = "0.7.0" } +itertools = "0.13.0" +lazy_static = "1.4.0" +log = "0.4.21" +mockall = "0.13.0" mongodb 
= { version = "2.8.2" } +num = { version = "0.4.1" } +num-bigint = { version = "0.4.4" } +num-traits = "0.2" omniqueue = { version = "0.2.0" } +once_cell = "1.19.0" reqwest = { version = "0.12.5" } rstest = "0.22.0" serde = { version = "1.0.197" } serde_json = "1.0.114" +stark_evm_adapter = "0.1.1" starknet = "0.11.0" tempfile = "3.8.1" +testcontainers = "0.21.1" +test-log = "0.2.16" thiserror = "1.0.57" tokio = { version = "1.37.0" } tokio-stream = "0.1.15" @@ -49,18 +62,6 @@ tracing = "0.1.40" tracing-subscriber = { version = "0.3.18" } url = { version = "2.5.0", features = ["serde"] } uuid = { version = "1.7.0", features = ["v4", "serde"] } -httpmock = { version = "0.7.0" } -num-bigint = { version = "0.4.4" } -arc-swap = { version = "1.7.1" } -num-traits = "0.2" -lazy_static = "1.4.0" -stark_evm_adapter = "0.1.1" -hex = "0.4.3" -itertools = "0.13.0" -mockall = "0.13.0" -testcontainers = "0.21.1" -once_cell = "1.19.0" - # Cairo VM cairo-vm = { version = "1.0.0-rc5", features = [ "extensive_hints", @@ -81,15 +82,15 @@ madara-prover-rpc-client = { git = "https://github.com/Moonsong-Labs/madara-prov da-client-interface = { path = "crates/da-clients/da-client-interface" } ethereum-da-client = { path = "crates/da-clients/ethereum" } -settlement-client-interface = { path = "crates/settlement-clients/settlement-client-interface" } ethereum-settlement-client = { path = "crates/settlement-clients/ethereum" } +settlement-client-interface = { path = "crates/settlement-clients/settlement-client-interface" } starknet-settlement-client = { path = "crates/settlement-clients/starknet" } -utils = { path = "crates/utils" } -prover-client-interface = { path = "crates/prover-services/prover-client-interface" } gps-fact-checker = { path = "crates/prover-services/gps-fact-checker" } -sharp-service = { path = "crates/prover-services/sharp-service" } orchestrator = { path = "crates/orchestrator" } +prover-client-interface = { path = "crates/prover-services/prover-client-interface" } +sharp-service = { path = "crates/prover-services/sharp-service" } +utils = { path = "crates/utils" } aptos-sdk = { git = "https://github.com/aptos-labs/aptos-core" } aptos-testcontainer = { git = "https://github.com/sota-zk-labs/aptos-testcontainer", branch = "master", features = [ diff --git a/crates/da-clients/ethereum/src/config.rs b/crates/da-clients/ethereum/src/config.rs index b50604b2..34370036 100644 --- a/crates/da-clients/ethereum/src/config.rs +++ b/crates/da-clients/ethereum/src/config.rs @@ -1,6 +1,8 @@ use std::str::FromStr; -use alloy::{network::Ethereum, providers::ProviderBuilder, rpc::client::RpcClient}; +use alloy::network::Ethereum; +use alloy::providers::ProviderBuilder; +use alloy::rpc::client::RpcClient; use async_trait::async_trait; use da_client_interface::DaConfig; use url::Url; diff --git a/crates/da-clients/ethereum/src/lib.rs b/crates/da-clients/ethereum/src/lib.rs index 68c933bd..e48f5ca7 100644 --- a/crates/da-clients/ethereum/src/lib.rs +++ b/crates/da-clients/ethereum/src/lib.rs @@ -20,8 +20,8 @@ pub struct EthereumDaClient { #[async_trait] impl DaClient for EthereumDaClient { async fn publish_state_diff(&self, _state_diff: Vec>, _to: &[u8; 32]) -> Result { - // Here in case of ethereum we are not publishing the state diff because we are doing it all together in update_state job. - // So we don't need to send the blob here. + // Here in case of ethereum we are not publishing the state diff because we are doing it all + // together in update_state job. So we don't need to send the blob here. 
Ok("NA".to_string()) } diff --git a/crates/orchestrator/Cargo.toml b/crates/orchestrator/Cargo.toml index b6566e8a..6d0121bd 100644 --- a/crates/orchestrator/Cargo.toml +++ b/crates/orchestrator/Cargo.toml @@ -34,7 +34,7 @@ ethereum-settlement-client = { workspace = true } futures = { workspace = true } hex = { workspace = true } lazy_static = { workspace = true } -log = "0.4.21" +log = { workspace = true } majin-blob-core = { git = "https://github.com/AbdelStark/majin-blob", branch = "main" } majin-blob-types = { git = "https://github.com/AbdelStark/majin-blob", branch = "main" } mockall = { version = "0.13.0" } @@ -70,6 +70,6 @@ with_sqs = ["omniqueue"] [dev-dependencies] assert_matches = "1.5.0" +httpmock = { workspace = true, features = ["remote"] } hyper = { version = "0.14", features = ["full"] } rstest = { workspace = true } -httpmock = { workspace = true, features = ["remote"] } diff --git a/crates/orchestrator/src/config.rs b/crates/orchestrator/src/config.rs index 447b25ec..66b4d285 100644 --- a/crates/orchestrator/src/config.rs +++ b/crates/orchestrator/src/config.rs @@ -1,8 +1,5 @@ use std::sync::Arc; -use crate::data_storage::aws_s3::config::{AWSS3Config, AWSS3ConfigType}; -use crate::data_storage::aws_s3::AWSS3; -use crate::data_storage::{DataStorage, DataStorageConfig}; use arc_swap::{ArcSwap, Guard}; use da_client_interface::{DaClient, DaConfig}; use dotenvy::dotenv; @@ -19,6 +16,9 @@ use utils::env_utils::get_env_var_or_panic; use utils::settings::default::DefaultSettingsProvider; use utils::settings::SettingsProvider; +use crate::data_storage::aws_s3::config::{AWSS3Config, AWSS3ConfigType}; +use crate::data_storage::aws_s3::AWSS3; +use crate::data_storage::{DataStorage, DataStorageConfig}; use crate::database::mongodb::config::MongoDbConfig; use crate::database::mongodb::MongoDb; use crate::database::{Database, DatabaseConfig}; diff --git a/crates/orchestrator/src/data_storage/aws_s3/mod.rs b/crates/orchestrator/src/data_storage/aws_s3/mod.rs index 990a5d0a..aed1882f 100644 --- a/crates/orchestrator/src/data_storage/aws_s3/mod.rs +++ b/crates/orchestrator/src/data_storage/aws_s3/mod.rs @@ -1,5 +1,3 @@ -use crate::data_storage::aws_s3::config::AWSS3ConfigType; -use crate::data_storage::DataStorage; use async_trait::async_trait; use aws_sdk_s3::config::{Builder, Credentials, Region}; use aws_sdk_s3::primitives::ByteStream; @@ -7,6 +5,9 @@ use aws_sdk_s3::Client; use bytes::Bytes; use color_eyre::Result; +use crate::data_storage::aws_s3::config::AWSS3ConfigType; +use crate::data_storage::DataStorage; + /// Module for AWS S3 config structs and implementations pub mod config; diff --git a/crates/orchestrator/src/data_storage/types.rs b/crates/orchestrator/src/data_storage/types.rs index 0fc1a6fb..31270558 100644 --- a/crates/orchestrator/src/data_storage/types.rs +++ b/crates/orchestrator/src/data_storage/types.rs @@ -1,6 +1,7 @@ +use std::collections::HashMap; + use cairo_vm::Felt252; use serde::{Deserialize, Serialize}; -use std::collections::HashMap; /// This struct represents the contract changes that will be in `StarknetOsOutput` /// as a vector. 
diff --git a/crates/orchestrator/src/database/mongodb/mod.rs b/crates/orchestrator/src/database/mongodb/mod.rs
index 1ef99917..a1d7aea6 100644
--- a/crates/orchestrator/src/database/mongodb/mod.rs
+++ b/crates/orchestrator/src/database/mongodb/mod.rs
@@ -1,18 +1,13 @@
-use async_std::stream::StreamExt;
-use futures::TryStreamExt;
 use std::collections::HashMap;
 
+use async_std::stream::StreamExt;
 use async_trait::async_trait;
 use color_eyre::eyre::eyre;
 use color_eyre::Result;
-use mongodb::bson::{Bson, Document};
-use mongodb::options::{FindOneOptions, FindOptions, UpdateOptions};
-use mongodb::{
-    bson,
-    bson::doc,
-    options::{ClientOptions, ServerApi, ServerApiVersion},
-    Client, Collection,
-};
+use futures::TryStreamExt;
+use mongodb::bson::{doc, Bson, Document};
+use mongodb::options::{ClientOptions, FindOneOptions, FindOptions, ServerApi, ServerApiVersion, UpdateOptions};
+use mongodb::{bson, Client, Collection};
 use uuid::Uuid;
 
 use crate::database::mongodb::config::MongoDbConfig;
diff --git a/crates/orchestrator/src/jobs/da_job/mod.rs b/crates/orchestrator/src/jobs/da_job/mod.rs
index 1fd3bd41..dc787830 100644
--- a/crates/orchestrator/src/jobs/da_job/mod.rs
+++ b/crates/orchestrator/src/jobs/da_job/mod.rs
@@ -15,12 +15,11 @@ use starknet::providers::Provider;
 use tracing::log;
 use uuid::Uuid;
 
+use super::types::{JobItem, JobStatus, JobType, JobVerificationStatus};
+use super::Job;
 use crate::config::Config;
 use crate::constants::BLOB_DATA_FILE_NAME;
 
-use super::Job;
-use super::types::{JobItem, JobStatus, JobType, JobVerificationStatus};
-
 lazy_static! {
     /// EIP-4844 BLS12-381 modulus.
     ///
@@ -201,11 +200,7 @@ fn data_to_blobs(blob_size: u64, block_data: Vec) -> Result
     Ok(blobs)
 }
 
-async fn state_update_to_blob_data(
-    block_no: u64,
-    state_update: StateUpdate,
-    config: &Config,
-) -> Result<Vec<Felt>> {
+async fn state_update_to_blob_data(block_no: u64, state_update: StateUpdate, config: &Config) -> Result<Vec<Felt>> {
     let state_diff = state_update.state_diff;
     let mut blob_data: Vec<Felt> = vec![
         Felt::from(state_diff.storage_diffs.len()),
@@ -320,7 +315,12 @@ fn da_word(class_flag: bool, nonce_change: Option<Felt>, num_changes: u64) -> Fe
     // checking for nonce here
     if let Some(_new_nonce) = nonce_change {
-        let bytes: [u8; 32] = nonce_change.expect("Not able to convert the nonce_change var into [u8; 32] type. Possible Error : Improper parameter length.").to_bytes_be();
+        let bytes: [u8; 32] = nonce_change
+            .expect(
+                "Not able to convert the nonce_change var into [u8; 32] type. 
Possible Error : Improper parameter \ + length.", + ) + .to_bytes_be(); let biguint = BigUint::from_bytes_be(&bytes); let binary_string_local = format!("{:b}", biguint); let padded_binary_string = format!("{:0>64}", binary_string_local); @@ -357,12 +357,11 @@ mod tests { use rstest::rstest; use serde_json::json; + use super::*; // use majin_blob_types::serde; use crate::data_storage::MockDataStorage; use crate::tests::common::init_config; - use super::*; - #[rstest] #[case(false, 1, 1, "18446744073709551617")] #[case(false, 1, 0, "18446744073709551616")] @@ -382,22 +381,22 @@ mod tests { #[rstest] #[case( - 631861, - "src/jobs/da_job/test_data/state_update_from_block_631861.txt", - "src/jobs/da_job/test_data/test_blob_631861.txt", - "src/jobs/da_job/test_data/nonces_from_block_631861.txt" + 631861, + "src/jobs/da_job/test_data/state_update_from_block_631861.txt", + "src/jobs/da_job/test_data/test_blob_631861.txt", + "src/jobs/da_job/test_data/nonces_from_block_631861.txt" )] #[case( - 638353, - "src/jobs/da_job/test_data/state_update_from_block_638353.txt", - "src/jobs/da_job/test_data/test_blob_638353.txt", - "src/jobs/da_job/test_data/nonces_from_block_638353.txt" + 638353, + "src/jobs/da_job/test_data/state_update_from_block_638353.txt", + "src/jobs/da_job/test_data/test_blob_638353.txt", + "src/jobs/da_job/test_data/nonces_from_block_638353.txt" )] #[case( - 640641, - "src/jobs/da_job/test_data/state_update_from_block_640641.txt", - "src/jobs/da_job/test_data/test_blob_640641.txt", - "src/jobs/da_job/test_data/nonces_from_block_640641.txt" + 640641, + "src/jobs/da_job/test_data/state_update_from_block_640641.txt", + "src/jobs/da_job/test_data/test_blob_640641.txt", + "src/jobs/da_job/test_data/nonces_from_block_640641.txt" )] #[tokio::test] async fn test_state_update_to_blob_data( @@ -426,7 +425,7 @@ mod tests { None, Some(storage_client), ) - .await; + .await; get_nonce_attached(&server, nonce_file_path); @@ -523,10 +522,6 @@ mod tests { new_hex_chars = new_hex_chars.trim_start_matches('0').to_string(); // Handle the case where the trimmed string is empty (e.g., data was all zeros) - if new_hex_chars.is_empty() { - "0x0".to_string() - } else { - format!("0x{}", new_hex_chars) - } + if new_hex_chars.is_empty() { "0x0".to_string() } else { format!("0x{}", new_hex_chars) } } } diff --git a/crates/orchestrator/src/jobs/job_handler_factory.rs b/crates/orchestrator/src/jobs/job_handler_factory.rs index 5a4daef3..8f4d8a34 100644 --- a/crates/orchestrator/src/jobs/job_handler_factory.rs +++ b/crates/orchestrator/src/jobs/job_handler_factory.rs @@ -49,7 +49,8 @@ pub mod factory { /// /// - We create the MockJob /// - We return this mocked job whenever a function calls `get_job_handler` - /// - Making it an Arc allows us to return the same MockJob in multiple calls to `get_job_handler`. This is needed because `MockJob` doesn't implement Clone + /// - Making it an Arc allows us to return the same MockJob in multiple calls to + /// `get_job_handler`. 
This is needed because `MockJob` doesn't implement Clone
     pub async fn get_job_handler(job_type: &JobType) -> Arc<Box<dyn Job>> {
         // Original implementation
         let job: Box<dyn Job> = match job_type {
diff --git a/crates/orchestrator/src/jobs/state_update_job/mod.rs b/crates/orchestrator/src/jobs/state_update_job/mod.rs
index d60c86d1..2e6167d6 100644
--- a/crates/orchestrator/src/jobs/state_update_job/mod.rs
+++ b/crates/orchestrator/src/jobs/state_update_job/mod.rs
@@ -7,16 +7,14 @@ use async_trait::async_trait;
 use cairo_vm::Felt252;
 use color_eyre::eyre::eyre;
 use color_eyre::Result;
+use settlement_client_interface::SettlementVerificationStatus;
 use snos::io::output::StarknetOsOutput;
 use uuid::Uuid;
 
-use settlement_client_interface::SettlementVerificationStatus;
-
 use super::constants::{
     JOB_METADATA_STATE_UPDATE_ATTEMPT_PREFIX, JOB_METADATA_STATE_UPDATE_LAST_FAILED_BLOCK_NO,
     JOB_PROCESS_ATTEMPT_METADATA_KEY,
 };
-
 use crate::config::{config, Config};
 use crate::constants::SNOS_OUTPUT_FILE_NAME;
 use crate::jobs::constants::JOB_METADATA_STATE_UPDATE_BLOCKS_TO_SETTLE_KEY;
@@ -76,13 +74,20 @@ impl Job for StateUpdateJob {
         self.insert_attempts_into_metadata(job, &attempt_no, &sent_tx_hashes);
 
         // external_id returned corresponds to the last block number settled
-        Ok(block_numbers.last().expect("Last number in block_numbers array returned as None. Possible Error : Delay in job processing or Failed job execution.").to_string())
+        Ok(block_numbers
+            .last()
+            .expect(
+                "Last number in block_numbers array returned as None. Possible Error : Delay in job processing or \
+                 Failed job execution.",
+            )
+            .to_string())
     }
 
     /// Returns the status of the passed job.
     /// Status will be verified if:
     /// 1. the last settlement tx hash is successful,
-    /// 2. the expected last settled block from our configuration is indeed the one found in the provider.
+    /// 2. the expected last settled block from our configuration is indeed the one found in the
+    ///    provider.
     async fn verify_job(&self, config: &Config, job: &mut JobItem) -> Result<JobVerificationStatus> {
         let attempt_no = job.metadata.get(JOB_PROCESS_ATTEMPT_METADATA_KEY).expect("Could not find current attempt number.").clone();
@@ -115,7 +120,7 @@ impl Job for StateUpdateJob {
                     return Ok(new_status.into());
                 }
                 SettlementVerificationStatus::Pending => {
-                    return Err(eyre!("Tx {tx_hash} should not be pending."))
+                    return Err(eyre!("Tx {tx_hash} should not be pending."));
                 }
                 SettlementVerificationStatus::Verified => {}
             }
diff --git a/crates/orchestrator/src/jobs/state_update_job/utils.rs b/crates/orchestrator/src/jobs/state_update_job/utils.rs
index 1d92c9a4..c696b06c 100644
--- a/crates/orchestrator/src/jobs/state_update_job/utils.rs
+++ b/crates/orchestrator/src/jobs/state_update_job/utils.rs
@@ -1,6 +1,7 @@
+use color_eyre::eyre::eyre;
+
 use crate::config::config;
 use crate::constants::BLOB_DATA_FILE_NAME;
-use color_eyre::eyre::eyre;
 
 /// Fetching the blob data (stored in remote storage during DA job) for a particular block
 pub async fn fetch_blob_data_for_block(block_number: u64) -> color_eyre::Result<Vec<Vec<u8>>> {
diff --git a/crates/orchestrator/src/queue/sqs/mod.rs b/crates/orchestrator/src/queue/sqs/mod.rs
index 1598189a..9c30ee49 100644
--- a/crates/orchestrator/src/queue/sqs/mod.rs
+++ b/crates/orchestrator/src/queue/sqs/mod.rs
@@ -1,12 +1,12 @@
 use std::time::Duration;
 
-use crate::queue::job_queue::JOB_PROCESSING_QUEUE;
 use async_trait::async_trait;
 use color_eyre::Result;
 use omniqueue::backends::{SqsBackend, SqsConfig, SqsConsumer, SqsProducer};
 use omniqueue::{Delivery, QueueError};
 use utils::env_utils::get_env_var_or_panic;
 
+use crate::queue::job_queue::JOB_PROCESSING_QUEUE;
 use crate::queue::QueueProvider;
 
 pub struct SqsQueue;
diff --git a/crates/orchestrator/src/tests/config.rs b/crates/orchestrator/src/tests/config.rs
index 2108e1a2..10a48dc2 100644
--- a/crates/orchestrator/src/tests/config.rs
+++ b/crates/orchestrator/src/tests/config.rs
@@ -1,10 +1,7 @@
 use std::sync::Arc;
 
-use crate::config::{build_da_client, build_prover_service, build_settlement_client, config_force_init, Config};
-use crate::data_storage::DataStorage;
 use da_client_interface::DaClient;
 use httpmock::MockServer;
-
 use prover_client_interface::ProverClient;
 use settlement_client_interface::SettlementClient;
 use starknet::providers::jsonrpc::HttpTransport;
@@ -12,6 +9,8 @@ use starknet::providers::{JsonRpcClient, Url};
 use utils::env_utils::get_env_var_or_panic;
 use utils::settings::default::DefaultSettingsProvider;
 
+use crate::config::{build_da_client, build_prover_service, build_settlement_client, config_force_init, Config};
+use crate::data_storage::DataStorage;
 use crate::database::mongodb::config::MongoDbConfig;
 use crate::database::mongodb::MongoDb;
 use crate::database::{Database, DatabaseConfig};
diff --git a/crates/orchestrator/src/tests/data_storage/mod.rs b/crates/orchestrator/src/tests/data_storage/mod.rs
index d127917a..6eea74e9 100644
--- a/crates/orchestrator/src/tests/data_storage/mod.rs
+++ b/crates/orchestrator/src/tests/data_storage/mod.rs
@@ -1,12 +1,13 @@
-use crate::data_storage::aws_s3::config::{AWSS3ConfigType, S3LocalStackConfig};
-use crate::data_storage::aws_s3::AWSS3;
-use crate::data_storage::{DataStorage, DataStorageConfig};
-use crate::tests::config::TestConfigBuilder;
 use bytes::Bytes;
 use rstest::rstest;
 use serde_json::json;
 use utils::env_utils::get_env_var_or_panic;
 
+use crate::data_storage::aws_s3::config::{AWSS3ConfigType, S3LocalStackConfig};
+use crate::data_storage::aws_s3::AWSS3;
+use 
crate::data_storage::{DataStorage, DataStorageConfig}; +use crate::tests::config::TestConfigBuilder; + /// This test checks the ability to put and get data from AWS S3 using `AWSS3`. /// It puts JSON data into a test bucket and retrieves it, verifying the data /// matches what was originally uploaded. diff --git a/crates/orchestrator/src/tests/database/mod.rs b/crates/orchestrator/src/tests/database/mod.rs index 42ae1a0e..2b3b9904 100644 --- a/crates/orchestrator/src/tests/database/mod.rs +++ b/crates/orchestrator/src/tests/database/mod.rs @@ -1,11 +1,13 @@ -use crate::config::{config, Config}; -use crate::jobs::types::{ExternalId, JobItem, JobStatus, JobType}; -use crate::tests::config::TestConfigBuilder; +use std::sync::Arc; + use arc_swap::Guard; use rstest::*; -use std::sync::Arc; use uuid::Uuid; +use crate::config::{config, Config}; +use crate::jobs::types::{ExternalId, JobItem, JobStatus, JobType}; +use crate::tests::config::TestConfigBuilder; + #[rstest] #[tokio::test] async fn test_database_connection() -> color_eyre::Result<()> { diff --git a/crates/orchestrator/src/tests/jobs/da_job/mod.rs b/crates/orchestrator/src/tests/jobs/da_job/mod.rs index bcc02050..08015170 100644 --- a/crates/orchestrator/src/tests/jobs/da_job/mod.rs +++ b/crates/orchestrator/src/tests/jobs/da_job/mod.rs @@ -1,223 +1,223 @@ -use crate::jobs::da_job::test::{get_nonce_attached, read_state_update_from_file}; -use crate::jobs::da_job::DaJob; -use crate::jobs::types::{ExternalId, JobItem, JobStatus, JobType}; -use crate::tests::common::drop_database; -use crate::tests::config::TestConfigBuilder; -use crate::{config::config, jobs::Job}; -use assert_matches::assert_matches; -use color_eyre::eyre::eyre; -use da_client_interface::MockDaClient; -use mockall::predicate::always; -use rstest::rstest; -use serde_json::json; -use starknet_core::types::{FieldElement, MaybePendingStateUpdate, PendingStateUpdate, StateDiff}; -use std::collections::HashMap; -use uuid::Uuid; - -/// Tests the DA Job's handling of a blob length exceeding the supported size. -/// It mocks the DA client to simulate the environment and expects an error on job processing. -/// Validates the error message for exceeding blob limits against the expected output. -/// Asserts correct behavior by comparing the received and expected error messages. 
-#[rstest] -#[case( - "src/tests/jobs/da_job/test_data/state_update/638353.txt", - "src/tests/jobs/da_job/test_data/nonces/638353.txt", - "63853", - 110 -)] -#[tokio::test] -async fn test_da_job_process_job_failure_on_small_blob_size( - #[case] state_update_file: String, - #[case] nonces_file: String, - #[case] internal_id: String, - #[case] current_blob_length: u64, -) { - // Mocking DA client calls - let mut da_client = MockDaClient::new(); - // dummy state will have more than 1200 bytes - da_client.expect_max_blob_per_txn().with().returning(|| 1); - da_client.expect_max_bytes_per_blob().with().returning(|| 1200); - - let server = TestConfigBuilder::new().mock_da_client(Box::new(da_client)).build().await; - let config = config().await; - - let state_update = read_state_update_from_file(state_update_file.as_str()).expect("issue while reading"); - - let state_update = MaybePendingStateUpdate::Update(state_update); - let state_update = serde_json::to_value(&state_update).unwrap(); - let response = json!({ "id": 640641,"jsonrpc":"2.0","result": state_update }); - - get_nonce_attached(&server, nonces_file.as_str()); - - let state_update_mock = server.mock(|when, then| { - when.path("/").body_contains("starknet_getStateUpdate"); - then.status(200).body(serde_json::to_vec(&response).unwrap()); - }); - - let max_blob_per_txn = config.da_client().max_blob_per_txn().await; - - let response = DaJob - .process_job( - config.as_ref(), - &mut JobItem { - id: Uuid::default(), - internal_id: internal_id.to_string(), - job_type: JobType::DataSubmission, - status: JobStatus::Created, - external_id: ExternalId::String(internal_id.to_string().into_boxed_str()), - metadata: HashMap::default(), - version: 0, - }, - ) - .await; - - assert_matches!(response, - Err(e) => { - let expected_error = eyre!( - "Exceeded the maximum number of blobs per transaction: allowed {}, found {} for block {} and job id {}", - max_blob_per_txn, - current_blob_length, - internal_id.to_string(), - Uuid::default() - ) - .to_string(); - assert_eq!(e.to_string(), expected_error); - } - ); - - state_update_mock.assert(); - let _ = drop_database().await; -} - -/// Tests DA Job processing failure when a block is in pending state. -/// Simulates a pending block state update and expects job processing to fail. -/// Validates that the error message matches the expected pending state error. -/// Asserts correct behavior by comparing the received and expected error messages. 
-#[rstest] -#[tokio::test] -async fn test_da_job_process_job_failure_on_pending_block() { - let server = TestConfigBuilder::new().build().await; - let config = config().await; - let internal_id = "1"; - - let pending_state_update = MaybePendingStateUpdate::PendingUpdate(PendingStateUpdate { - old_root: FieldElement::default(), - state_diff: StateDiff { - storage_diffs: vec![], - deprecated_declared_classes: vec![], - declared_classes: vec![], - deployed_contracts: vec![], - replaced_classes: vec![], - nonces: vec![], - }, - }); - - let pending_state_update = serde_json::to_value(&pending_state_update).unwrap(); - let response = json!({ "id": 1,"jsonrpc":"2.0","result": pending_state_update }); - - let state_update_mock = server.mock(|when, then| { - when.path("/").body_contains("starknet_getStateUpdate"); - then.status(200).body(serde_json::to_vec(&response).unwrap()); - }); - - let response = DaJob - .process_job( - config.as_ref(), - &mut JobItem { - id: Uuid::default(), - internal_id: internal_id.to_string(), - job_type: JobType::DataSubmission, - status: JobStatus::Created, - external_id: ExternalId::String("1".to_string().into_boxed_str()), - metadata: HashMap::default(), - version: 0, - }, - ) - .await; - - assert_matches!(response, - Err(e) => { - let expected_error = eyre!( - "Cannot process block {} for job id {} as it's still in pending state", - internal_id.to_string(), - Uuid::default() - ) - .to_string(); - assert_eq!(e.to_string(), expected_error); - } - ); - - state_update_mock.assert(); -} - -/// Tests successful DA Job processing with valid state update and nonces files. -/// Mocks DA client to simulate environment and expects job to process without errors. -/// Validates the successful job processing by checking the return message "Done". -/// Asserts correct behavior by comparing the received and expected success messages. 
-#[rstest] -#[case( - "src/tests/jobs/da_job/test_data/state_update/631861.txt", - "src/tests/jobs/da_job/test_data/nonces/631861.txt", - "631861" -)] -#[case( - "src/tests/jobs/da_job/test_data/state_update/640641.txt", - "src/tests/jobs/da_job/test_data/nonces/640641.txt", - "640641" -)] -#[case( - "src/tests/jobs/da_job/test_data/state_update/638353.txt", - "src/tests/jobs/da_job/test_data/nonces/638353.txt", - "638353" -)] -#[tokio::test] -async fn test_da_job_process_job_success( - #[case] state_update_file: String, - #[case] nonces_file: String, - #[case] internal_id: String, -) { - // Mocking DA client calls - let mut da_client = MockDaClient::new(); - da_client.expect_publish_state_diff().with(always(), always()).returning(|_, _| Ok("Done".to_string())); - da_client.expect_max_blob_per_txn().with().returning(|| 6); - da_client.expect_max_bytes_per_blob().with().returning(|| 131072); - - let server = TestConfigBuilder::new().mock_da_client(Box::new(da_client)).build().await; - let config = config().await; - - let state_update = read_state_update_from_file(state_update_file.as_str()).expect("issue while reading"); - - let state_update = serde_json::to_value(&state_update).unwrap(); - let response = json!({ "id": 1,"jsonrpc":"2.0","result": state_update }); - - get_nonce_attached(&server, nonces_file.as_str()); - - let state_update_mock = server.mock(|when, then| { - when.path("/").body_contains("starknet_getStateUpdate"); - then.status(200).body(serde_json::to_vec(&response).unwrap()); - }); - - let response = DaJob - .process_job( - config.as_ref(), - &mut JobItem { - id: Uuid::default(), - internal_id: internal_id.to_string(), - job_type: JobType::DataSubmission, - status: JobStatus::Created, - external_id: ExternalId::String(internal_id.to_string().into_boxed_str()), - metadata: HashMap::default(), - version: 0, - }, - ) - .await; - - assert_matches!(response, - Ok(msg) => { - assert_eq!(msg, eyre!("Done").to_string()); - } - ); - - state_update_mock.assert(); - let _ = drop_database().await; -} +// use crate::jobs::da_job::test::{get_nonce_attached, read_state_update_from_file}; +// use crate::jobs::da_job::DaJob; +// use crate::jobs::types::{ExternalId, JobItem, JobStatus, JobType}; +// use crate::tests::common::drop_database; +// use crate::tests::config::TestConfigBuilder; +// use crate::{config::config, jobs::Job}; +// use assert_matches::assert_matches; +// use color_eyre::eyre::eyre; +// use da_client_interface::MockDaClient; +// use mockall::predicate::always; +// use rstest::rstest; +// use serde_json::json; +// use starknet_core::types::{FieldElement, MaybePendingStateUpdate, PendingStateUpdate, StateDiff}; +// use std::collections::HashMap; +// use uuid::Uuid; +// +// /// Tests the DA Job's handling of a blob length exceeding the supported size. +// /// It mocks the DA client to simulate the environment and expects an error on job processing. +// /// Validates the error message for exceeding blob limits against the expected output. +// /// Asserts correct behavior by comparing the received and expected error messages. 
+// #[rstest] +// #[case( +// "src/tests/jobs/da_job/test_data/state_update/638353.txt", +// "src/tests/jobs/da_job/test_data/nonces/638353.txt", +// "63853", +// 110 +// )] +// #[tokio::test] +// async fn test_da_job_process_job_failure_on_small_blob_size( +// #[case] state_update_file: String, +// #[case] nonces_file: String, +// #[case] internal_id: String, +// #[case] current_blob_length: u64, +// ) { +// // Mocking DA client calls +// let mut da_client = MockDaClient::new(); +// // dummy state will have more than 1200 bytes +// da_client.expect_max_blob_per_txn().with().returning(|| 1); +// da_client.expect_max_bytes_per_blob().with().returning(|| 1200); +// +// let server = TestConfigBuilder::new().mock_da_client(Box::new(da_client)).build().await; +// let config = config().await; +// +// let state_update = read_state_update_from_file(state_update_file.as_str()).expect("issue while reading"); +// +// let state_update = MaybePendingStateUpdate::Update(state_update); +// let state_update = serde_json::to_value(&state_update).unwrap(); +// let response = json!({ "id": 640641,"jsonrpc":"2.0","result": state_update }); +// +// get_nonce_attached(&server, nonces_file.as_str()); +// +// let state_update_mock = server.mock(|when, then| { +// when.path("/").body_contains("starknet_getStateUpdate"); +// then.status(200).body(serde_json::to_vec(&response).unwrap()); +// }); +// +// let max_blob_per_txn = config.da_client().max_blob_per_txn().await; +// +// let response = DaJob +// .process_job( +// config.as_ref(), +// &mut JobItem { +// id: Uuid::default(), +// internal_id: internal_id.to_string(), +// job_type: JobType::DataSubmission, +// status: JobStatus::Created, +// external_id: ExternalId::String(internal_id.to_string().into_boxed_str()), +// metadata: HashMap::default(), +// version: 0, +// }, +// ) +// .await; +// +// assert_matches!(response, +// Err(e) => { +// let expected_error = eyre!( +// "Exceeded the maximum number of blobs per transaction: allowed {}, found {} for block {} and job id {}", +// max_blob_per_txn, +// current_blob_length, +// internal_id.to_string(), +// Uuid::default() +// ) +// .to_string(); +// assert_eq!(e.to_string(), expected_error); +// } +// ); +// +// state_update_mock.assert(); +// let _ = drop_database().await; +// } +// +// /// Tests DA Job processing failure when a block is in pending state. +// /// Simulates a pending block state update and expects job processing to fail. +// /// Validates that the error message matches the expected pending state error. +// /// Asserts correct behavior by comparing the received and expected error messages. 
+// #[rstest] +// #[tokio::test] +// async fn test_da_job_process_job_failure_on_pending_block() { +// let server = TestConfigBuilder::new().build().await; +// let config = config().await; +// let internal_id = "1"; +// +// let pending_state_update = MaybePendingStateUpdate::PendingUpdate(PendingStateUpdate { +// old_root: FieldElement::default(), +// state_diff: StateDiff { +// storage_diffs: vec![], +// deprecated_declared_classes: vec![], +// declared_classes: vec![], +// deployed_contracts: vec![], +// replaced_classes: vec![], +// nonces: vec![], +// }, +// }); +// +// let pending_state_update = serde_json::to_value(&pending_state_update).unwrap(); +// let response = json!({ "id": 1,"jsonrpc":"2.0","result": pending_state_update }); +// +// let state_update_mock = server.mock(|when, then| { +// when.path("/").body_contains("starknet_getStateUpdate"); +// then.status(200).body(serde_json::to_vec(&response).unwrap()); +// }); +// +// let response = DaJob +// .process_job( +// config.as_ref(), +// &mut JobItem { +// id: Uuid::default(), +// internal_id: internal_id.to_string(), +// job_type: JobType::DataSubmission, +// status: JobStatus::Created, +// external_id: ExternalId::String("1".to_string().into_boxed_str()), +// metadata: HashMap::default(), +// version: 0, +// }, +// ) +// .await; +// +// assert_matches!(response, +// Err(e) => { +// let expected_error = eyre!( +// "Cannot process block {} for job id {} as it's still in pending state", +// internal_id.to_string(), +// Uuid::default() +// ) +// .to_string(); +// assert_eq!(e.to_string(), expected_error); +// } +// ); +// +// state_update_mock.assert(); +// } +// +// /// Tests successful DA Job processing with valid state update and nonces files. +// /// Mocks DA client to simulate environment and expects job to process without errors. +// /// Validates the successful job processing by checking the return message "Done". +// /// Asserts correct behavior by comparing the received and expected success messages. 
+// #[rstest] +// #[case( +// "src/tests/jobs/da_job/test_data/state_update/631861.txt", +// "src/tests/jobs/da_job/test_data/nonces/631861.txt", +// "631861" +// )] +// #[case( +// "src/tests/jobs/da_job/test_data/state_update/640641.txt", +// "src/tests/jobs/da_job/test_data/nonces/640641.txt", +// "640641" +// )] +// #[case( +// "src/tests/jobs/da_job/test_data/state_update/638353.txt", +// "src/tests/jobs/da_job/test_data/nonces/638353.txt", +// "638353" +// )] +// #[tokio::test] +// async fn test_da_job_process_job_success( +// #[case] state_update_file: String, +// #[case] nonces_file: String, +// #[case] internal_id: String, +// ) { +// // Mocking DA client calls +// let mut da_client = MockDaClient::new(); +// da_client.expect_publish_state_diff().with(always(), always()).returning(|_, _| Ok("Done".to_string())); +// da_client.expect_max_blob_per_txn().with().returning(|| 6); +// da_client.expect_max_bytes_per_blob().with().returning(|| 131072); +// +// let server = TestConfigBuilder::new().mock_da_client(Box::new(da_client)).build().await; +// let config = config().await; +// +// let state_update = read_state_update_from_file(state_update_file.as_str()).expect("issue while reading"); +// +// let state_update = serde_json::to_value(&state_update).unwrap(); +// let response = json!({ "id": 1,"jsonrpc":"2.0","result": state_update }); +// +// get_nonce_attached(&server, nonces_file.as_str()); +// +// let state_update_mock = server.mock(|when, then| { +// when.path("/").body_contains("starknet_getStateUpdate"); +// then.status(200).body(serde_json::to_vec(&response).unwrap()); +// }); +// +// let response = DaJob +// .process_job( +// config.as_ref(), +// &mut JobItem { +// id: Uuid::default(), +// internal_id: internal_id.to_string(), +// job_type: JobType::DataSubmission, +// status: JobStatus::Created, +// external_id: ExternalId::String(internal_id.to_string().into_boxed_str()), +// metadata: HashMap::default(), +// version: 0, +// }, +// ) +// .await; +// +// assert_matches!(response, +// Ok(msg) => { +// assert_eq!(msg, eyre!("Done").to_string()); +// } +// ); +// +// state_update_mock.assert(); +// let _ = drop_database().await; +// } diff --git a/crates/orchestrator/src/tests/jobs/mod.rs b/crates/orchestrator/src/tests/jobs/mod.rs index b40326aa..4b1b1888 100644 --- a/crates/orchestrator/src/tests/jobs/mod.rs +++ b/crates/orchestrator/src/tests/jobs/mod.rs @@ -7,11 +7,11 @@ pub mod proving_job; #[cfg(test)] pub mod state_update_job; -use assert_matches::assert_matches; use std::collections::HashMap; use std::sync::Arc; use std::time::Duration; +use assert_matches::assert_matches; use mockall::predicate::eq; use mongodb::bson::doc; use omniqueue::QueueError; diff --git a/crates/orchestrator/src/tests/jobs/proving_job/mod.rs b/crates/orchestrator/src/tests/jobs/proving_job/mod.rs index 6eeaaef7..35ddd7fa 100644 --- a/crates/orchestrator/src/tests/jobs/proving_job/mod.rs +++ b/crates/orchestrator/src/tests/jobs/proving_job/mod.rs @@ -1,12 +1,12 @@ use std::collections::HashMap; -use crate::config::{config, config_force_init}; use httpmock::prelude::*; use prover_client_interface::{MockProverClient, TaskStatus}; use rstest::*; use uuid::Uuid; use super::super::common::{default_job_item, init_config}; +use crate::config::{config, config_force_init}; use crate::jobs::constants::JOB_METADATA_CAIRO_PIE_PATH_KEY; use crate::jobs::proving_job::ProvingJob; use crate::jobs::types::{JobItem, JobStatus, JobType}; diff --git a/crates/orchestrator/src/tests/workers/utils/mod.rs 
b/crates/orchestrator/src/tests/workers/utils/mod.rs
index 03dd0cd3..c4fcd5da 100644
--- a/crates/orchestrator/src/tests/workers/utils/mod.rs
+++ b/crates/orchestrator/src/tests/workers/utils/mod.rs
@@ -1,10 +1,12 @@
+use std::collections::HashMap;
+
+use mockall::predicate::eq;
+use uuid::Uuid;
+
 use crate::database::MockDatabase;
 use crate::jobs::constants::JOB_METADATA_CAIRO_PIE_PATH_KEY;
 use crate::jobs::types::{ExternalId, JobItem, JobStatus, JobType};
 use crate::jobs::MockJob;
-use mockall::predicate::eq;
-use std::collections::HashMap;
-use uuid::Uuid;
 
 pub fn get_job_item_mock_by_id(id: String, uuid: Uuid) -> JobItem {
     JobItem {
diff --git a/crates/orchestrator/src/workers/data_submission_worker.rs b/crates/orchestrator/src/workers/data_submission_worker.rs
index 3c2d4331..66d7d88b 100644
--- a/crates/orchestrator/src/workers/data_submission_worker.rs
+++ b/crates/orchestrator/src/workers/data_submission_worker.rs
@@ -1,10 +1,12 @@
+use std::collections::HashMap;
+use std::error::Error;
+
+use async_trait::async_trait;
+
 use crate::config::config;
 use crate::jobs::create_job;
 use crate::jobs::types::{JobStatus, JobType};
 use crate::workers::Worker;
-use async_trait::async_trait;
-use std::collections::HashMap;
-use std::error::Error;
 
 pub struct DataSubmissionWorker;
 
@@ -38,7 +40,8 @@ impl Worker for DataSubmissionWorker {
         let latest_data_submission_id: u64 = latest_data_submission_job_id.parse()?;
         let latest_proven_id: u64 = latest_proven_job_id.parse()?;
 
-        // creating data submission jobs for latest blocks that don't have existing data submission jobs yet.
+        // creating data submission jobs for latest blocks that don't have existing data submission jobs
+        // yet.
         for new_job_id in latest_data_submission_id + 1..latest_proven_id + 1 {
             create_job(JobType::DataSubmission, new_job_id.to_string(), HashMap::new()).await?;
         }
diff --git a/crates/orchestrator/src/workers/mod.rs b/crates/orchestrator/src/workers/mod.rs
index 785b1e2d..eb0ad793 100644
--- a/crates/orchestrator/src/workers/mod.rs
+++ b/crates/orchestrator/src/workers/mod.rs
@@ -1,7 +1,10 @@
-use crate::{config::config, jobs::types::JobStatus};
-use async_trait::async_trait;
 use std::error::Error;
 
+use async_trait::async_trait;
+
+use crate::config::config;
+use crate::jobs::types::JobStatus;
+
 pub mod data_submission_worker;
 pub mod proof_registration;
 pub mod proving;
@@ -20,8 +23,8 @@ pub trait Worker: Send + Sync {
     async fn run_worker(&self) -> Result<(), Box<dyn Error>>;
 
     // Assumption
-    // If say a job for block X fails, we don't want the worker to respawn another job for the same block
-    // we will resolve the existing failed job first.
+    // If say a job for block X fails, we don't want the worker to respawn another job for the same
+    // block we will resolve the existing failed job first.
     // We assume the system to keep working till a job hasn't failed,
     // as soon as it fails we currently halt any more execution and wait for manual intervention.
diff --git a/crates/orchestrator/src/workers/proving.rs b/crates/orchestrator/src/workers/proving.rs
index 4ec85b91..7fcb7194 100644
--- a/crates/orchestrator/src/workers/proving.rs
+++ b/crates/orchestrator/src/workers/proving.rs
@@ -1,9 +1,11 @@
+use std::error::Error;
+
+use async_trait::async_trait;
+
 use crate::config::config;
 use crate::jobs::create_job;
 use crate::jobs::types::{JobStatus, JobType};
 use crate::workers::Worker;
-use async_trait::async_trait;
-use std::error::Error;
 
 pub struct ProvingWorker;
diff --git a/crates/prover-services/gps-fact-checker/src/fact_info.rs b/crates/prover-services/gps-fact-checker/src/fact_info.rs
index 7cf6a16e..d0aff512 100644
--- a/crates/prover-services/gps-fact-checker/src/fact_info.rs
+++ b/crates/prover-services/gps-fact-checker/src/fact_info.rs
@@ -31,7 +31,9 @@ pub fn get_fact_info(cairo_pie: &CairoPie, program_hash: Option<Felt>) -> Result
     let fact_topology = get_fact_topology(cairo_pie, program_output.len())?;
     let program_hash = match program_hash {
         Some(hash) => hash,
-        None => Felt::from_bytes_be(&compute_program_hash_chain(&cairo_pie.metadata.program, BOOTLOADER_VERSION)?.to_bytes_be()),
+        None => Felt::from_bytes_be(
+            &compute_program_hash_chain(&cairo_pie.metadata.program, BOOTLOADER_VERSION)?.to_bytes_be(),
+        ),
     };
     let output_root = generate_merkle_root(&program_output, &fact_topology)?;
     let fact = keccak256([program_hash.to_bytes_be(), *output_root.node_hash].concat());
diff --git a/crates/settlement-clients/aptos/Cargo.toml b/crates/settlement-clients/aptos/Cargo.toml
index 27ac7e64..15d676fa 100644
--- a/crates/settlement-clients/aptos/Cargo.toml
+++ b/crates/settlement-clients/aptos/Cargo.toml
@@ -13,8 +13,12 @@ async-trait = { workspace = true }
 color-eyre = { workspace = true }
 dotenvy = { workspace = true }
 hex = { workspace = true }
+log = { workspace = true }
 mockall = { workspace = true }
 once_cell = { workspace = true }
 settlement-client-interface = { workspace = true }
 tokio = { workspace = true }
 utils = { workspace = true }
+
+[dev-dependencies]
+test-log = { workspace = true }
diff --git a/crates/settlement-clients/aptos/src/config.rs b/crates/settlement-clients/aptos/src/config.rs
index 6dba9b31..1b22c350 100644
--- a/crates/settlement-clients/aptos/src/config.rs
+++ b/crates/settlement-clients/aptos/src/config.rs
@@ -1,5 +1,4 @@
 use dotenvy::dotenv;
-
 use settlement_client_interface::SettlementConfig;
 use utils::env_utils::get_env_var_or_panic;
 
diff --git a/crates/settlement-clients/aptos/src/helper.rs b/crates/settlement-clients/aptos/src/helper.rs
index f9a25da9..5eff60f7 100644
--- a/crates/settlement-clients/aptos/src/helper.rs
+++ b/crates/settlement-clients/aptos/src/helper.rs
@@ -18,7 +18,7 @@ pub(crate) fn build_transaction(
     )
     .sender(sender.address())
     .sequence_number(i)
-    .max_gas_amount(100000)
+    .max_gas_amount(30000)
     .gas_unit_price(100)
     .build();
     sender.sign_transaction(tx)
diff --git a/crates/settlement-clients/aptos/src/lib.rs b/crates/settlement-clients/aptos/src/lib.rs
index 6f3ba873..eab09f11 100644
--- a/crates/settlement-clients/aptos/src/lib.rs
+++ b/crates/settlement-clients/aptos/src/lib.rs
@@ -16,7 +16,6 @@ use aptos_sdk::types::LocalAccount;
 use async_trait::async_trait;
 use color_eyre::eyre;
 use mockall::automock;
-
 use settlement_client_interface::{SettlementClient, SettlementVerificationStatus};
 
 use crate::config::AptosSettlementConfig;
@@ -89,8 +88,14 @@ impl SettlementClient for AptosSettlementClient {
             .await
             .expect("Failed to submit update state transaction")
             .into_inner();
-
-        Ok(tx.transaction_info().unwrap().hash.to_string())
+        let transaction_info = tx.transaction_info().unwrap();
+        log::info!(
+            "update_state_calldata finished: hash={}; gas={}",
+            transaction_info.hash.to_string(),
+            transaction_info.gas_used
+        );
+
+        Ok(transaction_info.hash.to_string())
     }
 
     #[allow(unused)]
@@ -131,7 +136,14 @@ impl SettlementClient for AptosSettlementClient {
             .expect("Failed to submit update state transaction")
             .into_inner();
 
-        Ok(tx.transaction_info().unwrap().hash.to_string())
+        let transaction_info = tx.transaction_info().unwrap();
+        log::info!(
+            "update_state_blobs finished: hash={}; gas={}",
+            transaction_info.hash.to_string(),
+            transaction_info.gas_used
+        );
+
+        Ok(transaction_info.hash.to_string())
     }
 
     async fn verify_tx_inclusion(&self, tx_hash: &str) -> eyre::Result<SettlementVerificationStatus> {
@@ -183,8 +195,8 @@ mod test {
     use aptos_sdk::move_types::u256;
     use aptos_sdk::types::chain_id::NamedChain::TESTING;
     use aptos_testcontainer::test_utils::aptos_container_test_utils::{lazy_aptos_container, run};
-
     use settlement_client_interface::{SettlementClient, SettlementVerificationStatus};
+    use test_log::test;
 
     use crate::config::AptosSettlementConfig;
     use crate::helper::build_transaction;
@@ -214,7 +226,13 @@ mod test {
             ),
         ));
         let tx = build_transaction(payload, &settlement_client.account, settlement_client.chain_id);
-        settlement_client.client.submit_and_wait(&tx).await.expect("Failed to init state!");
+        let tx = settlement_client.client.submit_and_wait(&tx).await.expect("Failed to init state!").into_inner();
+        let transaction_info = tx.transaction_info().unwrap();
+        log::info!(
+            "update_state_blobs finished: hash={}; gas={}",
+            transaction_info.hash.to_string(),
+            transaction_info.gas_used
+        );
     }
 
     async fn aptos_fact_registry(settlement_client: &AptosSettlementClient, fact: &str) {
@@ -225,10 +243,16 @@ mod test {
             serialize_values(vec![&MoveValue::vector_u8(hex::decode(fact).unwrap())].into_iter()),
         ));
         let tx = build_transaction(payload, &settlement_client.account, settlement_client.chain_id);
-        settlement_client.client.submit_and_wait(&tx).await.expect("Failed to registry fact!");
+        let tx = settlement_client.client.submit_and_wait(&tx).await.expect("Failed to registry fact!").into_inner();
+        let transaction_info = tx.transaction_info().unwrap();
+        log::info!(
+            "update_state_blobs finished: hash={}; gas={}",
+            transaction_info.hash.to_string(),
+            transaction_info.gas_used
+        );
     }
 
-    #[tokio::test]
+    #[test(tokio::test)]
    async fn test_update_state_calldata() -> anyhow::Result<()> {
        run(1, |accounts| {
            Box::pin(async move {
@@ -291,14 +315,14 @@ mod test {
                    .update_state_calldata(program_output, onchain_data_hash, onchain_data_size)
                    .await
                    .expect("Failed to submit blob!");
-                eprintln!("result = {:#?}", result);
+                log::info!("result = {:#?}", result);
                let verify_inclusion = settlement_client.verify_tx_inclusion(result.as_str()).await.unwrap();
-                eprintln!("verify_inclusion = {:#?}", verify_inclusion);
+                log::info!("verify_inclusion = {:#?}", verify_inclusion);
                assert_eq!(verify_inclusion, SettlementVerificationStatus::Verified);
                let block_number = settlement_client.get_last_settled_block().await.unwrap();
-                eprintln!("block_number = {:#?}", block_number);
+                log::info!("block_number = {:#?}", block_number);
                Ok(())
            })
        })
diff --git a/crates/settlement-clients/ethereum/src/clients/interfaces/validity_interface.rs b/crates/settlement-clients/ethereum/src/clients/interfaces/validity_interface.rs
index 7b8a31a1..37bf76b0 100644
--- a/crates/settlement-clients/ethereum/src/clients/interfaces/validity_interface.rs
+++ b/crates/settlement-clients/ethereum/src/clients/interfaces/validity_interface.rs
@@ -1,16 +1,14 @@
 use std::sync::Arc;
 
+use alloy::network::Ethereum;
+use alloy::primitives::{I256, U256};
+use alloy::providers::Provider;
+use alloy::rpc::types::eth::TransactionReceipt;
+use alloy::sol;
+use alloy::transports::http::Http;
+use alloy::transports::{RpcError, TransportErrorKind};
 use async_trait::async_trait;
-use alloy::{
-    network::Ethereum,
-    primitives::{I256, U256},
-    providers::Provider,
-    rpc::types::eth::TransactionReceipt,
-    sol,
-    transports::{http::Http, RpcError, TransportErrorKind},
-};
-
 use crate::types::LocalWalletSignerMiddleware;
 
 // TODO: should be moved to Zaun:
diff --git a/crates/settlement-clients/ethereum/src/clients/validity.rs b/crates/settlement-clients/ethereum/src/clients/validity.rs
index 8ed89c5b..575c6748 100644
--- a/crates/settlement-clients/ethereum/src/clients/validity.rs
+++ b/crates/settlement-clients/ethereum/src/clients/validity.rs
@@ -1,6 +1,8 @@
 use std::sync::Arc;
 
-use alloy::{network::Ethereum, primitives::Address, transports::http::Http};
+use alloy::network::Ethereum;
+use alloy::primitives::Address;
+use alloy::transports::http::Http;
 
 use crate::clients::interfaces::validity_interface::StarknetValidityContract;
 use crate::types::LocalWalletSignerMiddleware;
diff --git a/crates/settlement-clients/ethereum/src/conversion.rs b/crates/settlement-clients/ethereum/src/conversion.rs
index c86eb89a..ea4ab011 100644
--- a/crates/settlement-clients/ethereum/src/conversion.rs
+++ b/crates/settlement-clients/ethereum/src/conversion.rs
@@ -1,7 +1,7 @@
 use alloy::primitives::U256;
 
-/// Converts a `&[Vec<u8>]` to `Vec<U256>`. Each inner slice is expected to be exactly 32 bytes long.
-/// Pads with zeros if any inner slice is shorter than 32 bytes.
+/// Converts a `&[Vec<u8>]` to `Vec<U256>`. Each inner slice is expected to be exactly 32 bytes
+/// long. Pads with zeros if any inner slice is shorter than 32 bytes.
 pub(crate) fn slice_slice_u8_to_vec_u256(slices: &[[u8; 32]]) -> Vec<U256> {
     slices.iter().map(|slice| slice_u8_to_u256(slice)).collect()
 }
diff --git a/crates/settlement-clients/ethereum/src/lib.rs b/crates/settlement-clients/ethereum/src/lib.rs
index dde57309..68603087 100644
--- a/crates/settlement-clients/ethereum/src/lib.rs
+++ b/crates/settlement-clients/ethereum/src/lib.rs
@@ -3,35 +3,34 @@ pub mod config;
 pub mod conversion;
 pub mod types;
 
+use std::fmt::Write;
+use std::path::{Path, PathBuf};
+use std::str::FromStr;
+use std::sync::Arc;
+
 use alloy::consensus::{
     BlobTransactionSidecar, SignableTransaction, TxEip4844, TxEip4844Variant, TxEip4844WithSidecar, TxEnvelope,
 };
 use alloy::eips::eip2718::Encodable2718;
 use alloy::eips::eip2930::AccessList;
 use alloy::eips::eip4844::BYTES_PER_BLOB;
-use alloy::primitives::{Bytes, FixedBytes};
-use alloy::{
-    network::EthereumWallet,
-    primitives::{Address, B256, U256},
-    providers::{PendingTransactionConfig, Provider, ProviderBuilder},
-    rpc::types::TransactionReceipt,
-    signers::local::PrivateKeySigner,
-};
+use alloy::network::EthereumWallet;
+use alloy::primitives::{Address, Bytes, FixedBytes, B256, U256};
+use alloy::providers::{PendingTransactionConfig, Provider, ProviderBuilder};
+use alloy::rpc::types::TransactionReceipt;
+use alloy::signers::local::PrivateKeySigner;
 use async_trait::async_trait;
 use c_kzg::{Blob, Bytes32, KzgCommitment, KzgProof, KzgSettings};
 use color_eyre::eyre::eyre;
 use color_eyre::Result;
-use mockall::{automock, lazy_static, predicate::*};
+use mockall::predicate::*;
+use mockall::{automock, lazy_static};
 use rstest::rstest;
-use std::fmt::Write;
-use std::path::{Path, PathBuf};
-use std::str::FromStr;
-use std::sync::Arc;
-
-use crate::clients::interfaces::validity_interface::StarknetValidityContractTrait;
 use settlement_client_interface::{SettlementClient, SettlementVerificationStatus, SETTLEMENT_SETTINGS_NAME};
-use utils::{env_utils::get_env_var_or_panic, settings::SettingsProvider};
+use utils::env_utils::get_env_var_or_panic;
+use utils::settings::SettingsProvider;
 
+use crate::clients::interfaces::validity_interface::StarknetValidityContractTrait;
 use crate::clients::StarknetValidityContractClient;
 use crate::config::EthereumSettlementConfig;
 use crate::conversion::{slice_slice_u8_to_vec_u256, slice_u8_to_u256};
@@ -100,11 +99,7 @@ impl EthereumSettlementClient {
             &KZG_SETTINGS,
         )?;
 
-        if !eval {
-            Err(eyre!("ERROR : Assertion failed, not able to verify the proof."))
-        } else {
-            Ok(kzg_proof)
-        }
+        if !eval { Err(eyre!("ERROR : Assertion failed, not able to verify the proof.")) } else { Ok(kzg_proof) }
     }
 }
 
@@ -249,7 +244,8 @@ async fn prepare_sidecar(
 fn get_txn_input_bytes(program_output: Vec<[u8; 32]>, kzg_proof: [u8; 48]) -> Bytes {
     let program_output_hex_string = vec_u8_32_to_hex_string(program_output);
     let kzg_proof_hex_string = u8_48_to_hex_string(kzg_proof);
-    // cast keccak "updateStateKzgDA(uint256[] calldata programOutput, bytes calldata kzgProof)" | cut -b 1-10
+    // cast keccak "updateStateKzgDA(uint256[] calldata programOutput, bytes calldata kzgProof)" | cut
+    // -b 1-10
     let function_selector = "0x1a790556";
 
     Bytes::from(program_output_hex_string + &kzg_proof_hex_string + function_selector)
diff --git a/crates/settlement-clients/ethereum/src/types.rs b/crates/settlement-clients/ethereum/src/types.rs
index 3415ee0a..6ec5914c 100644
--- a/crates/settlement-clients/ethereum/src/types.rs
+++ b/crates/settlement-clients/ethereum/src/types.rs
@@ -1,11 +1,7 @@
-use alloy::{
-    network::{Ethereum, 
EthereumWallet}, - providers::{ - fillers::{ChainIdFiller, FillProvider, GasFiller, JoinFill, NonceFiller, WalletFiller}, - Identity, RootProvider, - }, - transports::http::{Client, Http}, -}; +use alloy::network::{Ethereum, EthereumWallet}; +use alloy::providers::fillers::{ChainIdFiller, FillProvider, GasFiller, JoinFill, NonceFiller, WalletFiller}; +use alloy::providers::{Identity, RootProvider}; +use alloy::transports::http::{Client, Http}; pub type LocalWalletSignerMiddleware = FillProvider< JoinFill< diff --git a/crates/settlement-clients/starknet/src/lib.rs b/crates/settlement-clients/starknet/src/lib.rs index d36121e0..ab3eb795 100644 --- a/crates/settlement-clients/starknet/src/lib.rs +++ b/crates/settlement-clients/starknet/src/lib.rs @@ -4,22 +4,16 @@ use async_trait::async_trait; use color_eyre::eyre::eyre; use color_eyre::Result; use lazy_static::lazy_static; -use mockall::{automock, predicate::*}; -use starknet::{ - accounts::{Account, Call, ExecutionEncoding, SingleOwnerAccount}, - core::{ - types::{BlockId, BlockTag, Felt, FunctionCall}, - utils::get_selector_from_name, - }, - providers::{jsonrpc::HttpTransport, JsonRpcClient}, - signers::{LocalWallet, SigningKey}, -}; -use starknet::accounts::ConnectedAccount; -use starknet::core::types::ExecutionResult; -use starknet::providers::Provider; -use tokio::time::{Duration, sleep}; - -use settlement_client_interface::{SETTLEMENT_SETTINGS_NAME, SettlementClient, SettlementVerificationStatus}; +use mockall::automock; +use mockall::predicate::*; +use settlement_client_interface::{SettlementClient, SettlementVerificationStatus, SETTLEMENT_SETTINGS_NAME}; +use starknet::accounts::{Account, Call, ConnectedAccount, ExecutionEncoding, SingleOwnerAccount}; +use starknet::core::types::{BlockId, BlockTag, ExecutionResult, Felt, FunctionCall}; +use starknet::core::utils::get_selector_from_name; +use starknet::providers::jsonrpc::HttpTransport; +use starknet::providers::{JsonRpcClient, Provider}; +use starknet::signers::{LocalWallet, SigningKey}; +use tokio::time::{sleep, Duration}; use utils::env_utils::get_env_var_or_panic; use utils::settings::SettingsProvider; @@ -85,8 +79,8 @@ lazy_static! { get_selector_from_name("stateBlockNumber").expect("Invalid update state selector"); } -// TODO: Note that we already have an implementation of the appchain core contract client available here: -// https://github.com/keep-starknet-strange/zaun/tree/main/crates/l3/appchain-core-contract-client +// TODO: Note that we already have an implementation of the appchain core contract client available +// here: https://github.com/keep-starknet-strange/zaun/tree/main/crates/l3/appchain-core-contract-client // However, this implementation uses different Felt types, and incorporating all of them // into this repository would introduce unnecessary complexity. 
// Therefore, we will wait for the update of starknet_rs in the Zaun repository before adapting @@ -141,7 +135,7 @@ impl SettlementClient for StarknetSettlementClient { return Ok(SettlementVerificationStatus::Pending); } match tx_receipt.receipt.execution_result() { - ExecutionResult::Succeeded => { Ok(SettlementVerificationStatus::Verified) } + ExecutionResult::Succeeded => Ok(SettlementVerificationStatus::Verified), ExecutionResult::Reverted { reason } => { Ok(SettlementVerificationStatus::Rejected(format!("Tx {} has been reverted: {}", tx_hash, reason))) } diff --git a/ionia b/ionia index bdbc6c28..3a15977d 160000 --- a/ionia +++ b/ionia @@ -1 +1 @@ -Subproject commit bdbc6c284524d00f485d9c90c49518e6402b500f +Subproject commit 3a15977d749128ebb993ff388405477d305427c6