diff --git a/CHANGELOG.md b/CHANGELOG.md index 92536c8a..4795228b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,12 +11,15 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). - Tests for updating the state. - Function to update the state and publish blob on ethereum in state update job. - Fixtures for testing. +- Tests for DA job. - Added basic rust-toolchain support. + ## Changed - GitHub's coverage CI yml file for localstack and db testing. - Orchestrator :Moved TestConfigBuilder to `config.rs` in tests folder. +- Moved DA job unit tests to the tests folder. ## Removed
diff --git a/Cargo.lock b/Cargo.lock index 57c82755..706208f1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6278,6 +6278,7 @@ version = "0.1.0" dependencies = [ "alloy 0.1.2", "arc-swap", + "assert_matches", "async-std", "async-trait", "aws-config",
diff --git a/crates/orchestrator/Cargo.toml b/crates/orchestrator/Cargo.toml index 6ca4c43f..76c23732 100644 --- a/crates/orchestrator/Cargo.toml +++ b/crates/orchestrator/Cargo.toml @@ -66,6 +66,7 @@ with_mongodb = ["mongodb"] with_sqs = ["omniqueue"] [dev-dependencies] +assert_matches = "1.5.0" hyper = { version = "0.14", features = ["full"] } rstest = { workspace = true } httpmock = { workspace = true, features = ["remote"] }
diff --git a/crates/orchestrator/src/jobs/da_job/mod.rs b/crates/orchestrator/src/jobs/da_job/mod.rs index 4b21b9c4..5e49d75c 100644 --- a/crates/orchestrator/src/jobs/da_job/mod.rs +++ b/crates/orchestrator/src/jobs/da_job/mod.rs @@ -63,6 +63,7 @@ impl Job for DaJob { async fn process_job(&self, config: &Config, job: &mut JobItem) -> Result<String> { let block_no = job.internal_id.parse::<u64>()?; + let state_update = config.starknet_client().get_state_update(BlockId::Number(block_no)).await?; let state_update = match state_update { @@ -127,7 +128,7 @@ } } -fn fft_transformation(elements: Vec<BigUint>) -> Vec<BigUint> { +pub fn fft_transformation(elements: Vec<BigUint>) -> Vec<BigUint> { let xs: Vec<BigUint> = (0..*BLOB_LEN) .map(|i| { let bin = format!("{:012b}", i); @@ -149,7 +150,7 @@ fn fft_transformation(elements: Vec<BigUint>) -> Vec<BigUint> { transform } -fn convert_to_biguint(elements: Vec<FieldElement>) -> Vec<BigUint> { +pub fn convert_to_biguint(elements: Vec<FieldElement>) -> Vec<BigUint> { // Initialize the vector with 4096 BigUint zeros let mut biguint_vec = vec![BigUint::zero(); 4096]; @@ -200,7 +201,7 @@ fn data_to_blobs(blob_size: u64, block_data: Vec<BigUint>) -> Result<Vec<Vec<u8>>> { Ok(blobs) } -async fn state_update_to_blob_data( +pub async fn state_update_to_blob_data( block_no: u64, state_update: StateUpdate, config: &Config, @@ -342,31 +343,37 @@ fn da_word(class_flag: bool, nonce_change: Option<FieldElement>, num_changes: u64) -> FieldElement { } #[cfg(test)] -mod tests { + +pub mod test { + use crate::jobs::da_job::da_word; use std::fs; use std::fs::File; use std::io::Read; + use crate::data_storage::MockDataStorage; use ::serde::{Deserialize, Serialize}; + use color_eyre::Result; + use da_client_interface::MockDaClient; use httpmock::prelude::*; use majin_blob_core::blob; use majin_blob_types::serde; use majin_blob_types::state_diffs::UnorderedEq; - // use majin_blob_types::serde; - use crate::data_storage::MockDataStorage; - use da_client_interface::MockDaClient; use rstest::rstest; use serde_json::json; + use starknet_core::types::{FieldElement, StateUpdate}; - use super::*; use crate::tests::common::init_config; + /// Tests the `da_word` function with various inputs for class flag, new nonce, and number of changes. + /// Verifies that `da_word` produces the correct FieldElement based on the provided parameters.
+ /// Uses test cases with different combinations of inputs and expected output strings. + /// Asserts the function's correctness by comparing the computed and expected FieldElements. #[rstest] #[case(false, 1, 1, "18446744073709551617")] #[case(false, 1, 0, "18446744073709551616")] #[case(false, 0, 6, "6")] #[case(true, 1, 0, "340282366920938463481821351505477763072")] - fn da_word_works( + fn test_da_word( #[case] class_flag: bool, #[case] new_nonce: u64, #[case] num_changes: u64, @@ -378,24 +385,28 @@ mod tests { assert_eq!(da_word, expected); } + /// Tests `state_update_to_blob_data` conversion with different state update files and block numbers. + /// Mocks DA client and storage client interactions for the test environment. + /// Compares the generated blob data against expected values to ensure correctness. + /// Verifies the data integrity by checking that the parsed state diffs match the expected diffs. #[rstest] #[case( 631861, - "src/jobs/da_job/test_data/state_update_from_block_631861.txt", - "src/jobs/da_job/test_data/test_blob_631861.txt", - "src/jobs/da_job/test_data/nonces_from_block_631861.txt" + "src/tests/jobs/da_job/test_data/state_update/631861.txt", + "src/tests/jobs/da_job/test_data/test_blob/631861.txt", + "src/tests/jobs/da_job/test_data/nonces/631861.txt" )] #[case( 638353, - "src/jobs/da_job/test_data/state_update_from_block_638353.txt", - "src/jobs/da_job/test_data/test_blob_638353.txt", - "src/jobs/da_job/test_data/nonces_from_block_638353.txt" + "src/tests/jobs/da_job/test_data/state_update/638353.txt", + "src/tests/jobs/da_job/test_data/test_blob/638353.txt", + "src/tests/jobs/da_job/test_data/nonces/638353.txt" )] #[case( 640641, - "src/jobs/da_job/test_data/state_update_from_block_640641.txt", - "src/jobs/da_job/test_data/test_blob_640641.txt", - "src/jobs/da_job/test_data/nonces_from_block_640641.txt" + "src/tests/jobs/da_job/test_data/state_update/640641.txt", + "src/tests/jobs/da_job/test_data/test_blob/640641.txt", + "src/tests/jobs/da_job/test_data/nonces/640641.txt" )] #[tokio::test] async fn test_state_update_to_blob_data( @@ -404,6 +415,8 @@ mod tests { #[case] file_path: &str, #[case] nonce_file_path: &str, ) { + use crate::jobs::da_job::{convert_to_biguint, state_update_to_blob_data}; + let server = MockServer::start(); let mut da_client = MockDaClient::new(); let mut storage_client = MockDataStorage::new(); @@ -445,16 +458,22 @@ mod tests { assert!(block_data_state_diffs.unordered_eq(&blob_data_state_diffs), "value of data json should be identical"); } + /// Tests the `fft_transformation` function with various test blob files. + /// Verifies the correctness of FFT and IFFT transformations by ensuring round-trip consistency. + /// Parses the original blob data, recovers it using IFFT, and re-applies FFT. + /// Asserts that the transformed data matches the original pre-IFFT data, ensuring integrity. 
#[rstest] - #[case("src/jobs/da_job/test_data/test_blob_631861.txt")] - #[case("src/jobs/da_job/test_data/test_blob_638353.txt")] - #[case("src/jobs/da_job/test_data/test_blob_639404.txt")] - #[case("src/jobs/da_job/test_data/test_blob_640641.txt")] - #[case("src/jobs/da_job/test_data/test_blob_640644.txt")] - #[case("src/jobs/da_job/test_data/test_blob_640646.txt")] - #[case("src/jobs/da_job/test_data/test_blob_640647.txt")] + #[case("src/tests/jobs/da_job/test_data/test_blob/638353.txt")] + #[case("src/tests/jobs/da_job/test_data/test_blob/631861.txt")] + #[case("src/tests/jobs/da_job/test_data/test_blob/639404.txt")] + #[case("src/tests/jobs/da_job/test_data/test_blob/640641.txt")] + #[case("src/tests/jobs/da_job/test_data/test_blob/640644.txt")] + #[case("src/tests/jobs/da_job/test_data/test_blob/640646.txt")] + #[case("src/tests/jobs/da_job/test_data/test_blob/640647.txt")] fn test_fft_transformation(#[case] file_to_check: &str) { // parsing the blob hex to the bigUints + + use crate::jobs::da_job::fft_transformation; let original_blob_data = serde::parse_file_to_blob_data(file_to_check); // converting the data to its original format let ifft_blob_data = blob::recover(original_blob_data.clone()); @@ -465,6 +484,10 @@ mod tests { assert_eq!(fft_blob_data, original_blob_data); } + /// Tests the serialization and deserialization process using bincode. + /// Serializes a nested vector of integers and then deserializes it back. + /// Verifies that the original data matches the deserialized data. + /// Ensures the integrity and correctness of bincode's (de)serialization. #[rstest] fn test_bincode() { let data = vec![vec![1, 2], vec![3, 4]]; @@ -475,7 +498,7 @@ mod tests { assert_eq!(data, deserialize_data); } - pub fn read_state_update_from_file(file_path: &str) -> Result { + pub(crate) fn read_state_update_from_file(file_path: &str) -> Result { // let file_path = format!("state_update_block_no_{}.txt", block_no); let mut file = File::open(file_path)?; let mut json = String::new(); @@ -519,8 +542,6 @@ mod tests { let mut new_hex_chars = hex_chars.join(""); new_hex_chars = new_hex_chars.trim_start_matches('0').to_string(); - - // Handle the case where the trimmed string is empty (e.g., data was all zeros) if new_hex_chars.is_empty() { "0x0".to_string() } else { diff --git a/crates/orchestrator/src/tests/common/constants.rs b/crates/orchestrator/src/tests/common/constants.rs index c1884752..01c7d832 100644 --- a/crates/orchestrator/src/tests/common/constants.rs +++ b/crates/orchestrator/src/tests/common/constants.rs @@ -1,5 +1,2 @@ -pub const MADARA_RPC_URL: &str = "http://localhost:9944"; -#[allow(dead_code)] pub const ETHEREUM_MAX_BYTES_PER_BLOB: u64 = 131072; -#[allow(dead_code)] pub const ETHEREUM_MAX_BLOB_PER_TXN: u64 = 6; diff --git a/crates/orchestrator/src/tests/common/mod.rs b/crates/orchestrator/src/tests/common/mod.rs index c008aa22..d22d360e 100644 --- a/crates/orchestrator/src/tests/common/mod.rs +++ b/crates/orchestrator/src/tests/common/mod.rs @@ -4,7 +4,6 @@ use std::collections::HashMap; use std::sync::Arc; use ::uuid::Uuid; -use constants::*; use da_client_interface::MockDaClient; use mongodb::Client; use prover_client_interface::MockProverClient; @@ -13,6 +12,7 @@ use settlement_client_interface::MockSettlementClient; use starknet::providers::jsonrpc::HttpTransport; use starknet::providers::JsonRpcClient; use url::Url; +use utils::env_utils::get_env_var_or_panic; use crate::config::Config; use crate::data_storage::aws_s3::config::{AWSS3ConfigType, S3LocalStackConfig}; @@ 
-37,7 +37,7 @@ pub async fn init_config( ) -> Config { let _ = tracing_subscriber::fmt().with_max_level(tracing::Level::INFO).with_target(false).try_init(); - let rpc_url = rpc_url.unwrap_or(MADARA_RPC_URL.to_string()); + let rpc_url = rpc_url.unwrap_or(get_env_var_or_panic("MADARA_RPC_URL").to_string()); let database = database.unwrap_or_default(); let queue = queue.unwrap_or_default(); let da_client = da_client.unwrap_or_default();
diff --git a/crates/orchestrator/src/tests/config.rs b/crates/orchestrator/src/tests/config.rs index f64f18aa..89dd8189 100644 --- a/crates/orchestrator/src/tests/config.rs +++ b/crates/orchestrator/src/tests/config.rs @@ -18,6 +18,7 @@ use crate::queue::QueueProvider; use crate::tests::common::{drop_database, get_storage_client}; use httpmock::MockServer; + // Inspiration : https://rust-unofficial.github.io/patterns/patterns/creational/builder.html // TestConfigBuilder allows to heavily customise the global configs based on the test's requirement. // Eg: We want to mock only the da client and leave rest to be as it is, use mock_da_client. @@ -120,6 +121,8 @@ impl TestConfigBuilder { config_force_init(config).await; + drop_database().await.unwrap(); + server } }
diff --git a/crates/orchestrator/src/tests/jobs/da_job/mod.rs b/crates/orchestrator/src/tests/jobs/da_job/mod.rs index 56f08375..251f4637 100644 --- a/crates/orchestrator/src/tests/jobs/da_job/mod.rs +++ b/crates/orchestrator/src/tests/jobs/da_job/mod.rs @@ -1,79 +1,106 @@ -use std::collections::HashMap; - -use crate::config::{config, config_force_init}; -use crate::data_storage::MockDataStorage; -use da_client_interface::{DaVerificationStatus, MockDaClient}; -use httpmock::prelude::*; -use rstest::*; -use serde_json::json; -use starknet_core::types::{FieldElement, MaybePendingStateUpdate, StateDiff, StateUpdate}; -use uuid::Uuid; - -use super::super::common::constants::{ETHEREUM_MAX_BLOB_PER_TXN, ETHEREUM_MAX_BYTES_PER_BLOB}; -use super::super::common::{default_job_item, init_config}; +use crate::jobs::da_job::test::{get_nonce_attached, read_state_update_from_file}; use crate::jobs::da_job::DaJob; use crate::jobs::types::{ExternalId, JobItem, JobStatus, JobType}; -use crate::jobs::Job; - -#[rstest] -#[tokio::test] -async fn test_create_job() { - let config = init_config(None, None, None, None, None, None, None).await; - let job = DaJob.create_job(&config, String::from("0"), HashMap::new()).await; - assert!(job.is_ok()); - - let job = job.unwrap(); - - let job_type = job.job_type; - assert_eq!(job_type, JobType::DataSubmission, "job_type should be DataSubmission"); - assert!(!(job.id.is_nil()), "id should not be nil"); - assert_eq!(job.status, JobStatus::Created, "status should be Created"); - assert_eq!(job.version, 0_i32, "version should be 0"); - assert_eq!(job.external_id.unwrap_string().unwrap(), String::new(), "external_id should be empty string"); -} +use crate::tests::common::drop_database; +use crate::tests::config::TestConfigBuilder; +use crate::{config::config, jobs::Job}; +use assert_matches::assert_matches; +use color_eyre::eyre::eyre; +use da_client_interface::MockDaClient; +use mockall::predicate::always; +use rstest::rstest; +use serde_json::json; +use starknet_core::types::{FieldElement, MaybePendingStateUpdate, PendingStateUpdate, StateDiff}; +use std::collections::HashMap; +use uuid::Uuid; +/// Tests the DA Job's handling of a blob length exceeding the supported size.
+/// It mocks the DA client to simulate the environment and expects an error on job processing. +/// Validates the error message for exceeding blob limits against the expected output. +/// Asserts correct behavior by comparing the received and expected error messages. #[rstest] +#[case( + "src/tests/jobs/da_job/test_data/state_update/638353.txt", + "src/tests/jobs/da_job/test_data/nonces/638353.txt", + "638353", + 110 +)] #[tokio::test] -async fn test_verify_job(#[from(default_job_item)] mut job_item: JobItem) { +async fn test_da_job_process_job_failure_on_small_blob_size( + #[case] state_update_file: String, + #[case] nonces_file: String, + #[case] internal_id: String, + #[case] current_blob_length: u64, +) { + // Mocking DA client calls let mut da_client = MockDaClient::new(); - da_client.expect_verify_inclusion().times(1).returning(|_| Ok(DaVerificationStatus::Verified)); + // the dummy state update expands to far more than the mocked 1200-byte-per-blob limit + da_client.expect_max_blob_per_txn().with().returning(|| 1); + da_client.expect_max_bytes_per_blob().with().returning(|| 1200); + + let server = TestConfigBuilder::new().mock_da_client(Box::new(da_client)).build().await; + let config = config().await; + + let state_update = read_state_update_from_file(state_update_file.as_str()).expect("issue while reading"); + + let state_update = MaybePendingStateUpdate::Update(state_update); + let state_update = serde_json::to_value(&state_update).unwrap(); + let response = json!({ "id": 640641,"jsonrpc":"2.0","result": state_update }); + + get_nonce_attached(&server, nonces_file.as_str()); + + let state_update_mock = server.mock(|when, then| { + when.path("/").body_contains("starknet_getStateUpdate"); + then.status(200).body(serde_json::to_vec(&response).unwrap()); + }); + + let max_blob_per_txn = config.da_client().max_blob_per_txn().await; + + let response = DaJob + .process_job( + config.as_ref(), + &mut JobItem { + id: Uuid::default(), + internal_id: internal_id.to_string(), + job_type: JobType::DataSubmission, + status: JobStatus::Created, + external_id: ExternalId::String(internal_id.to_string().into_boxed_str()), + metadata: HashMap::default(), + version: 0, + }, + ) + .await; + + assert_matches!(response, + Err(e) => { + let expected_error = eyre!( + "Exceeded the maximum number of blobs per transaction: allowed {}, found {} for block {} and job id {}", + max_blob_per_txn, + current_blob_length, + internal_id.to_string(), + Uuid::default() + ) + .to_string(); + assert_eq!(e.to_string(), expected_error); + } + ); - let config = init_config(None, None, None, Some(da_client), None, None, None).await; - assert!(DaJob.verify_job(&config, &mut job_item).await.is_ok()); + state_update_mock.assert(); + let _ = drop_database().await; } +/// Tests DA Job processing failure when a block is in pending state. +/// Simulates a pending block state update and expects job processing to fail. +/// Validates that the error message matches the expected pending state error. +/// Asserts correct behavior by comparing the received and expected error messages.
#[rstest] #[tokio::test] -async fn test_process_job() { - let server = MockServer::start(); - - let mut da_client = MockDaClient::new(); - let mut storage_client = MockDataStorage::new(); +async fn test_da_job_process_job_failure_on_pending_block() { + let server = TestConfigBuilder::new().build().await; + let config = config().await; let internal_id = "1"; - da_client.expect_max_bytes_per_blob().times(2).returning(move || ETHEREUM_MAX_BYTES_PER_BLOB); - da_client.expect_max_blob_per_txn().times(1).returning(move || ETHEREUM_MAX_BLOB_PER_TXN); - da_client.expect_publish_state_diff().times(1).returning(|_, _| Ok("0xbeef".to_string())); - - // Mocking storage client - storage_client.expect_put_data().returning(|_, _| Ok(())).times(1); - - let config_init = init_config( - Some(format!("http://localhost:{}", server.port())), - None, - None, - Some(da_client), - None, - None, - Some(storage_client), - ) - .await; - - config_force_init(config_init).await; - - let state_update = MaybePendingStateUpdate::Update(StateUpdate { - block_hash: FieldElement::default(), - new_root: FieldElement::default(), + let pending_state_update = MaybePendingStateUpdate::PendingUpdate(PendingStateUpdate { old_root: FieldElement::default(), state_diff: StateDiff { storage_diffs: vec![], @@ -84,32 +111,116 @@ async fn test_process_job() { nonces: vec![], }, }); + + let pending_state_update = serde_json::to_value(&pending_state_update).unwrap(); + let expected_response = json!({ "id": 1,"jsonrpc":"2.0","result": pending_state_update }); + + let state_update_mock = server.mock(|when, then| { + when.path("/").body_contains("starknet_getStateUpdate"); + then.status(200).body(serde_json::to_vec(&expected_response).unwrap()); + }); + + let response = DaJob + .process_job( + config.as_ref(), + &mut JobItem { + id: Uuid::default(), + internal_id: internal_id.to_string(), + job_type: JobType::DataSubmission, + status: JobStatus::Created, + external_id: ExternalId::String("1".to_string().into_boxed_str()), + metadata: HashMap::default(), + version: 0, + }, + ) + .await; + + assert_matches!(response, + Err(e) => { + let expected_error = eyre!( + "Cannot process block {} for job id {} as it's still in pending state", + internal_id.to_string(), + Uuid::default() + ) + .to_string(); + assert_eq!(e.to_string(), expected_error); + } + ); + + state_update_mock.assert(); +} + +/// Tests successful DA Job processing with valid state update and nonces files. +/// Mocks DA client to simulate environment and expects job to process without errors. +/// Validates the successful job processing by checking the return message "Done". +/// Asserts correct behavior by comparing the received and expected success messages. 
+#[rstest] +#[case( + "src/tests/jobs/da_job/test_data/state_update/631861.txt", + "src/tests/jobs/da_job/test_data/nonces/631861.txt", + "631861" +)] +#[case( + "src/tests/jobs/da_job/test_data/state_update/640641.txt", + "src/tests/jobs/da_job/test_data/nonces/640641.txt", + "640641" +)] +#[case( + "src/tests/jobs/da_job/test_data/state_update/638353.txt", + "src/tests/jobs/da_job/test_data/nonces/638353.txt", + "638353" +)] +#[tokio::test] +async fn test_da_job_process_job_success( + #[case] state_update_file: String, + #[case] nonces_file: String, + #[case] internal_id: String, +) { + // Mocking DA client calls + + use crate::tests::common::constants::{ETHEREUM_MAX_BLOB_PER_TXN, ETHEREUM_MAX_BYTES_PER_BLOB}; + let mut da_client = MockDaClient::new(); + da_client.expect_publish_state_diff().with(always(), always()).returning(|_, _| Ok("Done".to_string())); + // currently + da_client.expect_max_blob_per_txn().with().returning(|| ETHEREUM_MAX_BLOB_PER_TXN); + da_client.expect_max_bytes_per_blob().with().returning(|| ETHEREUM_MAX_BYTES_PER_BLOB); + + let server = TestConfigBuilder::new().mock_da_client(Box::new(da_client)).build().await; + let config = config().await; + + let state_update = read_state_update_from_file(state_update_file.as_str()).expect("issue while reading"); + let state_update = serde_json::to_value(&state_update).unwrap(); let response = json!({ "id": 1,"jsonrpc":"2.0","result": state_update }); + get_nonce_attached(&server, nonces_file.as_str()); + let state_update_mock = server.mock(|when, then| { when.path("/").body_contains("starknet_getStateUpdate"); then.status(200).body(serde_json::to_vec(&response).unwrap()); }); - assert_eq!( - DaJob - .process_job( - config().await.as_ref(), - &mut JobItem { - id: Uuid::default(), - internal_id: internal_id.to_string(), - job_type: JobType::DataSubmission, - status: JobStatus::Created, - external_id: ExternalId::String("1".to_string().into_boxed_str()), - metadata: HashMap::default(), - version: 0, - } - ) - .await - .unwrap(), - "0xbeef" + let response = DaJob + .process_job( + config.as_ref(), + &mut JobItem { + id: Uuid::default(), + internal_id: internal_id.to_string(), + job_type: JobType::DataSubmission, + status: JobStatus::Created, + external_id: ExternalId::String(internal_id.to_string().into_boxed_str()), + metadata: HashMap::default(), + version: 0, + }, + ) + .await; + + assert_matches!(response, + Ok(msg) => { + assert_eq!(msg, eyre!("Done").to_string()); + } ); state_update_mock.assert(); + let _ = drop_database().await; } diff --git a/crates/orchestrator/src/jobs/da_job/test_data/nonces_from_block_631861.txt b/crates/orchestrator/src/tests/jobs/da_job/test_data/nonces/631861.txt similarity index 100% rename from crates/orchestrator/src/jobs/da_job/test_data/nonces_from_block_631861.txt rename to crates/orchestrator/src/tests/jobs/da_job/test_data/nonces/631861.txt diff --git a/crates/orchestrator/src/jobs/da_job/test_data/nonces_from_block_638353.txt b/crates/orchestrator/src/tests/jobs/da_job/test_data/nonces/638353.txt similarity index 100% rename from crates/orchestrator/src/jobs/da_job/test_data/nonces_from_block_638353.txt rename to crates/orchestrator/src/tests/jobs/da_job/test_data/nonces/638353.txt diff --git a/crates/orchestrator/src/jobs/da_job/test_data/nonces_from_block_640641.txt b/crates/orchestrator/src/tests/jobs/da_job/test_data/nonces/640641.txt similarity index 100% rename from crates/orchestrator/src/jobs/da_job/test_data/nonces_from_block_640641.txt rename to 
crates/orchestrator/src/tests/jobs/da_job/test_data/nonces/640641.txt diff --git a/crates/orchestrator/src/jobs/da_job/test_data/state_update_from_block_631861.txt b/crates/orchestrator/src/tests/jobs/da_job/test_data/state_update/631861.txt similarity index 100% rename from crates/orchestrator/src/jobs/da_job/test_data/state_update_from_block_631861.txt rename to crates/orchestrator/src/tests/jobs/da_job/test_data/state_update/631861.txt diff --git a/crates/orchestrator/src/jobs/da_job/test_data/state_update_from_block_638353.txt b/crates/orchestrator/src/tests/jobs/da_job/test_data/state_update/638353.txt similarity index 100% rename from crates/orchestrator/src/jobs/da_job/test_data/state_update_from_block_638353.txt rename to crates/orchestrator/src/tests/jobs/da_job/test_data/state_update/638353.txt diff --git a/crates/orchestrator/src/jobs/da_job/test_data/state_update_from_block_640641.txt b/crates/orchestrator/src/tests/jobs/da_job/test_data/state_update/640641.txt similarity index 100% rename from crates/orchestrator/src/jobs/da_job/test_data/state_update_from_block_640641.txt rename to crates/orchestrator/src/tests/jobs/da_job/test_data/state_update/640641.txt diff --git a/crates/orchestrator/src/jobs/da_job/test_data/test_blob_631861.txt b/crates/orchestrator/src/tests/jobs/da_job/test_data/test_blob/631861.txt similarity index 100% rename from crates/orchestrator/src/jobs/da_job/test_data/test_blob_631861.txt rename to crates/orchestrator/src/tests/jobs/da_job/test_data/test_blob/631861.txt diff --git a/crates/orchestrator/src/jobs/da_job/test_data/test_blob_638353.txt b/crates/orchestrator/src/tests/jobs/da_job/test_data/test_blob/638353.txt similarity index 100% rename from crates/orchestrator/src/jobs/da_job/test_data/test_blob_638353.txt rename to crates/orchestrator/src/tests/jobs/da_job/test_data/test_blob/638353.txt diff --git a/crates/orchestrator/src/jobs/da_job/test_data/test_blob_639404.txt b/crates/orchestrator/src/tests/jobs/da_job/test_data/test_blob/639404.txt similarity index 100% rename from crates/orchestrator/src/jobs/da_job/test_data/test_blob_639404.txt rename to crates/orchestrator/src/tests/jobs/da_job/test_data/test_blob/639404.txt diff --git a/crates/orchestrator/src/jobs/da_job/test_data/test_blob_640641.txt b/crates/orchestrator/src/tests/jobs/da_job/test_data/test_blob/640641.txt similarity index 100% rename from crates/orchestrator/src/jobs/da_job/test_data/test_blob_640641.txt rename to crates/orchestrator/src/tests/jobs/da_job/test_data/test_blob/640641.txt diff --git a/crates/orchestrator/src/jobs/da_job/test_data/test_blob_640644.txt b/crates/orchestrator/src/tests/jobs/da_job/test_data/test_blob/640644.txt similarity index 100% rename from crates/orchestrator/src/jobs/da_job/test_data/test_blob_640644.txt rename to crates/orchestrator/src/tests/jobs/da_job/test_data/test_blob/640644.txt diff --git a/crates/orchestrator/src/jobs/da_job/test_data/test_blob_640646.txt b/crates/orchestrator/src/tests/jobs/da_job/test_data/test_blob/640646.txt similarity index 100% rename from crates/orchestrator/src/jobs/da_job/test_data/test_blob_640646.txt rename to crates/orchestrator/src/tests/jobs/da_job/test_data/test_blob/640646.txt diff --git a/crates/orchestrator/src/jobs/da_job/test_data/test_blob_640647.txt b/crates/orchestrator/src/tests/jobs/da_job/test_data/test_blob/640647.txt similarity index 100% rename from crates/orchestrator/src/jobs/da_job/test_data/test_blob_640647.txt rename to 
crates/orchestrator/src/tests/jobs/da_job/test_data/test_blob/640647.txt diff --git a/crates/settlement-clients/ethereum/src/lib.rs b/crates/settlement-clients/ethereum/src/lib.rs index dde57309..96f7ea10 100644 --- a/crates/settlement-clients/ethereum/src/lib.rs +++ b/crates/settlement-clients/ethereum/src/lib.rs @@ -68,6 +68,8 @@ impl EthereumSettlementClient { let provider = Arc::new( ProviderBuilder::new().with_recommended_fillers().wallet(wallet.clone()).on_http(settlement_cfg.rpc_url), ); + // It's safe to use unwrap here since this is `setup` code, + // if anything fails it is pre-assumed that any errors at this stage are handled in real time. let core_contract_client = StarknetValidityContractClient::new( Address::from_str(&settlement_cfg.core_contract_address) .expect("Failed to convert the validity contract address.")
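Note on the `test_da_word` vectors above: the four expected values pin down the packing that `da_word` is expected to produce for a contract's storage-diff header word, namely `num_changes` in the low 64 bits, the new nonce in the next 64 bits, and the class flag at bit 128. The sketch below only re-derives those four vectors from that layout; it is an inference from the test data, not the orchestrator's `da_word` implementation, and `da_word_layout` is a made-up name.

use num_bigint::BigUint;

// Hypothetical helper reproducing the bit layout implied by the test cases.
fn da_word_layout(class_flag: bool, new_nonce: u64, num_changes: u64) -> BigUint {
    // low 64 bits: number of storage changes
    let mut word = BigUint::from(num_changes);
    // next 64 bits: the new nonce value
    word += BigUint::from(new_nonce) << 64usize;
    // bit 128: set when the class information changed
    if class_flag {
        word += BigUint::from(1u8) << 128usize;
    }
    word
}

fn main() {
    // The same four cases as the #[case] attributes on test_da_word.
    assert_eq!(da_word_layout(false, 1, 1).to_string(), "18446744073709551617");
    assert_eq!(da_word_layout(false, 1, 0).to_string(), "18446744073709551616");
    assert_eq!(da_word_layout(false, 0, 6).to_string(), "6");
    assert_eq!(da_word_layout(true, 1, 0).to_string(), "340282366920938463481821351505477763072");
}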
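Similarly, the failure case in `test_da_job_process_job_failure_on_small_blob_size` relies on the mocked limits (1 blob per transaction, 1200 bytes per blob) forcing the block-638353 state update into 110 blobs. Below is a hedged sketch of the guard that test exercises: only the error string comes from the test itself; the function name and shape are illustrative, not the actual `process_job` code.

use color_eyre::eyre::{bail, Result};

// Illustrative guard: fail when the number of blobs needed exceeds the DA client's per-transaction limit.
fn check_blob_limit(blobs_found: u64, max_blob_per_txn: u64, block_no: u64, job_id: &str) -> Result<()> {
    if blobs_found > max_blob_per_txn {
        bail!(
            "Exceeded the maximum number of blobs per transaction: allowed {}, found {} for block {} and job id {}",
            max_blob_per_txn,
            blobs_found,
            block_no,
            job_id
        );
    }
    Ok(())
}

fn main() {
    // Mirrors the mocked scenario: 110 blobs against a limit of 1 must error out.
    assert!(check_blob_limit(110, 1, 638353, "00000000-0000-0000-0000-000000000000").is_err());
    // A block that fits within the limit passes.
    assert!(check_blob_limit(1, 6, 631861, "00000000-0000-0000-0000-000000000000").is_ok());
}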