Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Tests for DA job #61

Closed
wants to merge 23 commits into from
Closed
Show file tree
Hide file tree
Changes from 6 commits
Commits
Show all changes
23 commits
Select commit Hold shift + click to select a range
c508f32
update: DA job tests draft #1
heemankv Jul 26, 2024
427b1fb
update: da_job: reformatting unit tests + added integration tests
heemankv Jul 27, 2024
5a024f3
update: shifted unit tests from src/jobs/da_job to src/test/jobs/da_job
heemankv Jul 27, 2024
9aaa1c0
update: draft #1 all da-tests running
heemankv Jul 27, 2024
7b4d30f
update: removing usage of serial
heemankv Jul 27, 2024
4c17857
docs: documented test functions
heemankv Jul 29, 2024
f773b59
Update crates/orchestrator/src/tests/jobs/da_job/mod.rs
heemankv Jul 30, 2024
45526f7
Update crates/orchestrator/src/tests/jobs/da_job/mod.rs
heemankv Jul 30, 2024
aed78ee
update: PR reviews
heemankv Jul 30, 2024
fac7ed8
update: PR reviews #2
heemankv Jul 30, 2024
556687d
update: added more testcases for test_da_job_process_job_success
heemankv Jul 30, 2024
176bde5
Update crates/orchestrator/src/tests/jobs/da_job/mod.rs
heemankv Jul 31, 2024
7c1e3a1
update: removed Result from test cases
heemankv Jul 31, 2024
ac086d7
update: introducing assert_matches! in Da job tests
heemankv Jul 31, 2024
2bbb618
Merge branch 'feat/increasing-test-coverage' into tests/da-job
heemankv Jul 31, 2024
a9b29dd
update: moving unit tests back in da_job file
heemankv Jul 31, 2024
ba1d475
update: removed pub from da_word
heemankv Aug 2, 2024
519f1d0
Update/job error using this error (#66)
heemankv Aug 5, 2024
78a7254
Revert "Update/job error using this error (#66)" (#69)
apoorvsadana Aug 5, 2024
8129e32
update: PR reviews rework
heemankv Aug 8, 2024
a1163b0
chore: add drop_database to TestConfigBuilder.build()
heemankv Aug 8, 2024
feb49bb
docs: added comment for unwrap in settlement client ethereum
heemankv Aug 8, 2024
1630d0d
Merge branch 'feat/increasing-test-coverage' into tests/da-job
ocdbytes Aug 8, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .env.example
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ HOST=
PORT=
DATABASE_URL=
MADARA_RPC_URL=
DA_LAYER=
DA_LAYER=ethereum
heemankv marked this conversation as resolved.
Show resolved Hide resolved
SETTLEMENT_LAYER=

# Ethereum
Expand Down
2 changes: 2 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -10,11 +10,13 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
- Tests for updating the state.
- Function to update the state and publish blob on ethereum in state update job.
- Fixtures for testing.
- Tests for DA job.

## Changed

- GitHub's coverage CI yml file for localstack and db testing.
- Orchestrator: Moved TestConfigBuilder to `config.rs` in tests folder.
- Shifted Unit tests to test folder for DA job.

## Removed

Expand Down
197 changes: 5 additions & 192 deletions crates/orchestrator/src/jobs/da_job/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,7 @@ impl Job for DaJob {

async fn process_job(&self, config: &Config, job: &mut JobItem) -> Result<String> {
let block_no = job.internal_id.parse::<u64>()?;

let state_update = config.starknet_client().get_state_update(BlockId::Number(block_no)).await?;

let state_update = match state_update {
Expand Down Expand Up @@ -127,7 +128,7 @@ impl Job for DaJob {
}
}

fn fft_transformation(elements: Vec<BigUint>) -> Vec<BigUint> {
pub fn fft_transformation(elements: Vec<BigUint>) -> Vec<BigUint> {
let xs: Vec<BigUint> = (0..*BLOB_LEN)
.map(|i| {
let bin = format!("{:012b}", i);
Expand All @@ -149,7 +150,7 @@ fn fft_transformation(elements: Vec<BigUint>) -> Vec<BigUint> {
transform
}

fn convert_to_biguint(elements: Vec<FieldElement>) -> Vec<BigUint> {
pub fn convert_to_biguint(elements: Vec<FieldElement>) -> Vec<BigUint> {
// Initialize the vector with 4096 BigUint zeros
let mut biguint_vec = vec![BigUint::zero(); 4096];

Expand Down Expand Up @@ -200,7 +201,7 @@ fn data_to_blobs(blob_size: u64, block_data: Vec<BigUint>) -> Result<Vec<Vec<u8>
Ok(blobs)
}

async fn state_update_to_blob_data(
pub async fn state_update_to_blob_data(
block_no: u64,
state_update: StateUpdate,
config: &Config,
Expand Down Expand Up @@ -306,7 +307,7 @@ async fn store_blob_data(blob_data: Vec<FieldElement>, block_number: u64, config
/// DA word encoding:
/// |---padding---|---class flag---|---new nonce---|---num changes---|
/// 127 bits 1 bit 64 bits 64 bits
fn da_word(class_flag: bool, nonce_change: Option<FieldElement>, num_changes: u64) -> FieldElement {
pub fn da_word(class_flag: bool, nonce_change: Option<FieldElement>, num_changes: u64) -> FieldElement {
heemankv marked this conversation as resolved.
Show resolved Hide resolved
// padding of 127 bits
let mut binary_string = "0".repeat(127);

Expand Down Expand Up @@ -340,191 +341,3 @@ fn da_word(class_flag: bool, nonce_change: Option<FieldElement>, num_changes: u6

FieldElement::from_dec_str(&decimal_string).expect("issue while converting to fieldElement")
}

#[cfg(test)]
mod tests {
use std::fs;
use std::fs::File;
use std::io::Read;

use ::serde::{Deserialize, Serialize};
use httpmock::prelude::*;
use majin_blob_core::blob;
use majin_blob_types::serde;
use majin_blob_types::state_diffs::UnorderedEq;
// use majin_blob_types::serde;
use crate::data_storage::MockDataStorage;
use da_client_interface::MockDaClient;
use rstest::rstest;
use serde_json::json;

use super::*;
use crate::tests::common::init_config;

/// Checks the `da_word` encoding for combinations of class flag, new nonce,
/// and number of changes.
///
/// Expected values are the decimal form of the packed word
/// |---padding---|---class flag---|---new nonce---|---num changes---|.
#[rstest]
#[case(false, 1, 1, "18446744073709551617")]
#[case(false, 1, 0, "18446744073709551616")]
#[case(false, 0, 6, "6")]
#[case(true, 1, 0, "340282366920938463481821351505477763072")]
fn da_word_works(
    #[case] class_flag: bool,
    #[case] new_nonce: u64,
    #[case] num_changes: u64,
    #[case] expected: String,
) {
    // In the case table a nonce of 0 means "no nonce update".
    let nonce_update = (new_nonce > 0).then(|| FieldElement::from(new_nonce));
    let expected = FieldElement::from_dec_str(&expected).unwrap();
    assert_eq!(da_word(class_flag, nonce_update, num_changes), expected);
}

/// Verifies `state_update_to_blob_data` against reference blobs.
///
/// For each case, a state-update JSON fixture for a real block is converted to
/// blob data, parsed into state diffs, and compared (order-insensitively) with
/// the state diffs recovered from the corresponding known-good blob file.
#[rstest]
#[case(
    631861,
    "src/jobs/da_job/test_data/state_update_from_block_631861.txt",
    "src/jobs/da_job/test_data/test_blob_631861.txt",
    "src/jobs/da_job/test_data/nonces_from_block_631861.txt"
)]
#[case(
    638353,
    "src/jobs/da_job/test_data/state_update_from_block_638353.txt",
    "src/jobs/da_job/test_data/test_blob_638353.txt",
    "src/jobs/da_job/test_data/nonces_from_block_638353.txt"
)]
#[case(
    640641,
    "src/jobs/da_job/test_data/state_update_from_block_640641.txt",
    "src/jobs/da_job/test_data/test_blob_640641.txt",
    "src/jobs/da_job/test_data/nonces_from_block_640641.txt"
)]
#[tokio::test]
async fn test_state_update_to_blob_data(
    // Block number whose state update is being converted.
    #[case] block_no: u64,
    // Path to the JSON state-update fixture for `block_no`.
    #[case] state_update_file_path: &str,
    // Path to the reference (known-good) blob file for the same block.
    #[case] file_path: &str,
    // Path to the JSON list of (nonce, address) pairs used to mock `starknet_getNonce`.
    #[case] nonce_file_path: &str,
) {
    // Mock RPC server standing in for the Starknet node.
    let server = MockServer::start();
    let mut da_client = MockDaClient::new();
    let mut storage_client = MockDataStorage::new();

    // Mocking DA client calls: 6 blobs per txn, 131072 bytes per blob.
    da_client.expect_max_blob_per_txn().with().returning(|| 6);
    da_client.expect_max_bytes_per_blob().with().returning(|| 131072);

    // Mocking storage client: the converted blob must be persisted exactly once.
    storage_client.expect_put_data().returning(|_, _| Result::Ok(())).times(1);

    let config = init_config(
        Some(format!("http://localhost:{}", server.port())),
        None,
        None,
        Some(da_client),
        None,
        None,
        Some(storage_client),
    )
    .await;

    // Register mocked starknet_getNonce responses on the server for every
    // address referenced by the state update.
    get_nonce_attached(&server, nonce_file_path);

    let state_update = read_state_update_from_file(state_update_file_path).expect("issue while reading");
    let blob_data = state_update_to_blob_data(block_no, state_update, &config)
        .await
        .expect("issue while converting state update to blob data");

    let blob_data_biguint = convert_to_biguint(blob_data);

    // State diffs produced by the code under test.
    let block_data_state_diffs = serde::parse_state_diffs(blob_data_biguint.as_slice());

    let original_blob_data = serde::parse_file_to_blob_data(file_path);
    // converting the data to its original format
    let recovered_blob_data = blob::recover(original_blob_data.clone());
    // State diffs recovered from the reference blob.
    let blob_data_state_diffs = serde::parse_state_diffs(recovered_blob_data.as_slice());

    // Order-insensitive comparison: diff ordering inside a blob is not significant here.
    assert!(block_data_state_diffs.unordered_eq(&blob_data_state_diffs), "value of data json should be identical");
}

/// Round-trip check for `fft_transformation`: applying `blob::recover` (the
/// inverse transform) and then `fft_transformation` must reproduce the
/// original blob data exactly.
#[rstest]
#[case("src/jobs/da_job/test_data/test_blob_631861.txt")]
#[case("src/jobs/da_job/test_data/test_blob_638353.txt")]
#[case("src/jobs/da_job/test_data/test_blob_639404.txt")]
#[case("src/jobs/da_job/test_data/test_blob_640641.txt")]
#[case("src/jobs/da_job/test_data/test_blob_640644.txt")]
#[case("src/jobs/da_job/test_data/test_blob_640646.txt")]
#[case("src/jobs/da_job/test_data/test_blob_640647.txt")]
fn test_fft_transformation(#[case] file_to_check: &str) {
    // Parse the blob hex file into its BigUint representation.
    let blob_as_biguints = serde::parse_file_to_blob_data(file_to_check);
    // Invert the transform, then reapply it; the round trip must be lossless.
    let round_tripped = fft_transformation(blob::recover(blob_as_biguints.clone()));
    assert_eq!(round_tripped, blob_as_biguints);
}

/// Sanity check that a nested `Vec<Vec<u8>>` survives a bincode
/// serialize/deserialize round trip unchanged.
#[rstest]
fn test_bincode() {
    let original: Vec<Vec<u8>> = vec![vec![1, 2], vec![3, 4]];

    let encoded = bincode::serialize(&original).unwrap();
    let decoded: Vec<Vec<u8>> = bincode::deserialize(&encoded).unwrap();

    assert_eq!(original, decoded);
}

pub fn read_state_update_from_file(file_path: &str) -> Result<StateUpdate> {
// let file_path = format!("state_update_block_no_{}.txt", block_no);
let mut file = File::open(file_path)?;
let mut json = String::new();
file.read_to_string(&mut json)?;
let state_update: StateUpdate = serde_json::from_str(&json)?;
Ok(state_update)
}

/// One entry of a nonce test-data file: a (nonce, address) pair used to mock
/// `starknet_getNonce` responses in `get_nonce_attached`.
#[derive(Serialize, Deserialize, Debug)]
struct NonceAddress {
    // Nonce value, returned verbatim as the mocked RPC `result`.
    nonce: String,
    // Contract address as a decimal string; converted to hex for request matching.
    address: String,
}

pub fn get_nonce_attached(server: &MockServer, file_path: &str) {
// Read the file
let file_content = fs::read_to_string(file_path).expect("Unable to read file");

// Parse the JSON content into a vector of NonceAddress
let nonce_addresses: Vec<NonceAddress> =
serde_json::from_str(&file_content).expect("JSON was not well-formatted");

// Set up mocks for each entry
for entry in nonce_addresses {
let address = entry.address.clone();
let nonce = entry.nonce.clone();
let response = json!({ "id": 1,"jsonrpc":"2.0","result": nonce });
let field_element =
FieldElement::from_dec_str(&address).expect("issue while converting the hex to field").to_bytes_be();
let hex_field_element = vec_u8_to_hex_string(&field_element);

server.mock(|when, then| {
when.path("/").body_contains("starknet_getNonce").body_contains(hex_field_element);
then.status(200).body(serde_json::to_vec(&response).unwrap());
});
}
}

/// Converts a big-endian byte slice into a minimal `0x`-prefixed lowercase hex
/// string: leading zero digits are stripped, and an all-zero (or empty) input
/// yields `"0x0"`.
fn vec_u8_to_hex_string(data: &[u8]) -> String {
    // Render every byte as two lowercase hex digits, collecting straight into
    // one String.
    let full_hex: String = data.iter().map(|byte| format!("{:02x}", byte)).collect();

    // Drop leading zeros so the result is the minimal hex representation.
    let trimmed = full_hex.trim_start_matches('0');

    if trimmed.is_empty() {
        // Input was empty or all zeros: canonical form is "0x0".
        "0x0".to_string()
    } else {
        format!("0x{}", trimmed)
    }
}
}
Loading