From 8c80c839f560efca8d143709a8d47dcd1a2e9f05 Mon Sep 17 00:00:00 2001 From: Shunkichi Sato <49983831+s8sato@users.noreply.github.com> Date: Mon, 18 Nov 2024 17:08:41 +0900 Subject: [PATCH 1/5] refactor(multisig)!: move from triggers to custom instructions (#5217) BREAKING CHANGES: - (api-changes) `MultisigRegister` `MultisigPropose` `MultisigApprove` custom instructions Signed-off-by: Shunkichi Sato <49983831+s8sato@users.noreply.github.com> --- Cargo.lock | 14 +- Cargo.toml | 2 - crates/iroha/Cargo.toml | 3 +- crates/iroha/src/lib.rs | 2 +- crates/iroha/tests/multisig.rs | 441 ++++++++---------- crates/iroha_cli/src/main.rs | 122 ++--- crates/iroha_data_model/src/visit.rs | 6 +- crates/iroha_executor/src/default/isi/mod.rs | 49 ++ .../src/default/isi/multisig/account.rs | 64 +++ .../src/default/isi/multisig/mod.rs | 52 +++ .../src/default/isi/multisig/transaction.rs | 249 ++++++++++ .../src/{default.rs => default/mod.rs} | 155 ++---- crates/iroha_executor/src/lib.rs | 4 + crates/iroha_executor/src/permission.rs | 34 +- crates/iroha_executor_data_model/Cargo.toml | 1 + crates/iroha_executor_data_model/src/isi.rs | 118 +++++ crates/iroha_executor_data_model/src/lib.rs | 1 + .../src/permission.rs | 10 - crates/iroha_executor_derive/src/default.rs | 6 +- crates/iroha_genesis/src/lib.rs | 49 -- crates/iroha_kagami/src/genesis/generate.rs | 24 +- crates/iroha_schema/src/lib.rs | 4 +- crates/iroha_schema_gen/Cargo.toml | 1 - crates/iroha_schema_gen/src/lib.rs | 27 +- crates/iroha_test_network/src/lib.rs | 2 +- .../libs/iroha_multisig_data_model/Cargo.toml | 19 - .../libs/iroha_multisig_data_model/src/lib.rs | 74 --- defaults/genesis.json | 75 +-- docs/source/references/schema.json | 62 ++- scripts/build_wasm.sh | 3 - scripts/tests/instructions.json | 2 +- scripts/tests/multisig.recursion.sh | 13 +- scripts/tests/multisig.sh | 4 +- wasm/Cargo.toml | 1 - wasm/libs/multisig_accounts/Cargo.toml | 22 - wasm/libs/multisig_accounts/src/lib.rs | 150 ------ 
wasm/libs/multisig_domains/Cargo.toml | 21 - wasm/libs/multisig_domains/src/lib.rs | 77 --- wasm/libs/multisig_transactions/Cargo.toml | 21 - wasm/libs/multisig_transactions/src/lib.rs | 228 --------- .../src/lib.rs | 4 +- .../src/lib.rs | 4 +- 42 files changed, 915 insertions(+), 1305 deletions(-) create mode 100644 crates/iroha_executor/src/default/isi/mod.rs create mode 100644 crates/iroha_executor/src/default/isi/multisig/account.rs create mode 100644 crates/iroha_executor/src/default/isi/multisig/mod.rs create mode 100644 crates/iroha_executor/src/default/isi/multisig/transaction.rs rename crates/iroha_executor/src/{default.rs => default/mod.rs} (93%) create mode 100644 crates/iroha_executor_data_model/src/isi.rs delete mode 100644 data_model/libs/iroha_multisig_data_model/Cargo.toml delete mode 100644 data_model/libs/iroha_multisig_data_model/src/lib.rs delete mode 100644 wasm/libs/multisig_accounts/Cargo.toml delete mode 100644 wasm/libs/multisig_accounts/src/lib.rs delete mode 100644 wasm/libs/multisig_domains/Cargo.toml delete mode 100644 wasm/libs/multisig_domains/src/lib.rs delete mode 100644 wasm/libs/multisig_transactions/Cargo.toml delete mode 100644 wasm/libs/multisig_transactions/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index 4940c0f920d..2f5fc18928a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2919,7 +2919,6 @@ dependencies = [ "iroha_executor_data_model", "iroha_genesis", "iroha_logger", - "iroha_multisig_data_model", "iroha_primitives", "iroha_telemetry", "iroha_test_network", @@ -3206,6 +3205,7 @@ dependencies = [ name = "iroha_executor_data_model" version = "2.0.0-rc.1.0" dependencies = [ + "derive_more", "iroha_data_model", "iroha_executor_data_model_derive", "iroha_schema", @@ -3364,17 +3364,6 @@ dependencies = [ "syn 2.0.75", ] -[[package]] -name = "iroha_multisig_data_model" -version = "2.0.0-rc.1.0" -dependencies = [ - "iroha_data_model", - "iroha_schema", - "parity-scale-codec", - "serde", - "serde_json", -] - [[package]] name = 
"iroha_numeric" version = "2.0.0-rc.1.0" @@ -3480,7 +3469,6 @@ dependencies = [ "iroha_data_model", "iroha_executor_data_model", "iroha_genesis", - "iroha_multisig_data_model", "iroha_primitives", "iroha_schema", ] diff --git a/Cargo.toml b/Cargo.toml index f0e087d5966..740812a3f2a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -44,7 +44,6 @@ iroha_smart_contract_utils = { version = "=2.0.0-rc.1.0", path = "crates/iroha_s iroha_executor = { version = "=2.0.0-rc.1.0", path = "crates/iroha_executor" } iroha_data_model = { version = "=2.0.0-rc.1.0", path = "crates/iroha_data_model", default-features = false } -iroha_multisig_data_model = { version = "=2.0.0-rc.1.0", path = "data_model/libs/iroha_multisig_data_model" } iroha_executor_data_model = { version = "=2.0.0-rc.1.0", path = "crates/iroha_executor_data_model" } iroha_test_network = { version = "=2.0.0-rc.1.0", path = "crates/iroha_test_network" } @@ -199,7 +198,6 @@ clippy.wildcard_dependencies = "deny" resolver = "2" members = [ "crates/*", - "data_model/libs/*", "data_model/samples/*" ] diff --git a/crates/iroha/Cargo.toml b/crates/iroha/Cargo.toml index 6990b70d363..88e705e6efc 100644 --- a/crates/iroha/Cargo.toml +++ b/crates/iroha/Cargo.toml @@ -58,7 +58,7 @@ iroha_torii_const = { workspace = true } iroha_version = { workspace = true } iroha_data_model = { workspace = true, features = ["http"] } -iroha_multisig_data_model = { workspace = true } +iroha_executor_data_model = { workspace = true } attohttpc = { version = "0.28.0", default-features = false } eyre = { workspace = true } @@ -88,7 +88,6 @@ iroha_test_network = { workspace = true } mint_rose_trigger_data_model = { path = "../../data_model/samples/mint_rose_trigger_data_model" } executor_custom_data_model = { path = "../../data_model/samples/executor_custom_data_model" } -iroha_executor_data_model = { workspace = true } tokio = { workspace = true, features = ["rt-multi-thread"] } reqwest = { version = "0.12.7", features = ["json"] } diff --git 
a/crates/iroha/src/lib.rs b/crates/iroha/src/lib.rs index 0daf1930e55..02c3553fb1d 100644 --- a/crates/iroha/src/lib.rs +++ b/crates/iroha/src/lib.rs @@ -9,4 +9,4 @@ mod secrecy; pub use iroha_crypto as crypto; pub use iroha_data_model as data_model; -pub use iroha_multisig_data_model as multisig_data_model; +pub use iroha_executor_data_model as executor_data_model; diff --git a/crates/iroha/tests/multisig.rs b/crates/iroha/tests/multisig.rs index 84c6c453f9a..1ac73daca89 100644 --- a/crates/iroha/tests/multisig.rs +++ b/crates/iroha/tests/multisig.rs @@ -1,77 +1,108 @@ use std::{ collections::{BTreeMap, BTreeSet}, + num::{NonZeroU16, NonZeroU64}, time::Duration, }; +use derive_more::Constructor; use eyre::Result; use iroha::{ client::Client, crypto::KeyPair, - data_model::{prelude::*, query::trigger::FindTriggers, Level}, + data_model::{prelude::*, Level}, + executor_data_model::isi::multisig::*, }; -use iroha_data_model::events::execute_trigger::ExecuteTriggerEventFilter; -use iroha_multisig_data_model::{MultisigAccountArgs, MultisigTransactionArgs}; +use iroha_executor_data_model::permission::account::CanRegisterAccount; use iroha_test_network::*; use iroha_test_samples::{ gen_account_in, ALICE_ID, BOB_ID, BOB_KEYPAIR, CARPENTER_ID, CARPENTER_KEYPAIR, }; +#[derive(Constructor)] +struct TestSuite { + domain: DomainId, + multisig_account_id: AccountId, + unauthorized_target_opt: Option, + transaction_ttl_ms_opt: Option, +} + #[test] -fn multisig() -> Result<()> { - multisig_base(None) +fn multisig_normal() -> Result<()> { + // New domain for this test + let domain = "kingdom".parse().unwrap(); + // Create a multisig account ID and discard the corresponding private key + // FIXME #5022 refuse user input to prevent multisig monopoly and pre-registration hijacking + let multisig_account_id = gen_account_in(&domain).0; + // Make some changes to the multisig account itself + let unauthorized_target_opt = None; + // Semi-permanently valid + let transaction_ttl_ms_opt = 
None; + + let suite = TestSuite::new( + domain, + multisig_account_id, + unauthorized_target_opt, + transaction_ttl_ms_opt, + ); + multisig_base(suite) +} + +#[test] +fn multisig_unauthorized() -> Result<()> { + let domain = "kingdom".parse().unwrap(); + let multisig_account_id = gen_account_in(&domain).0; + // Someone that the multisig account has no permission to access + let unauthorized_target_opt = Some(ALICE_ID.clone()); + + let suite = TestSuite::new(domain, multisig_account_id, unauthorized_target_opt, None); + multisig_base(suite) } #[test] fn multisig_expires() -> Result<()> { - multisig_base(Some(2)) + let domain = "kingdom".parse().unwrap(); + let multisig_account_id = gen_account_in(&domain).0; + // Expires after 1 sec + let transaction_ttl_ms_opt = Some(1_000); + + let suite = TestSuite::new(domain, multisig_account_id, None, transaction_ttl_ms_opt); + multisig_base(suite) } /// # Scenario /// -/// Proceeds from top left to bottom right. Starred operations are the responsibility of the user -/// -/// ``` -/// | world level | domain level | account level | transaction level | -/// |---------------------------|-----------------------------|---------------------------------|----------------------| -/// | given domains initializer | | | | -/// | | * creates domain | | | -/// | domains initializer | generates accounts registry | | | -/// | | | * creates signatories | | -/// | | * calls accounts registry | generates multisig account | | -/// | | accounts registry | generates transactions registry | | -/// | | | * calls transactions registry | proposes transaction | -/// | | | * calls transactions registry | approves transaction | -/// | | | transactions registry | executes transaction | -/// ``` -#[allow(clippy::cast_possible_truncation)] -fn multisig_base(transaction_ttl_ms: Option) -> Result<()> { +/// 1. Signatories are populated and ready to join a multisig account +/// 2. Someone in the domain registers a multisig account +/// 3. 
One of the signatories of the multisig account proposes a multisig transaction +/// 4. Other signatories approve the multisig transaction +/// 5. The multisig transaction executes when all of the following are met: +/// - Quorum reached: authenticated +/// - Transaction has not expired +/// - Every instruction validated against the multisig account: authorized +/// 6. Either execution or expiration on approval deletes the transaction entry +#[expect(clippy::cast_possible_truncation, clippy::too_many_lines)] +fn multisig_base(suite: TestSuite) -> Result<()> { const N_SIGNATORIES: usize = 5; + let TestSuite { + domain, + multisig_account_id, + unauthorized_target_opt, + transaction_ttl_ms_opt, + } = suite; + let (network, _rt) = NetworkBuilder::new().start_blocking()?; let test_client = network.client(); - let kingdom: DomainId = "kingdom".parse().unwrap(); - // Assume some domain registered after genesis let register_and_transfer_kingdom: [InstructionBox; 2] = [ - Register::domain(Domain::new(kingdom.clone())).into(), - Transfer::domain(ALICE_ID.clone(), kingdom.clone(), BOB_ID.clone()).into(), + Register::domain(Domain::new(domain.clone())).into(), + Transfer::domain(ALICE_ID.clone(), domain.clone(), BOB_ID.clone()).into(), ]; test_client.submit_all_blocking(register_and_transfer_kingdom)?; - // One more block to generate a multisig accounts registry for the domain - test_client.submit_blocking(Log::new(Level::DEBUG, "Just ticking time".to_string()))?; - - // Check that the multisig accounts registry has been generated - let multisig_accounts_registry_id = multisig_accounts_registry_of(&kingdom); - let _trigger = test_client - .query(FindTriggers::new()) - .filter_with(|trigger| trigger.id.eq(multisig_accounts_registry_id.clone())) - .execute_single() - .expect("multisig accounts registry should be generated after domain creation"); - // Populate residents in the domain - let mut residents = core::iter::repeat_with(|| gen_account_in(&kingdom)) + let mut residents = 
core::iter::repeat_with(|| gen_account_in(&domain)) .take(1 + N_SIGNATORIES) .collect::>(); alt_client((BOB_ID.clone(), BOB_KEYPAIR.clone()), &test_client).submit_all_blocking( @@ -82,25 +113,28 @@ fn multisig_base(transaction_ttl_ms: Option) -> Result<()> { .map(Register::account), )?; - // Create a multisig account ID and discard the corresponding private key - let multisig_account_id = gen_account_in(&kingdom).0; - - let not_signatory = residents.pop_first().unwrap(); + let non_signatory = residents.pop_first().unwrap(); let mut signatories = residents; - let args = &MultisigAccountArgs { - account: multisig_account_id.signatory().clone(), - signatories: signatories + let register_multisig_account = MultisigRegister::new( + multisig_account_id.clone(), + signatories .keys() .enumerate() .map(|(weight, id)| (id.clone(), 1 + weight as u8)) .collect(), - // Can be met without the first signatory - quorum: (1..=N_SIGNATORIES).skip(1).sum::() as u16, - transaction_ttl_ms: transaction_ttl_ms.unwrap_or(u64::MAX), - }; - let register_multisig_account = - ExecuteTrigger::new(multisig_accounts_registry_id).with_args(args); + // Quorum can be reached without the first signatory + (1..=N_SIGNATORIES) + .skip(1) + .sum::() + .try_into() + .ok() + .and_then(NonZeroU16::new) + .unwrap(), + transaction_ttl_ms_opt + .and_then(NonZeroU64::new) + .unwrap_or(NonZeroU64::MAX), + ); // Any account in another domain cannot register a multisig account without special permission let _err = alt_client( @@ -110,75 +144,111 @@ fn multisig_base(transaction_ttl_ms: Option) -> Result<()> { .submit_blocking(register_multisig_account.clone()) .expect_err("multisig account should not be registered by account of another domain"); - // Any account in the same domain can register a multisig account without special permission - alt_client(not_signatory, &test_client) + // Non-signatory account in the same domain cannot register a multisig account without special permission + let _err = 
alt_client(non_signatory.clone(), &test_client) + .submit_blocking(register_multisig_account.clone()) + .expect_err( + "multisig account should not be registered by non-signatory account of the same domain", + ); + + // All but the first signatory approve the proposal + let signatory = signatories.pop_first().unwrap(); + + // Signatory account cannot register a multisig account without special permission + let _err = alt_client(signatory, &test_client) + .submit_blocking(register_multisig_account.clone()) + .expect_err("multisig account should not be registered by signatory account"); + + // Account with permission can register a multisig account + alt_client((BOB_ID.clone(), BOB_KEYPAIR.clone()), &test_client).submit_blocking( + Grant::account_permission(CanRegisterAccount { domain }, non_signatory.0.clone()), + )?; + alt_client(non_signatory, &test_client) .submit_blocking(register_multisig_account) - .expect("multisig account should be registered by account of the same domain"); + .expect("multisig account should be registered by account with permission"); // Check that the multisig account has been registered test_client .query(FindAccounts::new()) .filter_with(|account| account.id.eq(multisig_account_id.clone())) .execute_single() - .expect("multisig account should be created by calling the multisig accounts registry"); - - // Check that the multisig transactions registry has been generated - let multisig_transactions_registry_id = multisig_transactions_registry_of(&multisig_account_id); - let _trigger = test_client - .query(FindTriggers::new()) - .filter_with(|trigger| trigger.id.eq(multisig_transactions_registry_id.clone())) - .execute_single() - .expect("multisig transactions registry should be generated along with the corresponding multisig account"); + .expect("multisig account should be created"); - let key: Name = "key".parse().unwrap(); + let key: Name = "success_marker".parse().unwrap(); + let transaction_target = unauthorized_target_opt + .as_ref() + 
.unwrap_or(&multisig_account_id) + .clone(); let instructions = vec![SetKeyValue::account( - multisig_account_id.clone(), + transaction_target.clone(), key.clone(), - "value".parse::().unwrap(), + "congratulations".parse::().unwrap(), ) .into()]; let instructions_hash = HashOf::new(&instructions); let proposer = signatories.pop_last().unwrap(); - let approvers = signatories; - - let args = &MultisigTransactionArgs::Propose(instructions); - let propose = ExecuteTrigger::new(multisig_transactions_registry_id.clone()).with_args(args); + let mut approvers = signatories.into_iter(); + let propose = MultisigPropose::new(multisig_account_id.clone(), instructions); alt_client(proposer, &test_client).submit_blocking(propose)?; - // Check that the multisig transaction has not yet executed - let _err = test_client - .query_single(FindAccountMetadata::new( - multisig_account_id.clone(), - key.clone(), - )) - .expect_err("key-value shouldn't be set without enough approvals"); - // Allow time to elapse to test the expiration - if let Some(ms) = transaction_ttl_ms { + if let Some(ms) = transaction_ttl_ms_opt { std::thread::sleep(Duration::from_millis(ms)) }; test_client.submit_blocking(Log::new(Level::DEBUG, "Just ticking time".to_string()))?; - // All but the first signatory approve the multisig transaction - for approver in approvers.into_iter().skip(1) { - let args = &MultisigTransactionArgs::Approve(instructions_hash); - let approve = - ExecuteTrigger::new(multisig_transactions_registry_id.clone()).with_args(args); + let approve = MultisigApprove::new(multisig_account_id.clone(), instructions_hash); + + // Approve once to see if the proposal expires + let approver = approvers.next().unwrap(); + alt_client(approver, &test_client).submit_blocking(approve.clone())?; - alt_client(approver, &test_client).submit_blocking(approve)?; + // Subsequent approvals should succeed unless the proposal is expired + for _ in 0..(N_SIGNATORIES - 4) { + let approver = approvers.next().unwrap(); 
+ let res = alt_client(approver, &test_client).submit_blocking(approve.clone()); + match &transaction_ttl_ms_opt { + None => assert!(res.is_ok()), + _ => assert!(res.is_err()), + } } - // Check that the multisig transaction has executed + + // Check that the multisig transaction has not yet executed + let _err = test_client + .query_single(FindAccountMetadata::new( + transaction_target.clone(), + key.clone(), + )) + .expect_err("instructions shouldn't execute without enough approvals"); + + // The last approve to proceed to validate and execute the instructions + let approver = approvers.next().unwrap(); + let res = alt_client(approver, &test_client).submit_blocking(approve.clone()); + match (&transaction_ttl_ms_opt, &unauthorized_target_opt) { + (None, None) => assert!(res.is_ok()), + _ => assert!(res.is_err()), + } + + // Check if the multisig transaction has executed + let res = test_client.query_single(FindAccountMetadata::new(transaction_target, key.clone())); + match (&transaction_ttl_ms_opt, &unauthorized_target_opt) { + (None, None) => assert!(res.is_ok()), + _ => assert!(res.is_err()), + } + + // Check if the transaction entry is deleted let res = test_client.query_single(FindAccountMetadata::new( - multisig_account_id.clone(), - key.clone(), + multisig_account_id, + format!("proposals/{instructions_hash}/instructions") + .parse() + .unwrap(), )); - - if transaction_ttl_ms.is_some() { - let _err = res.expect_err("key-value shouldn't be set despite enough approvals"); - } else { - res.expect("key-value should be set with enough approvals"); + match (&transaction_ttl_ms_opt, &unauthorized_target_opt) { + // In case failing validation, the entry can exit only by expiring + (None, Some(_)) => assert!(res.is_ok()), + _ => assert!(res.is_err()), } Ok(()) @@ -196,13 +266,12 @@ fn multisig_base(transaction_ttl_ms: Option) -> Result<()> { /// 0 1 2 3 4 5 <--- personal signatories /// ``` #[test] -#[allow(clippy::similar_names, clippy::too_many_lines)] 
+#[expect(clippy::similar_names, clippy::too_many_lines)] fn multisig_recursion() -> Result<()> { let (network, _rt) = NetworkBuilder::new().start_blocking()?; let test_client = network.client(); let wonderland = "wonderland"; - let ms_accounts_registry_id = multisig_accounts_registry_of(&wonderland.parse().unwrap()); // Populate signatories in the domain let signatories = core::iter::repeat_with(|| gen_account_in(wonderland)) @@ -227,14 +296,16 @@ fn multisig_recursion() -> Result<()> { .into_iter() .map(|sigs| { let ms_account_id = gen_account_in(wonderland).0; - let args = MultisigAccountArgs { - account: ms_account_id.signatory().clone(), - signatories: sigs.iter().copied().map(|id| (id.clone(), 1)).collect(), - quorum: sigs.len().try_into().unwrap(), - transaction_ttl_ms: u64::MAX, - }; - let register_ms_account = - ExecuteTrigger::new(ms_accounts_registry_id.clone()).with_args(&args); + let register_ms_account = MultisigRegister::new( + ms_account_id.clone(), + sigs.iter().copied().map(|id| (id.clone(), 1)).collect(), + sigs.len() + .try_into() + .ok() + .and_then(NonZeroU16::new) + .unwrap(), + NonZeroU64::new(u64::MAX).unwrap(), + ); test_client .submit_blocking(register_ms_account) @@ -264,52 +335,37 @@ fn multisig_recursion() -> Result<()> { let msa_012345 = msas[0].clone(); // One of personal signatories proposes a multisig transaction - let key: Name = "key".parse().unwrap(); + let key: Name = "success_marker".parse().unwrap(); let instructions = vec![SetKeyValue::account( msa_012345.clone(), key.clone(), - "value".parse::().unwrap(), + "congratulations".parse::().unwrap(), ) .into()]; let instructions_hash = HashOf::new(&instructions); let proposer = sigs_0.pop_last().unwrap(); - let ms_transactions_registry_id = multisig_transactions_registry_of(&msa_012345); - let args = MultisigTransactionArgs::Propose(instructions); - let propose = ExecuteTrigger::new(ms_transactions_registry_id.clone()).with_args(&args); + let propose = 
MultisigPropose::new(msa_012345.clone(), instructions); alt_client(proposer, &test_client).submit_blocking(propose)?; - // Ticks as many times as the multisig recursion - (0..2).for_each(|_| { - test_client - .submit_blocking(Log::new(Level::DEBUG, "Just ticking time".to_string())) - .unwrap(); - }); - - // Check that the entire authentication policy has been deployed down to one of the leaf registries + // Check that the entire authentication policy has been deployed down to one of the leaf signatories let approval_hash_to_12345 = { let approval_hash_to_012345 = { - let registry_id = multisig_transactions_registry_of(&msa_012345); - let args = MultisigTransactionArgs::Approve(instructions_hash); - let approve: InstructionBox = ExecuteTrigger::new(registry_id.clone()) - .with_args(&args) - .into(); + let approve: InstructionBox = + MultisigApprove::new(msa_012345.clone(), instructions_hash).into(); HashOf::new(&vec![approve]) }; - let registry_id = multisig_transactions_registry_of(&msa_12345); - let args = MultisigTransactionArgs::Approve(approval_hash_to_012345); - let approve: InstructionBox = ExecuteTrigger::new(registry_id.clone()) - .with_args(&args) - .into(); + let approve: InstructionBox = + MultisigApprove::new(msa_12345.clone(), approval_hash_to_012345).into(); HashOf::new(&vec![approve]) }; let approvals_at_12: BTreeSet = test_client - .query_single(FindTriggerMetadata::new( - multisig_transactions_registry_of(&msa_12), + .query_single(FindAccountMetadata::new( + msa_12.clone(), format!("proposals/{approval_hash_to_12345}/approvals") .parse() .unwrap(), @@ -323,16 +379,14 @@ fn multisig_recursion() -> Result<()> { // Check that the multisig transaction has not yet executed let _err = test_client .query_single(FindAccountMetadata::new(msa_012345.clone(), key.clone())) - .expect_err("key-value shouldn't be set without enough approvals"); + .expect_err("instructions shouldn't execute without enough approvals"); // All the rest signatories approve the 
multisig transaction let approve_for_each = |approvers: BTreeMap, instructions_hash: HashOf>, ms_account: &AccountId| { for approver in approvers { - let registry_id = multisig_transactions_registry_of(ms_account); - let args = MultisigTransactionArgs::Approve(instructions_hash); - let approve = ExecuteTrigger::new(registry_id.clone()).with_args(&args); + let approve = MultisigApprove::new(ms_account.clone(), instructions_hash); alt_client(approver, &test_client) .submit_blocking(approve) @@ -343,47 +397,10 @@ fn multisig_recursion() -> Result<()> { approve_for_each(sigs_12, approval_hash_to_12345, &msa_12); approve_for_each(sigs_345, approval_hash_to_12345, &msa_345); - // Let the intermediate registry (12345) collect approvals and approve the original proposal - test_client.submit_blocking(Log::new(Level::DEBUG, "Just ticking time".to_string()))?; - - // Let the root registry (012345) collect approvals and execute the original proposal - test_client.submit_blocking(Log::new(Level::DEBUG, "Just ticking time".to_string()))?; - // Check that the multisig transaction has executed test_client .query_single(FindAccountMetadata::new(msa_012345.clone(), key.clone())) - .expect("key-value should be set with enough approvals"); - - Ok(()) -} - -#[test] -fn persistent_domain_level_authority() -> Result<()> { - let (network, _rt) = NetworkBuilder::new().start_blocking()?; - let test_client = network.client(); - - let wonderland: DomainId = "wonderland".parse().unwrap(); - - let ms_accounts_registry_id = multisig_accounts_registry_of(&wonderland); - - // Domain owner changes from Alice to Bob - test_client.submit_blocking(Transfer::domain( - ALICE_ID.clone(), - wonderland, - BOB_ID.clone(), - ))?; - - // One block gap to follow the domain owner change - test_client.submit_blocking(Log::new(Level::DEBUG, "Just ticking time".to_string()))?; - - // Bob is the authority of the wonderland multisig accounts registry - let ms_accounts_registry = test_client - 
.query(FindTriggers::new()) - .filter_with(|trigger| trigger.id.eq(ms_accounts_registry_id.clone())) - .execute_single() - .expect("multisig accounts registry should survive before and after a domain owner change"); - - assert!(*ms_accounts_registry.action().authority() == BOB_ID.clone()); + .expect("instructions should execute with enough approvals"); Ok(()) } @@ -396,61 +413,18 @@ fn reserved_names() { let account_in_another_domain = gen_account_in("garden_of_live_flowers").0; { - let reserved_prefix = "multisig_accounts_"; - let register = { - let id: TriggerId = format!("{reserved_prefix}{}", account_in_another_domain.domain()) - .parse() - .unwrap(); - let action = Action::new( - Vec::::new(), - Repeats::Indefinitely, - ALICE_ID.clone(), - ExecuteTriggerEventFilter::new(), - ); - Register::trigger(Trigger::new(id, action)) - }; - let _err = test_client.submit_blocking(register).expect_err( - "trigger with this name shouldn't be registered by anyone other than multisig system", - ); - } - - { - let reserved_prefix = "multisig_transactions_"; - let register = { - let id: TriggerId = format!( - "{reserved_prefix}{}_{}", - account_in_another_domain.signatory(), - account_in_another_domain.domain() - ) - .parse() - .unwrap(); - let action = Action::new( - Vec::::new(), - Repeats::Indefinitely, - ALICE_ID.clone(), - ExecuteTriggerEventFilter::new(), - ); - Register::trigger(Trigger::new(id, action)) - }; - let _err = test_client.submit_blocking(register).expect_err( - "trigger with this name shouldn't be registered by anyone other than domain owner", - ); - } - - { - let reserved_prefix = "multisig_signatory_"; let register = { - let id: RoleId = format!( - "{reserved_prefix}{}_{}", - account_in_another_domain.signatory(), - account_in_another_domain.domain() + let role = format!( + "MULTISIG_SIGNATORY/{}/{}", + account_in_another_domain.domain(), + account_in_another_domain.signatory() ) .parse() .unwrap(); - Register::role(Role::new(id, ALICE_ID.clone())) + 
Register::role(Role::new(role, ALICE_ID.clone())) }; let _err = test_client.submit_blocking(register).expect_err( - "role with this name shouldn't be registered by anyone other than domain owner", + "role with this name shouldn't be registered by anyone other than the domain owner", ); } } @@ -463,28 +437,13 @@ fn alt_client(signatory: (AccountId, KeyPair), base_client: &Client) -> Client { } } -fn multisig_accounts_registry_of(domain: &DomainId) -> TriggerId { - format!("multisig_accounts_{domain}",).parse().unwrap() -} - -fn multisig_transactions_registry_of(multisig_account: &AccountId) -> TriggerId { - format!( - "multisig_transactions_{}_{}", - multisig_account.signatory(), - multisig_account.domain() - ) - .parse() - .unwrap() -} - -#[allow(dead_code)] -fn debug_mst_registry(msa: &AccountId, client: &Client) { - let mst_registry = client - .query(FindTriggers::new()) - .filter_with(|trigger| trigger.id.eq(multisig_transactions_registry_of(msa))) +#[expect(dead_code)] +fn debug_account(account_id: &AccountId, client: &Client) { + let account = client + .query(FindAccounts) + .filter_with(|account| account.id.eq(account_id.clone())) .execute_single() .unwrap(); - let mst_metadata = mst_registry.action().metadata(); - iroha_logger::error!(%msa, ?mst_metadata); + iroha_logger::error!(?account); } diff --git a/crates/iroha_cli/src/main.rs b/crates/iroha_cli/src/main.rs index 41c8d252843..c0fb1e9761e 100644 --- a/crates/iroha_cli/src/main.rs +++ b/crates/iroha_cli/src/main.rs @@ -1177,12 +1177,13 @@ mod json { } mod multisig { - use std::io::{BufReader, Read as _}; - - use iroha::multisig_data_model::{ - MultisigAccountArgs, MultisigTransactionArgs, DEFAULT_MULTISIG_TTL_MS, + use std::{ + io::{BufReader, Read as _}, + num::{NonZeroU16, NonZeroU64}, }; + use iroha::executor_data_model::isi::multisig::*; + use super::*; /// Arguments for multisig subcommand @@ -1190,7 +1191,7 @@ mod multisig { pub enum Args { /// Register a multisig account Register(Register), - /// 
Propose a multisig transaction + /// Propose a multisig transaction, with `Vec` stdin Propose(Propose), /// Approve a multisig transaction Approve(Approve), @@ -1230,30 +1231,20 @@ mod multisig { impl RunArgs for Register { fn run(self, context: &mut dyn RunContext) -> Result<()> { - let Self { - account, - signatories, - weights, - quorum, - transaction_ttl, - } = self; - if signatories.len() != weights.len() { + if self.signatories.len() != self.weights.len() { return Err(eyre!("signatories and weights must be equal in length")); } - let registry_id: TriggerId = format!("multisig_accounts_{}", account.domain()) - .parse() - .unwrap(); - let args = MultisigAccountArgs { - account: account.signatory().clone(), - signatories: signatories.into_iter().zip(weights).collect(), - quorum, - transaction_ttl_ms: transaction_ttl + let register_multisig_account = MultisigRegister::new( + self.account, + self.signatories.into_iter().zip(self.weights).collect(), + NonZeroU16::new(self.quorum).expect("quorum should not be 0"), + self.transaction_ttl .as_millis() .try_into() - .expect("ttl must be within 584942417 years"), - }; - let register_multisig_account = - iroha::data_model::isi::ExecuteTrigger::new(registry_id).with_args(&args); + .ok() + .and_then(NonZeroU64::new) + .expect("ttl should be between 1 ms and 584942417 years"), + ); submit([register_multisig_account], Metadata::default(), context) .wrap_err("Failed to register multisig account") @@ -1270,14 +1261,6 @@ mod multisig { impl RunArgs for Propose { fn run(self, context: &mut dyn RunContext) -> Result<()> { - let Self { account } = self; - let registry_id: TriggerId = format!( - "multisig_transactions_{}_{}", - account.signatory(), - account.domain() - ) - .parse() - .unwrap(); let instructions: Vec = { let mut reader = BufReader::new(stdin()); let mut raw_content = Vec::new(); @@ -1287,9 +1270,7 @@ mod multisig { }; let instructions_hash = HashOf::new(&instructions); println!("{instructions_hash}"); - let args = 
MultisigTransactionArgs::Propose(instructions); - let propose_multisig_transaction = - iroha::data_model::isi::ExecuteTrigger::new(registry_id).with_args(&args); + let propose_multisig_transaction = MultisigPropose::new(self.account, instructions); submit([propose_multisig_transaction], Metadata::default(), context) .wrap_err("Failed to propose transaction") @@ -1309,20 +1290,8 @@ mod multisig { impl RunArgs for Approve { fn run(self, context: &mut dyn RunContext) -> Result<()> { - let Self { - account, - instructions_hash, - } = self; - let registry_id: TriggerId = format!( - "multisig_transactions_{}_{}", - account.signatory(), - account.domain() - ) - .parse() - .unwrap(); - let args = MultisigTransactionArgs::Approve(instructions_hash); let approve_multisig_transaction = - iroha::data_model::isi::ExecuteTrigger::new(registry_id).with_args(&args); + MultisigApprove::new(self.account, self.instructions_hash); submit([approve_multisig_transaction], Metadata::default(), context) .wrap_err("Failed to approve transaction") @@ -1345,6 +1314,22 @@ mod multisig { } } + const DELIMITER: char = '/'; + const PROPOSALS: &str = "proposals"; + const MULTISIG_SIGNATORY: &str = "MULTISIG_SIGNATORY"; + + fn multisig_account_from(role: &RoleId) -> Option { + role.name() + .as_ref() + .strip_prefix(MULTISIG_SIGNATORY)? 
+ .rsplit_once(DELIMITER) + .and_then(|(init, last)| { + format!("{last}@{}", init.trim_matches(DELIMITER)) + .parse() + .ok() + }) + } + /// Recursively trace back to the root multisig account fn trace_back_from( account: AccountId, @@ -1353,42 +1338,27 @@ mod multisig { ) -> Result<()> { let Ok(multisig_roles) = client .query(FindRolesByAccountId::new(account)) - .filter_with(|role_id| role_id.name.starts_with("multisig_signatory_")) + .filter_with(|role_id| role_id.name.starts_with(MULTISIG_SIGNATORY)) .execute_all() else { return Ok(()); }; for role_id in multisig_roles { - let super_account: AccountId = role_id - .name() - .as_ref() - .strip_prefix("multisig_signatory_") - .unwrap() - .replacen('_', "@", 1) - .parse() - .unwrap(); - - trace_back_from(super_account, client, context)?; - - let transactions_registry_id: TriggerId = role_id - .name() - .as_ref() - .replace("signatory", "transactions") - .parse() - .unwrap(); - - context.print_data(&transactions_registry_id)?; - - let transactions_registry = client - .query(FindTriggers::new()) - .filter_with(|trigger| trigger.id.eq(transactions_registry_id)) + let super_account_id: AccountId = multisig_account_from(&role_id).unwrap(); + + trace_back_from(super_account_id.clone(), client, context)?; + + context.print_data(&super_account_id)?; + + let super_account = client + .query(FindAccounts) + .filter_with(|account| account.id.eq(super_account_id)) .execute_single()?; - let proposal_kvs = transactions_registry - .action() + let proposal_kvs = super_account .metadata() .iter() - .filter(|kv| kv.0.as_ref().starts_with("proposals")); + .filter(|kv| kv.0.as_ref().starts_with(PROPOSALS)); proposal_kvs.fold("", |acc, (k, v)| { let mut path = k.as_ref().split('/'); diff --git a/crates/iroha_data_model/src/visit.rs b/crates/iroha_data_model/src/visit.rs index 92b8709c618..dae06cd82cd 100644 --- a/crates/iroha_data_model/src/visit.rs +++ b/crates/iroha_data_model/src/visit.rs @@ -49,7 +49,7 @@ pub trait Visit { 
visit_execute_trigger(&ExecuteTrigger), visit_set_parameter(&SetParameter), visit_log(&Log), - visit_custom(&CustomInstruction), + visit_custom_instruction(&CustomInstruction), // Visit SingularQueryBox visit_find_asset_quantity_by_id(&FindAssetQuantityById), @@ -230,7 +230,7 @@ pub fn visit_instruction(visitor: &mut V, isi: &InstructionBo InstructionBox::Transfer(variant_value) => visitor.visit_transfer(variant_value), InstructionBox::Unregister(variant_value) => visitor.visit_unregister(variant_value), InstructionBox::Upgrade(variant_value) => visitor.visit_upgrade(variant_value), - InstructionBox::Custom(custom) => visitor.visit_custom(custom), + InstructionBox::Custom(custom) => visitor.visit_custom_instruction(custom), } } @@ -373,7 +373,7 @@ leaf_visitors! { visit_set_parameter(&SetParameter), visit_execute_trigger(&ExecuteTrigger), visit_log(&Log), - visit_custom(&CustomInstruction), + visit_custom_instruction(&CustomInstruction), // Singular Quert visitors visit_find_asset_quantity_by_id(&FindAssetQuantityById), diff --git a/crates/iroha_executor/src/default/isi/mod.rs b/crates/iroha_executor/src/default/isi/mod.rs new file mode 100644 index 00000000000..d3fddf374c0 --- /dev/null +++ b/crates/iroha_executor/src/default/isi/mod.rs @@ -0,0 +1,49 @@ +use iroha_executor_data_model::isi::multisig::MultisigInstructionBox; + +use super::*; +use crate::prelude::{Execute, Vec, Visit}; + +pub fn visit_custom_instruction( + executor: &mut V, + instruction: &CustomInstruction, +) { + if let Ok(instruction) = MultisigInstructionBox::try_from(instruction.payload()) { + return instruction.visit_execute(executor); + }; + + deny!(executor, "unexpected custom instruction"); +} + +trait VisitExecute: crate::data_model::isi::Instruction { + fn visit_execute(self, executor: &mut V) { + let init_authority = executor.context().authority.clone(); + self.visit(executor); + if executor.verdict().is_ok() { + if let Err(err) = self.execute(executor) { + executor.deny(err); + } + } + 
executor.context_mut().authority = init_authority; + } + + fn visit(&self, _executor: &mut V) { + unimplemented!("should be overridden unless `Self::visit_execute` is overridden") + } + + fn execute(self, _executor: &mut V) -> Result<(), ValidationFail> { + unimplemented!("should be overridden unless `Self::visit_execute` is overridden") + } +} + +/// Validate and execute instructions in sequence without returning back to the visit root, +/// checking the sanity of the executor verdict +macro_rules! visit_seq { + ($executor:ident.$visit:ident($instruction:expr)) => { + $executor.$visit($instruction); + if $executor.verdict().is_err() { + return $executor.verdict().clone(); + } + }; +} + +mod multisig; diff --git a/crates/iroha_executor/src/default/isi/multisig/account.rs b/crates/iroha_executor/src/default/isi/multisig/account.rs new file mode 100644 index 00000000000..1b27223b93f --- /dev/null +++ b/crates/iroha_executor/src/default/isi/multisig/account.rs @@ -0,0 +1,64 @@ +//! Validation and execution logic of instructions for multisig accounts + +use super::*; + +impl VisitExecute for MultisigRegister { + fn visit(&self, _executor: &mut V) {} + + fn execute(self, executor: &mut V) -> Result<(), ValidationFail> { + let multisig_account = self.account; + let multisig_role = multisig_role_for(&multisig_account); + + // The multisig registrant needs to have sufficient permission to register personal accounts + // TODO Loosen to just being one of the signatories? But impose the procedure of propose and approve? 
+ visit_seq!(executor + .visit_register_account(&Register::account(Account::new(multisig_account.clone())))); + + let domain_owner = executor + .host() + .query(FindDomains) + .filter_with(|domain| domain.id.eq(multisig_account.domain().clone())) + .execute_single() + .dbg_expect("domain should be found as the preceding account registration succeeded") + .owned_by() + .clone(); + + // Authorize as the domain owner: + // Just having permission to register accounts is insufficient to register multisig roles + executor.context_mut().authority = domain_owner.clone(); + + visit_seq!(executor.visit_set_account_key_value(&SetKeyValue::account( + multisig_account.clone(), + SIGNATORIES.parse().unwrap(), + Json::new(&self.signatories), + ))); + + visit_seq!(executor.visit_set_account_key_value(&SetKeyValue::account( + multisig_account.clone(), + QUORUM.parse().unwrap(), + Json::new(self.quorum), + ))); + + visit_seq!(executor.visit_set_account_key_value(&SetKeyValue::account( + multisig_account.clone(), + TRANSACTION_TTL_MS.parse().unwrap(), + Json::new(self.transaction_ttl_ms), + ))); + + visit_seq!(executor.visit_register_role(&Register::role( + // Temporarily grant a multisig role to the domain owner to delegate the role to the signatories + Role::new(multisig_role.clone(), domain_owner.clone()), + ))); + + for signatory in self.signatories.keys().cloned() { + visit_seq!(executor + .visit_grant_account_role(&Grant::account_role(multisig_role.clone(), signatory))); + } + + visit_seq!( + executor.visit_revoke_account_role(&Revoke::account_role(multisig_role, domain_owner)) + ); + + Ok(()) + } +} diff --git a/crates/iroha_executor/src/default/isi/multisig/mod.rs b/crates/iroha_executor/src/default/isi/multisig/mod.rs new file mode 100644 index 00000000000..6ed09717677 --- /dev/null +++ b/crates/iroha_executor/src/default/isi/multisig/mod.rs @@ -0,0 +1,52 @@ +use iroha_executor_data_model::isi::multisig::*; + +use super::*; +use crate::smart_contract::{DebugExpectExt as _, 
DebugUnwrapExt}; + +mod account; +mod transaction; + +impl VisitExecute for MultisigInstructionBox { + fn visit_execute(self, executor: &mut V) { + match self { + MultisigInstructionBox::Register(instruction) => instruction.visit_execute(executor), + MultisigInstructionBox::Propose(instruction) => instruction.visit_execute(executor), + MultisigInstructionBox::Approve(instruction) => instruction.visit_execute(executor), + } + } +} + +const DELIMITER: char = '/'; +const SIGNATORIES: &str = "signatories"; +const QUORUM: &str = "quorum"; +const TRANSACTION_TTL_MS: &str = "transaction_ttl_ms"; +const PROPOSALS: &str = "proposals"; +const MULTISIG_SIGNATORY: &str = "MULTISIG_SIGNATORY"; + +fn instructions_key(hash: &HashOf>) -> Name { + format!("{PROPOSALS}{DELIMITER}{hash}{DELIMITER}instructions") + .parse() + .unwrap() +} + +fn proposed_at_ms_key(hash: &HashOf>) -> Name { + format!("{PROPOSALS}{DELIMITER}{hash}{DELIMITER}proposed_at_ms") + .parse() + .unwrap() +} + +fn approvals_key(hash: &HashOf>) -> Name { + format!("{PROPOSALS}{DELIMITER}{hash}{DELIMITER}approvals") + .parse() + .unwrap() +} + +fn multisig_role_for(account: &AccountId) -> RoleId { + format!( + "{MULTISIG_SIGNATORY}{DELIMITER}{}{DELIMITER}{}", + account.domain(), + account.signatory(), + ) + .parse() + .unwrap() +} diff --git a/crates/iroha_executor/src/default/isi/multisig/transaction.rs b/crates/iroha_executor/src/default/isi/multisig/transaction.rs new file mode 100644 index 00000000000..542fcc1636b --- /dev/null +++ b/crates/iroha_executor/src/default/isi/multisig/transaction.rs @@ -0,0 +1,249 @@ +//! 
Validation and execution logic of instructions for multisig transactions + +use alloc::collections::{btree_map::BTreeMap, btree_set::BTreeSet}; + +use super::*; + +impl VisitExecute for MultisigPropose { + fn visit(&self, executor: &mut V) { + let proposer = executor.context().authority.clone(); + let multisig_account = self.account.clone(); + let host = executor.host(); + let instructions_hash = HashOf::new(&self.instructions); + let multisig_role = multisig_role_for(&multisig_account); + let is_downward_proposal = host + .query_single(FindAccountMetadata::new( + proposer.clone(), + SIGNATORIES.parse().unwrap(), + )) + .map_or(false, |proposer_signatories| { + proposer_signatories + .try_into_any::>() + .dbg_unwrap() + .contains_key(&multisig_account) + }); + let has_multisig_role = host + .query(FindRolesByAccountId::new(proposer)) + .filter_with(|role_id| role_id.eq(multisig_role)) + .execute_single() + .is_ok(); + + if !(is_downward_proposal || has_multisig_role) { + deny!(executor, "not qualified to propose multisig"); + }; + + if host + .query_single(FindAccountMetadata::new( + multisig_account.clone(), + approvals_key(&instructions_hash), + )) + .is_ok() + { + deny!(executor, "multisig proposal duplicates") + }; + } + + fn execute(self, executor: &mut V) -> Result<(), ValidationFail> { + let proposer = executor.context().authority.clone(); + let multisig_account = self.account; + + // Authorize as the multisig account + executor.context_mut().authority = multisig_account.clone(); + + let instructions_hash = HashOf::new(&self.instructions); + let signatories: BTreeMap = executor + .host() + .query_single(FindAccountMetadata::new( + multisig_account.clone(), + SIGNATORIES.parse().unwrap(), + )) + .dbg_unwrap() + .try_into_any() + .dbg_unwrap(); + let now_ms: u64 = executor + .context() + .curr_block + .creation_time() + .as_millis() + .try_into() + .dbg_expect("shouldn't overflow within 584942417 years"); + let approvals = BTreeSet::from([proposer]); + + // 
Recursively deploy multisig authentication down to the personal leaf signatories + for signatory in signatories.keys().cloned() { + let is_multisig_again = executor + .host() + .query(FindRoleIds) + .filter_with(|role_id| role_id.eq(multisig_role_for(&signatory))) + .execute_single_opt() + .dbg_unwrap() + .is_some(); + + if is_multisig_again { + let propose_to_approve_me = { + let approve_me = + MultisigApprove::new(multisig_account.clone(), instructions_hash); + + MultisigPropose::new(signatory, [approve_me.into()].to_vec()) + }; + + propose_to_approve_me.visit_execute(executor); + } + } + + visit_seq!(executor.visit_set_account_key_value(&SetKeyValue::account( + multisig_account.clone(), + instructions_key(&instructions_hash).clone(), + Json::new(&self.instructions), + ))); + + visit_seq!(executor.visit_set_account_key_value(&SetKeyValue::account( + multisig_account.clone(), + proposed_at_ms_key(&instructions_hash).clone(), + Json::new(now_ms), + ))); + + visit_seq!(executor.visit_set_account_key_value(&SetKeyValue::account( + multisig_account, + approvals_key(&instructions_hash).clone(), + Json::new(&approvals), + ))); + + Ok(()) + } +} + +impl VisitExecute for MultisigApprove { + fn visit(&self, executor: &mut V) { + let approver = executor.context().authority.clone(); + let multisig_account = self.account.clone(); + let host = executor.host(); + let multisig_role = multisig_role_for(&multisig_account); + + if host + .query(FindRolesByAccountId::new(approver)) + .filter_with(|role_id| role_id.eq(multisig_role)) + .execute_single() + .is_err() + { + deny!(executor, "not qualified to approve multisig"); + }; + } + + fn execute(self, executor: &mut V) -> Result<(), ValidationFail> { + let approver = executor.context().authority.clone(); + let multisig_account = self.account; + + // Authorize as the multisig account + executor.context_mut().authority = multisig_account.clone(); + + let host = executor.host(); + let instructions_hash = self.instructions_hash; + let 
signatories: BTreeMap = host + .query_single(FindAccountMetadata::new( + multisig_account.clone(), + SIGNATORIES.parse().unwrap(), + )) + .dbg_unwrap() + .try_into_any() + .dbg_unwrap(); + let quorum: u16 = host + .query_single(FindAccountMetadata::new( + multisig_account.clone(), + QUORUM.parse().unwrap(), + )) + .dbg_unwrap() + .try_into_any() + .dbg_unwrap(); + let transaction_ttl_ms: u64 = host + .query_single(FindAccountMetadata::new( + multisig_account.clone(), + TRANSACTION_TTL_MS.parse().unwrap(), + )) + .dbg_unwrap() + .try_into_any() + .dbg_unwrap(); + let instructions: Vec = host + .query_single(FindAccountMetadata::new( + multisig_account.clone(), + instructions_key(&instructions_hash), + ))? + .try_into_any() + .dbg_unwrap(); + let proposed_at_ms: u64 = host + .query_single(FindAccountMetadata::new( + multisig_account.clone(), + proposed_at_ms_key(&instructions_hash), + )) + .dbg_unwrap() + .try_into_any() + .dbg_unwrap(); + let now_ms: u64 = executor + .context() + .curr_block + .creation_time() + .as_millis() + .try_into() + .dbg_expect("shouldn't overflow within 584942417 years"); + let mut approvals: BTreeSet = host + .query_single(FindAccountMetadata::new( + multisig_account.clone(), + approvals_key(&instructions_hash), + )) + .dbg_unwrap() + .try_into_any() + .dbg_unwrap(); + + approvals.insert(approver); + + visit_seq!(executor.visit_set_account_key_value(&SetKeyValue::account( + multisig_account.clone(), + approvals_key(&instructions_hash), + Json::new(&approvals), + ))); + + let is_authenticated = quorum + <= signatories + .into_iter() + .filter(|(id, _)| approvals.contains(id)) + .map(|(_, weight)| u16::from(weight)) + .sum(); + + let is_expired = proposed_at_ms.saturating_add(transaction_ttl_ms) < now_ms; + + if is_authenticated || is_expired { + // Cleanup the transaction entry + visit_seq!( + executor.visit_remove_account_key_value(&RemoveKeyValue::account( + multisig_account.clone(), + approvals_key(&instructions_hash), + )) + ); + + 
visit_seq!( + executor.visit_remove_account_key_value(&RemoveKeyValue::account( + multisig_account.clone(), + proposed_at_ms_key(&instructions_hash), + )) + ); + + visit_seq!( + executor.visit_remove_account_key_value(&RemoveKeyValue::account( + multisig_account.clone(), + instructions_key(&instructions_hash), + )) + ); + + if is_expired { + // TODO Notify that the proposal has expired, while returning Ok for the entry deletion to take effect + } else { + // Validate and execute the authenticated multisig transaction + for instruction in instructions { + visit_seq!(executor.visit_instruction(&instruction)); + } + } + } + + Ok(()) + } +} diff --git a/crates/iroha_executor/src/default.rs b/crates/iroha_executor/src/default/mod.rs similarity index 93% rename from crates/iroha_executor/src/default.rs rename to crates/iroha_executor/src/default/mod.rs index 19fe09bb666..7a8479a8ee3 100644 --- a/crates/iroha_executor/src/default.rs +++ b/crates/iroha_executor/src/default/mod.rs @@ -17,13 +17,13 @@ pub use asset_definition::{ visit_set_asset_definition_key_value, visit_transfer_asset_definition, visit_unregister_asset_definition, }; -pub use custom::visit_custom; pub use domain::{ visit_register_domain, visit_remove_domain_key_value, visit_set_domain_key_value, visit_transfer_domain, visit_unregister_domain, }; pub use executor::visit_upgrade; use iroha_smart_contract::data_model::{prelude::*, visit::Visit}; +pub use isi::visit_custom_instruction; pub use log::visit_log; pub use parameter::visit_set_parameter; pub use peer::{visit_register_peer, visit_unregister_peer}; @@ -44,6 +44,8 @@ use crate::{ Execute, }; +pub mod isi; + // NOTE: If any new `visit_..` functions are introduced in this module, one should // not forget to update the default executor boilerplate too, specifically the // `iroha_executor::derive::default::impl_derive_visit` function @@ -117,7 +119,7 @@ pub fn visit_instruction(executor: &mut V, isi: &In executor.visit_upgrade(isi); } 
InstructionBox::Custom(isi) => { - executor.visit_custom(isi); + executor.visit_custom_instruction(isi); } } } @@ -368,9 +370,7 @@ pub mod domain { AnyPermission::CanRegisterTrigger(permission) => { permission.authority.domain() == domain_id } - AnyPermission::CanRegisterAnyTrigger(_) - | AnyPermission::CanUnregisterAnyTrigger(_) - | AnyPermission::CanUnregisterTrigger(_) + AnyPermission::CanUnregisterTrigger(_) | AnyPermission::CanExecuteTrigger(_) | AnyPermission::CanModifyTrigger(_) | AnyPermission::CanModifyTriggerMetadata(_) @@ -548,9 +548,7 @@ pub mod account { AnyPermission::CanBurnAsset(permission) => permission.asset.account() == account_id, AnyPermission::CanTransferAsset(permission) => permission.asset.account() == account_id, AnyPermission::CanRegisterTrigger(permission) => permission.authority == *account_id, - AnyPermission::CanRegisterAnyTrigger(_) - | AnyPermission::CanUnregisterAnyTrigger(_) - | AnyPermission::CanUnregisterTrigger(_) + AnyPermission::CanUnregisterTrigger(_) | AnyPermission::CanExecuteTrigger(_) | AnyPermission::CanModifyTrigger(_) | AnyPermission::CanModifyTriggerMetadata(_) @@ -816,8 +814,6 @@ pub mod asset_definition { AnyPermission::CanUnregisterAccount(_) | AnyPermission::CanRegisterAsset(_) | AnyPermission::CanModifyAccountMetadata(_) - | AnyPermission::CanRegisterAnyTrigger(_) - | AnyPermission::CanUnregisterAnyTrigger(_) | AnyPermission::CanRegisterTrigger(_) | AnyPermission::CanUnregisterTrigger(_) | AnyPermission::CanExecuteTrigger(_) @@ -1167,7 +1163,7 @@ pub mod parameter { } pub mod role { - use iroha_executor_data_model::permission::{role::CanManageRoles, trigger::CanExecuteTrigger}; + use iroha_executor_data_model::permission::role::CanManageRoles; use iroha_smart_contract::{data_model::role::Role, Iroha}; use super::*; @@ -1235,40 +1231,49 @@ pub mod role { isi: &Register, ) { let role = isi.object(); + let grant_role = &Grant::account_role(role.id().clone(), role.grant_to().clone()); let mut new_role = 
Role::new(role.id().clone(), role.grant_to().clone()); // Exception for multisig roles - let mut is_multisig_role = false; - if let Some(tail) = role - .id() - .name() - .as_ref() - .strip_prefix("multisig_signatory_") { - let Ok(account_id) = tail.replacen('_', "@", 1).parse::() else { - deny!(executor, "Violates multisig role format") - }; - if crate::permission::domain::is_domain_owner( - account_id.domain(), - &executor.context().authority, - executor.host(), - ) - .unwrap_or_default() - { - // Bind this role to this permission here, regardless of the given contains - let permission = CanExecuteTrigger { - trigger: format!( - "multisig_transactions_{}_{}", - account_id.signatory(), - account_id.domain() + use crate::permission::domain::is_domain_owner; + + const DELIMITER: char = '/'; + const MULTISIG_SIGNATORY: &str = "MULTISIG_SIGNATORY"; + + fn multisig_account_from(role: &RoleId) -> Option { + role.name() + .as_ref() + .strip_prefix(MULTISIG_SIGNATORY)? + .rsplit_once(DELIMITER) + .and_then(|(init, last)| { + format!("{last}@{}", init.trim_matches(DELIMITER)) + .parse() + .ok() + }) + } + + if role.id().name().as_ref().starts_with(MULTISIG_SIGNATORY) { + if let Some(multisig_account) = multisig_account_from(role.id()) { + if is_domain_owner( + multisig_account.domain(), + &executor.context().authority, + executor.host(), ) - .parse() - .unwrap(), - }; - new_role = new_role.add_permission(permission); - is_multisig_role = true; - } else { - deny!(executor, "Can't register multisig role") + .unwrap_or_default() + { + let isi = &Register::role(new_role); + if let Err(err) = executor.host().submit(isi) { + deny!(executor, err); + } + execute!(executor, grant_role); + } + deny!( + executor, + "only the domain owner can register multisig roles" + ) + } + deny!(executor, "violates multisig role name format") } } @@ -1298,12 +1303,10 @@ pub mod role { if executor.context().curr_block.is_genesis() || CanManageRoles.is_owned_by(&executor.context().authority, 
executor.host()) - || is_multisig_role { - let grant_role = &Grant::account_role(role.id().clone(), role.grant_to().clone()); let isi = &Register::role(new_role); if let Err(err) = executor.host().submit(isi) { - executor.deny(err); + deny!(executor, err); } execute!(executor, grant_role); @@ -1357,8 +1360,8 @@ pub mod role { pub mod trigger { use iroha_executor_data_model::permission::trigger::{ - CanExecuteTrigger, CanModifyTrigger, CanModifyTriggerMetadata, CanRegisterAnyTrigger, - CanRegisterTrigger, CanUnregisterAnyTrigger, CanUnregisterTrigger, + CanExecuteTrigger, CanModifyTrigger, CanModifyTriggerMetadata, CanRegisterTrigger, + CanUnregisterTrigger, }; use iroha_smart_contract::data_model::trigger::Trigger; @@ -1374,37 +1377,6 @@ pub mod trigger { let trigger = isi.object(); let is_genesis = executor.context().curr_block.is_genesis(); - let trigger_name = trigger.id().name().as_ref(); - - #[expect(clippy::option_if_let_else)] // clippy suggestion spoils readability - let naming_is_ok = if let Some(tail) = trigger_name.strip_prefix("multisig_accounts_") { - let system_account: AccountId = - // predefined in `GenesisBuilder::default` - "ed0120D8B64D62FD8E09B9F29FE04D9C63E312EFB1CB29F1BF6AF00EBC263007AE75F7@system" - .parse() - .unwrap(); - tail.parse::().is_ok() - && (is_genesis || executor.context().authority == system_account) - } else if let Some(tail) = trigger_name.strip_prefix("multisig_transactions_") { - tail.replacen('_', "@", 1) - .parse::() - .ok() - .and_then(|account_id| { - is_domain_owner( - account_id.domain(), - &executor.context().authority, - executor.host(), - ) - .ok() - }) - .unwrap_or_default() - } else { - true - }; - if !naming_is_ok { - deny!(executor, "Violates trigger naming restrictions"); - } - if is_genesis || { match is_domain_owner( @@ -1423,7 +1395,6 @@ pub mod trigger { can_register_user_trigger_token .is_owned_by(&executor.context().authority, executor.host()) } - || 
CanRegisterAnyTrigger.is_owned_by(&executor.context().authority, executor.host()) { execute!(executor, isi) } @@ -1448,7 +1419,6 @@ pub mod trigger { can_unregister_user_trigger_token .is_owned_by(&executor.context().authority, executor.host()) } - || CanUnregisterAnyTrigger.is_owned_by(&executor.context().authority, executor.host()) { let mut err = None; for (owner_id, permission) in accounts_permissions(executor.host()) { @@ -1557,20 +1527,6 @@ pub mod trigger { if can_execute_trigger_token.is_owned_by(authority, executor.host()) { execute!(executor, isi); } - // Any account in domain can call multisig accounts registry to register any multisig account in the domain - // TODO Restrict access to the multisig signatories? - // TODO Impose proposal and approval process? - if trigger_id - .name() - .as_ref() - .strip_prefix("multisig_accounts_") - .and_then(|s| s.parse::().ok()) - .map_or(false, |registry_domain| { - *authority.domain() == registry_domain - }) - { - execute!(executor, isi); - } deny!(executor, "Can't execute trigger owned by another account"); } @@ -1644,9 +1600,7 @@ pub mod trigger { AnyPermission::CanModifyTriggerMetadata(permission) => { &permission.trigger == trigger_id } - AnyPermission::CanRegisterAnyTrigger(_) - | AnyPermission::CanUnregisterAnyTrigger(_) - | AnyPermission::CanRegisterTrigger(_) + AnyPermission::CanRegisterTrigger(_) | AnyPermission::CanManagePeers(_) | AnyPermission::CanRegisterDomain(_) | AnyPermission::CanUnregisterDomain(_) @@ -1745,14 +1699,3 @@ pub mod log { execute!(executor, isi) } } - -pub mod custom { - use super::*; - - pub fn visit_custom(executor: &mut V, _isi: &CustomInstruction) { - deny!( - executor, - "Custom instructions should be handled in custom executor" - ) - } -} diff --git a/crates/iroha_executor/src/lib.rs b/crates/iroha_executor/src/lib.rs index a6146e50b82..bce5b1cbe12 100644 --- a/crates/iroha_executor/src/lib.rs +++ b/crates/iroha_executor/src/lib.rs @@ -294,6 +294,10 @@ pub trait Execute { /// 
Represents the current state of the world fn context(&self) -> &prelude::Context; + /// Mutable context for e.g. switching to another authority after validation before execution. + /// Note that mutations are persistent to the instance unless reset + fn context_mut(&mut self) -> &mut prelude::Context; + /// Executor verdict. fn verdict(&self) -> &Result; diff --git a/crates/iroha_executor/src/permission.rs b/crates/iroha_executor/src/permission.rs index f39cc199d71..7460b5e0df4 100644 --- a/crates/iroha_executor/src/permission.rs +++ b/crates/iroha_executor/src/permission.rs @@ -116,8 +116,6 @@ declare_permissions! { iroha_executor_data_model::permission::parameter::{CanSetParameters}, iroha_executor_data_model::permission::role::{CanManageRoles}, - iroha_executor_data_model::permission::trigger::{CanRegisterAnyTrigger}, - iroha_executor_data_model::permission::trigger::{CanUnregisterAnyTrigger}, iroha_executor_data_model::permission::trigger::{CanRegisterTrigger}, iroha_executor_data_model::permission::trigger::{CanUnregisterTrigger}, iroha_executor_data_model::permission::trigger::{CanModifyTrigger}, @@ -755,8 +753,8 @@ pub mod account { pub mod trigger { //! 
Module with pass conditions for trigger related tokens use iroha_executor_data_model::permission::trigger::{ - CanExecuteTrigger, CanModifyTrigger, CanModifyTriggerMetadata, CanRegisterAnyTrigger, - CanRegisterTrigger, CanUnregisterAnyTrigger, CanUnregisterTrigger, + CanExecuteTrigger, CanModifyTrigger, CanModifyTriggerMetadata, CanRegisterTrigger, + CanUnregisterTrigger, }; use super::*; @@ -820,34 +818,6 @@ pub mod trigger { } } - impl ValidateGrantRevoke for CanRegisterAnyTrigger { - fn validate_grant(&self, authority: &AccountId, context: &Context, host: &Iroha) -> Result { - OnlyGenesis::from(self).validate(authority, host, context) - } - fn validate_revoke( - &self, - authority: &AccountId, - context: &Context, - host: &Iroha, - ) -> Result { - OnlyGenesis::from(self).validate(authority, host, context) - } - } - - impl ValidateGrantRevoke for CanUnregisterAnyTrigger { - fn validate_grant(&self, authority: &AccountId, context: &Context, host: &Iroha) -> Result { - OnlyGenesis::from(self).validate(authority, host, context) - } - fn validate_revoke( - &self, - authority: &AccountId, - context: &Context, - host: &Iroha, - ) -> Result { - OnlyGenesis::from(self).validate(authority, host, context) - } - } - impl ValidateGrantRevoke for CanRegisterTrigger { fn validate_grant(&self, authority: &AccountId, context: &Context, host: &Iroha) -> Result { super::account::Owner::from(self).validate(authority, host, context) diff --git a/crates/iroha_executor_data_model/Cargo.toml b/crates/iroha_executor_data_model/Cargo.toml index 4627792101d..df1d60ab4da 100644 --- a/crates/iroha_executor_data_model/Cargo.toml +++ b/crates/iroha_executor_data_model/Cargo.toml @@ -16,5 +16,6 @@ iroha_executor_data_model_derive = { path = "../iroha_executor_data_model_derive iroha_data_model.workspace = true iroha_schema.workspace = true +derive_more = { workspace = true, features = ["constructor", "from"] } serde.workspace = true serde_json.workspace = true diff --git 
a/crates/iroha_executor_data_model/src/isi.rs b/crates/iroha_executor_data_model/src/isi.rs new file mode 100644 index 00000000000..255df1df20e --- /dev/null +++ b/crates/iroha_executor_data_model/src/isi.rs @@ -0,0 +1,118 @@ +//! Types for custom instructions + +use alloc::{collections::btree_map::BTreeMap, format, string::String, vec::Vec}; + +use derive_more::{Constructor, From}; +use iroha_data_model::{ + isi::{CustomInstruction, Instruction, InstructionBox}, + prelude::{Json, *}, +}; +use iroha_schema::IntoSchema; +use serde::{Deserialize, Serialize}; + +use super::*; + +macro_rules! impl_custom_instruction { + ($box:ty, $($instruction:ty)|+) => { + impl Instruction for $box {} + + impl From<$box> for InstructionBox { + fn from(value: $box) -> Self { + Self::Custom(value.into()) + } + } + + impl From<$box> for CustomInstruction { + fn from(value: $box) -> Self { + let payload = serde_json::to_value(&value) + .expect(concat!("INTERNAL BUG: Couldn't serialize ", stringify!($box))); + + Self::new(payload) + } + } + + impl TryFrom<&Json> for $box { + type Error = serde_json::Error; + + fn try_from(payload: &Json) -> serde_json::Result { + serde_json::from_str::(payload.as_ref()) + } + } $( + + impl Instruction for $instruction {} + + impl From<$instruction> for InstructionBox { + fn from(value: $instruction) -> Self { + Self::Custom(<$box>::from(value).into()) + } + })+ + }; +} + +/// Types for multisig instructions +pub mod multisig { + use core::num::{NonZeroU16, NonZeroU64}; + + use super::*; + + /// Multisig-related instructions + #[derive(Debug, Clone, Serialize, Deserialize, IntoSchema, From)] + pub enum MultisigInstructionBox { + /// Register a multisig account, which is a prerequisite of multisig transactions + Register(MultisigRegister), + /// Propose a multisig transaction and initialize approvals with the proposer's one + Propose(MultisigPropose), + /// Approve a certain multisig transaction + Approve(MultisigApprove), + } + + /// Register a multisig 
account, which is a prerequisite of multisig transactions + #[derive(Debug, Clone, Serialize, Deserialize, IntoSchema, Constructor)] + pub struct MultisigRegister { + /// Multisig account to be registered + ///
+ /// + /// Any corresponding private key allows the owner to manipulate this account as an ordinary personal account + /// + ///
+ // FIXME #5022 prevent multisig monopoly + // FIXME #5022 stop accepting user input: otherwise, after #4426 pre-registration account will be hijacked as a multisig account + pub account: AccountId, + /// List of signatories and their relative weights of responsibility for the multisig account + pub signatories: BTreeMap, + /// Threshold of total weight at which the multisig account is considered authenticated + pub quorum: NonZeroU16, + /// Multisig transaction time-to-live in milliseconds based on block timestamps. Defaults to [`DEFAULT_MULTISIG_TTL_MS`] + pub transaction_ttl_ms: NonZeroU64, + } + + /// Relative weight of responsibility for the multisig account. + /// 0 is allowed for observers who don't join governance + type Weight = u8; + + /// Default multisig transaction time-to-live in milliseconds based on block timestamps + pub const DEFAULT_MULTISIG_TTL_MS: u64 = 60 * 60 * 1_000; // 1 hour + + /// Propose a multisig transaction and initialize approvals with the proposer's one + #[derive(Debug, Clone, Serialize, Deserialize, IntoSchema, Constructor)] + pub struct MultisigPropose { + /// Multisig account to propose + pub account: AccountId, + /// Proposal contents + pub instructions: Vec, + } + + /// Approve a certain multisig transaction + #[derive(Debug, Clone, Serialize, Deserialize, IntoSchema, Constructor)] + pub struct MultisigApprove { + /// Multisig account to approve + pub account: AccountId, + /// Proposal to approve + pub instructions_hash: HashOf>, + } + + impl_custom_instruction!( + MultisigInstructionBox, + MultisigRegister | MultisigPropose | MultisigApprove + ); +} diff --git a/crates/iroha_executor_data_model/src/lib.rs b/crates/iroha_executor_data_model/src/lib.rs index 7695f7e384b..125a05dcf4e 100644 --- a/crates/iroha_executor_data_model/src/lib.rs +++ b/crates/iroha_executor_data_model/src/lib.rs @@ -4,6 +4,7 @@ extern crate alloc; extern crate self as iroha_executor_data_model; +pub mod isi; pub mod parameter; pub mod permission; 
diff --git a/crates/iroha_executor_data_model/src/permission.rs b/crates/iroha_executor_data_model/src/permission.rs index dc950a197bb..27778496268 100644 --- a/crates/iroha_executor_data_model/src/permission.rs +++ b/crates/iroha_executor_data_model/src/permission.rs @@ -178,16 +178,6 @@ pub mod asset { pub mod trigger { use super::*; - permission! { - #[derive(Copy)] - pub struct CanRegisterAnyTrigger; - } - - permission! { - #[derive(Copy)] - pub struct CanUnregisterAnyTrigger; - } - permission! { pub struct CanRegisterTrigger { pub authority: AccountId, diff --git a/crates/iroha_executor_derive/src/default.rs b/crates/iroha_executor_derive/src/default.rs index ca9e0abc80a..03c52f33998 100644 --- a/crates/iroha_executor_derive/src/default.rs +++ b/crates/iroha_executor_derive/src/default.rs @@ -155,7 +155,7 @@ pub fn impl_derive_visit(emitter: &mut Emitter, input: &syn::DeriveInput) -> Tok "fn visit_set_parameter(operation: &SetParameter)", "fn visit_upgrade(operation: &Upgrade)", "fn visit_log(operation: &Log)", - "fn visit_custom(operation: &CustomInstruction)", + "fn visit_custom_instruction(operation: &CustomInstruction)", ] .into_iter() .map(|item| { @@ -235,6 +235,10 @@ pub fn impl_derive_execute(emitter: &mut Emitter, input: &syn::DeriveInput) -> T &self.context } + fn context_mut(&mut self) -> &mut ::iroha_executor::prelude::Context { + &mut self.context + } + fn verdict(&self) -> &::iroha_executor::prelude::Result { &self.verdict } diff --git a/crates/iroha_genesis/src/lib.rs b/crates/iroha_genesis/src/lib.rs index 7d11ccd9f63..da671bafec9 100644 --- a/crates/iroha_genesis/src/lib.rs +++ b/crates/iroha_genesis/src/lib.rs @@ -12,9 +12,6 @@ use derive_more::Constructor; use eyre::{eyre, Result, WrapErr}; use iroha_crypto::KeyPair; use iroha_data_model::{block::SignedBlock, parameter::Parameter, prelude::*}; -use iroha_executor_data_model::permission::trigger::{ - CanRegisterAnyTrigger, CanUnregisterAnyTrigger, -}; use iroha_schema::IntoSchema; use 
parity_scale_codec::{Decode, Encode}; use serde::{Deserialize, Serialize}; @@ -22,21 +19,6 @@ use serde::{Deserialize, Serialize}; /// Domain of the genesis account, technically required for the pre-genesis state pub static GENESIS_DOMAIN_ID: LazyLock = LazyLock::new(|| "genesis".parse().unwrap()); -/// Domain of the system account, implicitly registered in the genesis -pub static SYSTEM_DOMAIN_ID: LazyLock = LazyLock::new(|| "system".parse().unwrap()); - -/// The root authority for internal operations, implicitly registered in the genesis -// FIXME #5022 deny external access -// kagami crypto --seed "system" -pub static SYSTEM_ACCOUNT_ID: LazyLock = LazyLock::new(|| { - AccountId::new( - SYSTEM_DOMAIN_ID.clone(), - "ed0120D8B64D62FD8E09B9F29FE04D9C63E312EFB1CB29F1BF6AF00EBC263007AE75F7" - .parse() - .unwrap(), - ) -}); - /// Genesis block. /// /// First transaction must contain single [`Upgrade`] instruction to set executor. @@ -249,37 +231,6 @@ impl GenesisBuilder { } } - /// Entry system entities to serve standard functionality. 
- pub fn install_libs(self) -> Self { - // Register a trigger that reacts to domain creation (or owner changes) and registers (or replaces) a multisig accounts registry for the domain - let multisig_domains_initializer = GenesisWasmTrigger::new( - "multisig_domains".parse().unwrap(), - GenesisWasmAction::new( - "multisig_domains.wasm", - Repeats::Indefinitely, - SYSTEM_ACCOUNT_ID.clone(), - DomainEventFilter::new() - .for_events(DomainEventSet::Created | DomainEventSet::OwnerChanged), - ), - ); - let instructions = vec![ - Register::domain(Domain::new(SYSTEM_DOMAIN_ID.clone())).into(), - Register::account(Account::new(SYSTEM_ACCOUNT_ID.clone())).into(), - Grant::account_permission(CanRegisterAnyTrigger, SYSTEM_ACCOUNT_ID.clone()).into(), - Grant::account_permission(CanUnregisterAnyTrigger, SYSTEM_ACCOUNT_ID.clone()).into(), - ]; - - Self { - chain: self.chain, - executor: self.executor, - parameters: self.parameters, - instructions, - wasm_dir: self.wasm_dir, - wasm_triggers: vec![multisig_domains_initializer], - topology: self.topology, - } - } - /// Entry a domain registration and transition to [`GenesisDomainBuilder`]. 
pub fn domain(self, domain_name: Name) -> GenesisDomainBuilder { self.domain_with_metadata(domain_name, Metadata::default()) diff --git a/crates/iroha_kagami/src/genesis/generate.rs b/crates/iroha_kagami/src/genesis/generate.rs index 441e294c8f2..227b7395ec6 100644 --- a/crates/iroha_kagami/src/genesis/generate.rs +++ b/crates/iroha_kagami/src/genesis/generate.rs @@ -9,9 +9,7 @@ use iroha_data_model::{isi::InstructionBox, parameter::Parameters, prelude::*}; use iroha_executor_data_model::permission::{ domain::CanRegisterDomain, parameter::CanSetParameters, }; -use iroha_genesis::{ - GenesisBuilder, GenesisWasmAction, GenesisWasmTrigger, RawGenesisTransaction, GENESIS_DOMAIN_ID, -}; +use iroha_genesis::{GenesisBuilder, RawGenesisTransaction, GENESIS_DOMAIN_ID}; use iroha_test_samples::{gen_account_in, ALICE_ID, BOB_ID, CARPENTER_ID}; use crate::{Outcome, RunArgs}; @@ -66,7 +64,7 @@ impl RunArgs for Args { } = self; let chain = ChainId::from("00000000-0000-0000-0000-000000000000"); - let builder = GenesisBuilder::new(chain, executor, wasm_dir).install_libs(); + let builder = GenesisBuilder::new(chain, executor, wasm_dir); let genesis = match mode.unwrap_or_default() { Mode::Default => generate_default(builder, genesis_public_key), Mode::Synthetic { @@ -151,24 +149,6 @@ pub fn generate_default( builder = builder.append_instruction(isi); } - // Manually register a multisig accounts registry for wonderland whose creation in genesis does not trigger the initializer - let multisig_accounts_registry_for_wonderland = { - let domain_owner = ALICE_ID.clone(); - let registry_id = "multisig_accounts_wonderland".parse::().unwrap(); - - GenesisWasmTrigger::new( - registry_id.clone(), - GenesisWasmAction::new( - "multisig_accounts.wasm", - Repeats::Indefinitely, - domain_owner, - ExecuteTriggerEventFilter::new().for_trigger(registry_id), - ), - ) - }; - - builder = builder.append_wasm_trigger(multisig_accounts_registry_for_wonderland); - Ok(builder.build_raw()) } diff --git 
a/crates/iroha_schema/src/lib.rs b/crates/iroha_schema/src/lib.rs index 7318842a8d3..45c9e78c5e6 100644 --- a/crates/iroha_schema/src/lib.rs +++ b/crates/iroha_schema/src/lib.rs @@ -16,7 +16,7 @@ use alloc::{ vec::Vec, }; use core::{ - num::{NonZeroU32, NonZeroU64}, + num::{NonZeroU16, NonZeroU32, NonZeroU64}, ops::RangeInclusive, }; @@ -342,7 +342,7 @@ macro_rules! impl_schema_non_zero_int { )*}; } -impl_schema_non_zero_int!(NonZeroU64 => u64, NonZeroU32 => u32); +impl_schema_non_zero_int!(NonZeroU64 => u64, NonZeroU32 => u32, NonZeroU16 => u16); impl TypeId for String { fn id() -> String { diff --git a/crates/iroha_schema_gen/Cargo.toml b/crates/iroha_schema_gen/Cargo.toml index 47822214c03..9fa7345a374 100644 --- a/crates/iroha_schema_gen/Cargo.toml +++ b/crates/iroha_schema_gen/Cargo.toml @@ -14,7 +14,6 @@ workspace = true # TODO: `transparent_api` feature shouldn't be activated/required here iroha_data_model = { workspace = true, features = ["http", "transparent_api"] } iroha_executor_data_model = { workspace = true } -iroha_multisig_data_model = { workspace = true } iroha_primitives = { workspace = true } iroha_genesis = { workspace = true } diff --git a/crates/iroha_schema_gen/src/lib.rs b/crates/iroha_schema_gen/src/lib.rs index 7fd0f8887a6..0c13c559cc4 100644 --- a/crates/iroha_schema_gen/src/lib.rs +++ b/crates/iroha_schema_gen/src/lib.rs @@ -34,8 +34,7 @@ macro_rules! types { /// shall be included recursively. pub fn build_schemas() -> MetaMap { use iroha_data_model::prelude::*; - use iroha_executor_data_model::permission; - use iroha_multisig_data_model as multisig; + use iroha_executor_data_model::{isi::multisig, permission}; macro_rules! schemas { ($($t:ty),* $(,)?) 
=> {{ @@ -85,8 +84,6 @@ pub fn build_schemas() -> MetaMap { permission::asset::CanModifyAssetMetadata, permission::parameter::CanSetParameters, permission::role::CanManageRoles, - permission::trigger::CanRegisterAnyTrigger, - permission::trigger::CanUnregisterAnyTrigger, permission::trigger::CanRegisterTrigger, permission::trigger::CanExecuteTrigger, permission::trigger::CanUnregisterTrigger, @@ -94,9 +91,8 @@ pub fn build_schemas() -> MetaMap { permission::trigger::CanModifyTriggerMetadata, permission::executor::CanUpgradeExecutor, - // Arguments attached to multi-signature operations - multisig::MultisigAccountArgs, - multisig::MultisigTransactionArgs, + // Multi-signature operations + multisig::MultisigInstructionBox, // Genesis file - used by SDKs to generate the genesis block // TODO: IMO it could/should be removed from the schema @@ -287,13 +283,12 @@ types!( MintabilityError, Mintable, Mismatch, - MultisigAccountArgs, - MultisigTransactionArgs, Name, NewAccount, NewAssetDefinition, NewDomain, NewRole, + NonZeroU16, NonZeroU32, NonZeroU64, Numeric, @@ -507,7 +502,7 @@ types!( pub mod complete_data_model { //! 
Complete set of types participating in the schema - pub use core::num::{NonZeroU32, NonZeroU64}; + pub use core::num::{NonZeroU16, NonZeroU32, NonZeroU64}; pub use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet}; pub use iroha_crypto::*; @@ -551,7 +546,6 @@ pub mod complete_data_model { Level, }; pub use iroha_genesis::{GenesisWasmAction, GenesisWasmTrigger, WasmPath}; - pub use iroha_multisig_data_model::{MultisigAccountArgs, MultisigTransactionArgs}; pub use iroha_primitives::{const_vec::ConstVec, conststr::ConstString, json::Json}; pub use iroha_schema::Compact; } @@ -621,12 +615,6 @@ mod tests { insert_into_test_map!(iroha_executor_data_model::permission::asset::CanModifyAssetMetadata); insert_into_test_map!(iroha_executor_data_model::permission::parameter::CanSetParameters); insert_into_test_map!(iroha_executor_data_model::permission::role::CanManageRoles); - insert_into_test_map!( - iroha_executor_data_model::permission::trigger::CanRegisterAnyTrigger - ); - insert_into_test_map!( - iroha_executor_data_model::permission::trigger::CanUnregisterAnyTrigger - ); insert_into_test_map!(iroha_executor_data_model::permission::trigger::CanRegisterTrigger); insert_into_test_map!(iroha_executor_data_model::permission::trigger::CanExecuteTrigger); insert_into_test_map!(iroha_executor_data_model::permission::trigger::CanUnregisterTrigger); @@ -636,6 +624,11 @@ mod tests { ); insert_into_test_map!(iroha_executor_data_model::permission::executor::CanUpgradeExecutor); + insert_into_test_map!(iroha_executor_data_model::isi::multisig::MultisigInstructionBox); + insert_into_test_map!(iroha_executor_data_model::isi::multisig::MultisigRegister); + insert_into_test_map!(iroha_executor_data_model::isi::multisig::MultisigPropose); + insert_into_test_map!(iroha_executor_data_model::isi::multisig::MultisigApprove); + map } diff --git a/crates/iroha_test_network/src/lib.rs b/crates/iroha_test_network/src/lib.rs index 05cf094b246..5058c9417d3 100644 --- 
a/crates/iroha_test_network/src/lib.rs +++ b/crates/iroha_test_network/src/lib.rs @@ -783,7 +783,7 @@ impl NetworkPeer { /// Generated [`PeerId`] pub fn peer_id(&self) -> PeerId { - self.id.id.clone() + self.id.id().clone() } /// Check whether the peer is running diff --git a/data_model/libs/iroha_multisig_data_model/Cargo.toml b/data_model/libs/iroha_multisig_data_model/Cargo.toml deleted file mode 100644 index a104d502956..00000000000 --- a/data_model/libs/iroha_multisig_data_model/Cargo.toml +++ /dev/null @@ -1,19 +0,0 @@ -[package] -name = "iroha_multisig_data_model" - -edition.workspace = true -version.workspace = true -authors.workspace = true - -license.workspace = true - -[lints] -workspace = true - -[dependencies] -iroha_data_model.workspace = true -iroha_schema.workspace = true - -parity-scale-codec = { workspace = true, features = ["derive"] } -serde.workspace = true -serde_json.workspace = true diff --git a/data_model/libs/iroha_multisig_data_model/src/lib.rs b/data_model/libs/iroha_multisig_data_model/src/lib.rs deleted file mode 100644 index 6e83490cd62..00000000000 --- a/data_model/libs/iroha_multisig_data_model/src/lib.rs +++ /dev/null @@ -1,74 +0,0 @@ -//! Arguments attached on executing triggers for multisig accounts or transactions - -#![no_std] - -extern crate alloc; - -use alloc::{collections::btree_map::BTreeMap, format, string::String, vec::Vec}; - -use iroha_data_model::prelude::*; -use iroha_schema::IntoSchema; -use parity_scale_codec::{Decode, Encode}; -use serde::{Deserialize, Serialize}; - -/// Arguments to register multisig account -#[derive(Debug, Clone, Decode, Encode, Serialize, Deserialize, IntoSchema)] -pub struct MultisigAccountArgs { - /// Multisig account to be registered - ///
- /// - /// Any corresponding private key allows the owner to manipulate this account as a ordinary personal account - /// - ///
- // FIXME #5022 prevent multisig monopoly - // FIXME #5022 stop accepting user input: otherwise, after #4426 pre-registration account will be hijacked as a multisig account - pub account: PublicKey, - /// List of accounts and their relative weights of responsibility for the multisig - pub signatories: BTreeMap, - /// Threshold of total weight at which the multisig is considered authenticated - pub quorum: u16, - /// Multisig transaction time-to-live in milliseconds based on block timestamps. Defaults to [`DEFAULT_MULTISIG_TTL_MS`] - pub transaction_ttl_ms: u64, -} - -type Weight = u8; - -/// Default multisig transaction time-to-live in milliseconds based on block timestamps -pub const DEFAULT_MULTISIG_TTL_MS: u64 = 60 * 60 * 1_000; // 1 hour - -/// Arguments to propose or approve multisig transaction -#[derive(Debug, Clone, Decode, Encode, Serialize, Deserialize, IntoSchema)] -pub enum MultisigTransactionArgs { - /// Propose instructions and initialize approvals with the proposer's one - Propose(Vec), - /// Approve certain instructions - Approve(HashOf>), -} - -impl From for Json { - fn from(details: MultisigAccountArgs) -> Self { - Json::new(details) - } -} - -impl TryFrom<&Json> for MultisigAccountArgs { - type Error = serde_json::Error; - - fn try_from(payload: &Json) -> serde_json::Result { - serde_json::from_str::(payload.as_ref()) - } -} - -impl From for Json { - fn from(details: MultisigTransactionArgs) -> Self { - Json::new(details) - } -} - -impl TryFrom<&Json> for MultisigTransactionArgs { - type Error = serde_json::Error; - - fn try_from(payload: &Json) -> serde_json::Result { - serde_json::from_str::(payload.as_ref()) - } -} diff --git a/defaults/genesis.json b/defaults/genesis.json index 58e7993996d..a859de841cf 100644 --- a/defaults/genesis.json +++ b/defaults/genesis.json @@ -24,45 +24,6 @@ } }, "instructions": [ - { - "Register": { - "Domain": { - "id": "system", - "logo": null, - "metadata": {} - } - } - }, - { - "Register": { - "Account": { - 
"id": "ed0120D8B64D62FD8E09B9F29FE04D9C63E312EFB1CB29F1BF6AF00EBC263007AE75F7@system", - "metadata": {} - } - } - }, - { - "Grant": { - "Permission": { - "object": { - "name": "CanRegisterAnyTrigger", - "payload": null - }, - "destination": "ed0120D8B64D62FD8E09B9F29FE04D9C63E312EFB1CB29F1BF6AF00EBC263007AE75F7@system" - } - } - }, - { - "Grant": { - "Permission": { - "object": { - "name": "CanUnregisterAnyTrigger", - "payload": null - }, - "destination": "ed0120D8B64D62FD8E09B9F29FE04D9C63E312EFB1CB29F1BF6AF00EBC263007AE75F7@system" - } - } - }, { "Register": { "Domain": { @@ -191,40 +152,6 @@ } ], "wasm_dir": "libs", - "wasm_triggers": [ - { - "id": "multisig_domains", - "action": { - "executable": "multisig_domains.wasm", - "repeats": "Indefinitely", - "authority": "ed0120D8B64D62FD8E09B9F29FE04D9C63E312EFB1CB29F1BF6AF00EBC263007AE75F7@system", - "filter": { - "Data": { - "Domain": { - "id_matcher": null, - "event_set": [ - "Created", - "OwnerChanged" - ] - } - } - } - } - }, - { - "id": "multisig_accounts_wonderland", - "action": { - "executable": "multisig_accounts.wasm", - "repeats": "Indefinitely", - "authority": "ed0120CE7FA46C9DCE7EA4B125E2E36BDB63EA33073E7590AC92816AE1E861B7048B03@wonderland", - "filter": { - "ExecuteTrigger": { - "trigger_id": "multisig_accounts_wonderland", - "authority": null - } - } - } - } - ], + "wasm_triggers": [], "topology": [] } diff --git a/docs/source/references/schema.json b/docs/source/references/schema.json index 375ab8fcc43..dc3c8ea26b3 100644 --- a/docs/source/references/schema.json +++ b/docs/source/references/schema.json @@ -872,7 +872,6 @@ } ] }, - "CanRegisterAnyTrigger": null, "CanRegisterAsset": { "Struct": [ { @@ -930,7 +929,6 @@ } ] }, - "CanUnregisterAnyTrigger": null, "CanUnregisterAsset": { "Struct": [ { @@ -2601,37 +2599,66 @@ } ] }, - "MultisigAccountArgs": { + "MultisigApprove": { "Struct": [ { "name": "account", - "type": "PublicKey" + "type": "AccountId" }, { - "name": "signatories", - "type": "SortedMap" 
+ "name": "instructions_hash", + "type": "HashOf>" + } + ] + }, + "MultisigInstructionBox": { + "Enum": [ + { + "tag": "Register", + "discriminant": 0, + "type": "MultisigRegister" }, { - "name": "quorum", - "type": "u16" + "tag": "Propose", + "discriminant": 1, + "type": "MultisigPropose" }, { - "name": "transaction_ttl_ms", - "type": "u64" + "tag": "Approve", + "discriminant": 2, + "type": "MultisigApprove" } ] }, - "MultisigTransactionArgs": { - "Enum": [ + "MultisigPropose": { + "Struct": [ { - "tag": "Propose", - "discriminant": 0, + "name": "account", + "type": "AccountId" + }, + { + "name": "instructions", "type": "Vec" + } + ] + }, + "MultisigRegister": { + "Struct": [ + { + "name": "account", + "type": "AccountId" }, { - "tag": "Approve", - "discriminant": 1, - "type": "HashOf>" + "name": "signatories", + "type": "SortedMap" + }, + { + "name": "quorum", + "type": "NonZero" + }, + { + "name": "transaction_ttl_ms", + "type": "NonZero" } ] }, @@ -2700,6 +2727,7 @@ } ] }, + "NonZero": "u16", "NonZero": "u32", "NonZero": "u64", "Numeric": { diff --git a/scripts/build_wasm.sh b/scripts/build_wasm.sh index 8e28b719594..da0ee864647 100755 --- a/scripts/build_wasm.sh +++ b/scripts/build_wasm.sh @@ -10,9 +10,6 @@ build() { "libs") NAMES=( # order by dependency - "multisig_transactions" - "multisig_accounts" - "multisig_domains" "default_executor" ) ;; diff --git a/scripts/tests/instructions.json b/scripts/tests/instructions.json index 5385f812f03..a7dc30cfffb 100644 --- a/scripts/tests/instructions.json +++ b/scripts/tests/instructions.json @@ -3,7 +3,7 @@ "SetKeyValue": { "Account": { "object": "ed01201F89368A4F322263C6F1AEF156759A83FB1AD7D93BAA66BFDFA973ACBADA462F@wonderland", - "key": "key", + "key": "success_marker", "value": "congratulations" } } diff --git a/scripts/tests/multisig.recursion.sh b/scripts/tests/multisig.recursion.sh index cc69c33723e..1561ca2cf3e 100644 --- a/scripts/tests/multisig.recursion.sh +++ b/scripts/tests/multisig.recursion.sh @@ -68,25 
+68,14 @@ INSTRUCTIONS="../scripts/tests/instructions.json" propose_stdout=($(cat $INSTRUCTIONS | ./iroha --config "client.0.toml" multisig propose --account $MSA_012345)) INSTRUCTIONS_HASH=${propose_stdout[0]} -# ticks as many times as the multisig recursion -TICK="../scripts/tests/tick.json" -for i in $(seq 0 1); do - cat $TICK | ./iroha json transaction -done - # check that one of the leaf signatories is involved LIST=$(./iroha --config "client.5.toml" multisig list all) echo "$LIST" | grep $INSTRUCTIONS_HASH # approve the multisig transaction -HASH_TO_12345=$(echo "$LIST" | grep -A1 "multisig_transactions" | sed 's/_/@/g' | grep -A1 $MSA_345 | tail -n 1 | tr -d '"') +HASH_TO_12345=$(echo "$LIST" | grep -A1 $MSA_345 | tail -n 1 | tr -d '"') ./iroha --config "client.5.toml" multisig approve --account $MSA_345 --instructions-hash $HASH_TO_12345 -# ticks as many times as the multisig recursion -for i in $(seq 0 1); do - cat $TICK | ./iroha json transaction -done - # check that the multisig transaction is executed ./iroha account list all | grep "congratulations" ! 
./iroha --config "client.5.toml" multisig list all | grep $INSTRUCTIONS_HASH diff --git a/scripts/tests/multisig.sh b/scripts/tests/multisig.sh index 272e52b0cfb..c3bfa298d86 100644 --- a/scripts/tests/multisig.sh +++ b/scripts/tests/multisig.sh @@ -38,12 +38,12 @@ for signatory in ${SIGNATORIES[@]}; do ./iroha account register --id $signatory done -# register a multisig account +# register a multisig account by the domain owner MULTISIG_ACCOUNT=$(gen_account_id "msa") WEIGHTS=($(yes 1 | head -n $N_SIGNATORIES)) # equal votes QUORUM=$N_SIGNATORIES # unanimous TRANSACTION_TTL="1y 6M 2w 3d 12h 30m 30s 500ms" -./iroha --config "client.1.toml" multisig register --account $MULTISIG_ACCOUNT --signatories ${SIGNATORIES[*]} --weights ${WEIGHTS[*]} --quorum $QUORUM --transaction-ttl "$TRANSACTION_TTL" +./iroha --config "client.toml" multisig register --account $MULTISIG_ACCOUNT --signatories ${SIGNATORIES[*]} --weights ${WEIGHTS[*]} --quorum $QUORUM --transaction-ttl "$TRANSACTION_TTL" # propose a multisig transaction INSTRUCTIONS="../scripts/tests/instructions.json" diff --git a/wasm/Cargo.toml b/wasm/Cargo.toml index ad54dafcb30..60db2044600 100644 --- a/wasm/Cargo.toml +++ b/wasm/Cargo.toml @@ -30,7 +30,6 @@ iroha_executor = { version = "=2.0.0-rc.1.0", path = "../crates/iroha_executor", iroha_schema = { version = "=2.0.0-rc.1.0", path = "../crates/iroha_schema" } iroha_data_model = { version = "=2.0.0-rc.1.0", path = "../crates/iroha_data_model", default-features = false } -iroha_multisig_data_model = { version = "=2.0.0-rc.1.0", path = "../data_model/libs/iroha_multisig_data_model" } iroha_executor_data_model = { version = "=2.0.0-rc.1.0", path = "../crates/iroha_executor_data_model" } mint_rose_trigger_data_model = { path = "../data_model/samples/mint_rose_trigger_data_model" } executor_custom_data_model = { path = "../data_model/samples/executor_custom_data_model" } diff --git a/wasm/libs/multisig_accounts/Cargo.toml b/wasm/libs/multisig_accounts/Cargo.toml deleted 
file mode 100644 index d8162aa48c9..00000000000 --- a/wasm/libs/multisig_accounts/Cargo.toml +++ /dev/null @@ -1,22 +0,0 @@ -[package] -name = "multisig_accounts" - -edition.workspace = true -version.workspace = true -authors.workspace = true - -license.workspace = true - -[lib] -crate-type = ['cdylib'] - -[dependencies] -iroha_trigger.workspace = true -iroha_executor_data_model.workspace = true -iroha_multisig_data_model.workspace = true - -panic-halt.workspace = true -dlmalloc.workspace = true - -serde = { workspace = true, features = ["derive"] } -serde_json = { workspace = true, default-features = false } diff --git a/wasm/libs/multisig_accounts/src/lib.rs b/wasm/libs/multisig_accounts/src/lib.rs deleted file mode 100644 index 24401b85916..00000000000 --- a/wasm/libs/multisig_accounts/src/lib.rs +++ /dev/null @@ -1,150 +0,0 @@ -//! Trigger given per domain to control multi-signature accounts and corresponding triggers - -#![no_std] - -extern crate alloc; -#[cfg(not(test))] -extern crate panic_halt; - -use alloc::format; - -use dlmalloc::GlobalDlmalloc; -use iroha_executor_data_model::permission::trigger::CanExecuteTrigger; -use iroha_multisig_data_model::MultisigAccountArgs; -use iroha_trigger::prelude::*; - -#[global_allocator] -static ALLOC: GlobalDlmalloc = GlobalDlmalloc; - -// Binary containing common logic to each multisig account for handling multisig transactions -const MULTISIG_TRANSACTIONS_WASM: &[u8] = core::include_bytes!(concat!( - core::env!("CARGO_MANIFEST_DIR"), - "/../../target/prebuilt/libs/multisig_transactions.wasm" -)); - -#[iroha_trigger::main] -fn main(host: Iroha, context: Context) { - let EventBox::ExecuteTrigger(event) = context.event else { - dbg_panic!("trigger misused: must be triggered only by a call"); - }; - let args: MultisigAccountArgs = event - .args() - .try_into_any() - .dbg_expect("args should be for a multisig account"); - let domain_id = context - .id - .name() - .as_ref() - .strip_prefix("multisig_accounts_") - 
.and_then(|s| s.parse::().ok()) - .dbg_unwrap(); - let account_id = AccountId::new(domain_id, args.account); - - host.submit(&Register::account(Account::new(account_id.clone()))) - .dbg_expect("accounts registry should successfully register a multisig account"); - - let multisig_transactions_registry_id: TriggerId = format!( - "multisig_transactions_{}_{}", - account_id.signatory(), - account_id.domain() - ) - .parse() - .dbg_unwrap(); - - let multisig_transactions_registry = Trigger::new( - multisig_transactions_registry_id.clone(), - Action::new( - WasmSmartContract::from_compiled(MULTISIG_TRANSACTIONS_WASM.to_vec()), - Repeats::Indefinitely, - account_id.clone(), - ExecuteTriggerEventFilter::new().for_trigger(multisig_transactions_registry_id.clone()), - ), - ); - - host.submit(&Register::trigger(multisig_transactions_registry)) - .dbg_expect("accounts registry should successfully register a transactions registry"); - - host.submit(&SetKeyValue::trigger( - multisig_transactions_registry_id.clone(), - "signatories".parse().unwrap(), - Json::new(&args.signatories), - )) - .dbg_unwrap(); - - host.submit(&SetKeyValue::trigger( - multisig_transactions_registry_id.clone(), - "quorum".parse().unwrap(), - Json::new(&args.quorum), - )) - .dbg_unwrap(); - - host.submit(&SetKeyValue::trigger( - multisig_transactions_registry_id.clone(), - "transaction_ttl_ms".parse().unwrap(), - Json::new(&args.transaction_ttl_ms), - )) - .dbg_unwrap(); - - let role_id: RoleId = format!( - "multisig_signatory_{}_{}", - account_id.signatory(), - account_id.domain() - ) - .parse() - .dbg_unwrap(); - - host.submit(&Register::role( - // Temporarily grant a multisig role to the trigger authority to delegate the role to the signatories - Role::new(role_id.clone(), context.authority.clone()), - )) - .dbg_expect("accounts registry should successfully register a multisig role"); - - for signatory in args.signatories.keys().cloned() { - let is_multisig_again = { - let sub_role_id: RoleId = format!( 
- "multisig_signatory_{}_{}", - signatory.signatory(), - signatory.domain() - ) - .parse() - .dbg_unwrap(); - - host.query(FindRoleIds) - .filter_with(|role_id| role_id.eq(sub_role_id)) - .execute_single_opt() - .dbg_unwrap() - .is_some() - }; - - if is_multisig_again { - // Allow the transactions registry to write to the sub registry - let sub_registry_id: TriggerId = format!( - "multisig_transactions_{}_{}", - signatory.signatory(), - signatory.domain() - ) - .parse() - .dbg_unwrap(); - - host.submit(&Grant::account_permission( - CanExecuteTrigger { - trigger: sub_registry_id, - }, - account_id.clone(), - )) - .dbg_expect( - "accounts registry should successfully grant permission to the multisig account", - ); - } - - host.submit(&Grant::account_role(role_id.clone(), signatory)) - .dbg_expect( - "accounts registry should successfully grant the multisig role to signatories", - ); - } - - host.submit(&Revoke::account_role(role_id.clone(), context.authority)) - .dbg_expect( - "accounts registry should successfully revoke the multisig role from the trigger authority", - ); -} diff --git a/wasm/libs/multisig_domains/Cargo.toml b/wasm/libs/multisig_domains/Cargo.toml deleted file mode 100644 index efbab17b923..00000000000 --- a/wasm/libs/multisig_domains/Cargo.toml +++ /dev/null @@ -1,21 +0,0 @@ -[package] -name = "multisig_domains" - -edition.workspace = true -version.workspace = true -authors.workspace = true - -license.workspace = true - -[lib] -crate-type = ['cdylib'] - -[dependencies] -iroha_trigger.workspace = true -iroha_executor_data_model.workspace = true - -panic-halt.workspace = true -dlmalloc.workspace = true - -serde = { workspace = true, features = ["derive"] } -serde_json = { workspace = true, default-features = false } diff --git a/wasm/libs/multisig_domains/src/lib.rs b/wasm/libs/multisig_domains/src/lib.rs deleted file mode 100644 index 9b93d096d7f..00000000000 --- a/wasm/libs/multisig_domains/src/lib.rs +++ /dev/null @@ -1,77 +0,0 @@ -//! 
Trigger of world-level authority to enable multisig functionality for domains - -#![no_std] - -extern crate alloc; -#[cfg(not(test))] -extern crate panic_halt; - -use alloc::format; - -use dlmalloc::GlobalDlmalloc; -use iroha_trigger::prelude::*; - -#[global_allocator] -static ALLOC: GlobalDlmalloc = GlobalDlmalloc; - -// Binary containing common logic to each domain for handling multisig accounts -const MULTISIG_ACCOUNTS_WASM: &[u8] = core::include_bytes!(concat!( - core::env!("CARGO_MANIFEST_DIR"), - "/../../target/prebuilt/libs/multisig_accounts.wasm" -)); - -#[iroha_trigger::main] -fn main(host: Iroha, context: Context) { - let EventBox::Data(DataEvent::Domain(event)) = context.event else { - dbg_panic!("trigger misused: must be triggered only by a domain event"); - }; - let (domain_id, domain_owner, owner_changed) = match event { - DomainEvent::Created(domain) => (domain.id().clone(), domain.owned_by().clone(), false), - DomainEvent::OwnerChanged(owner_changed) => ( - owner_changed.domain().clone(), - owner_changed.new_owner().clone(), - true, - ), - _ => dbg_panic!( - "trigger misused: must be triggered only when domain created or owner changed" - ), - }; - - let accounts_registry_id: TriggerId = format!("multisig_accounts_{}", domain_id) - .parse() - .dbg_unwrap(); - - let accounts_registry = if owner_changed { - let existing = host - .query(FindTriggers::new()) - .filter_with(|trigger| trigger.id.eq(accounts_registry_id.clone())) - .execute_single() - .dbg_expect("accounts registry should be existing"); - - host.submit(&Unregister::trigger(existing.id().clone())) - .dbg_expect("accounts registry should be successfully unregistered"); - - Trigger::new( - existing.id().clone(), - Action::new( - existing.action().executable().clone(), - existing.action().repeats().clone(), - domain_owner, - existing.action().filter().clone(), - ), - ) - } else { - Trigger::new( - accounts_registry_id.clone(), - Action::new( - 
WasmSmartContract::from_compiled(MULTISIG_ACCOUNTS_WASM.to_vec()), - Repeats::Indefinitely, - domain_owner, - ExecuteTriggerEventFilter::new().for_trigger(accounts_registry_id.clone()), - ), - ) - }; - - host.submit(&Register::trigger(accounts_registry)) - .dbg_expect("accounts registry should be successfully registered"); -} diff --git a/wasm/libs/multisig_transactions/Cargo.toml b/wasm/libs/multisig_transactions/Cargo.toml deleted file mode 100644 index dd676e46c23..00000000000 --- a/wasm/libs/multisig_transactions/Cargo.toml +++ /dev/null @@ -1,21 +0,0 @@ -[package] -name = "multisig_transactions" - -edition.workspace = true -version.workspace = true -authors.workspace = true - -license.workspace = true - -[lib] -crate-type = ['cdylib'] - -[dependencies] -iroha_trigger.workspace = true -iroha_multisig_data_model.workspace = true - -panic-halt.workspace = true -dlmalloc.workspace = true - -serde = { workspace = true, features = ["derive"] } -serde_json = { workspace = true, default-features = false } diff --git a/wasm/libs/multisig_transactions/src/lib.rs b/wasm/libs/multisig_transactions/src/lib.rs deleted file mode 100644 index d70c7fc870c..00000000000 --- a/wasm/libs/multisig_transactions/src/lib.rs +++ /dev/null @@ -1,228 +0,0 @@ -//! 
Trigger given per multi-signature account to control multi-signature transactions - -#![no_std] - -extern crate alloc; -#[cfg(not(test))] -extern crate panic_halt; - -use alloc::{ - collections::{btree_map::BTreeMap, btree_set::BTreeSet}, - format, - vec::Vec, -}; - -use dlmalloc::GlobalDlmalloc; -use iroha_multisig_data_model::MultisigTransactionArgs; -use iroha_trigger::prelude::*; - -#[global_allocator] -static ALLOC: GlobalDlmalloc = GlobalDlmalloc; - -#[iroha_trigger::main] -fn main(host: Iroha, context: Context) { - let EventBox::ExecuteTrigger(event) = context.event else { - dbg_panic!("trigger misused: must be triggered only by a call"); - }; - let trigger_id = context.id; - let args: MultisigTransactionArgs = event - .args() - .try_into_any() - .dbg_expect("args should be for a multisig transaction"); - let signatory = event.authority().clone(); - - let instructions_hash = match &args { - MultisigTransactionArgs::Propose(instructions) => HashOf::new(instructions), - MultisigTransactionArgs::Approve(instructions_hash) => *instructions_hash, - }; - let instructions_metadata_key: Name = format!("proposals/{instructions_hash}/instructions") - .parse() - .unwrap(); - let proposed_at_ms_metadata_key: Name = format!("proposals/{instructions_hash}/proposed_at_ms") - .parse() - .unwrap(); - let approvals_metadata_key: Name = format!("proposals/{instructions_hash}/approvals") - .parse() - .unwrap(); - - let signatories: BTreeMap = host - .query_single(FindTriggerMetadata::new( - trigger_id.clone(), - "signatories".parse().unwrap(), - )) - .dbg_unwrap() - .try_into_any() - .dbg_unwrap(); - - // Recursively deploy multisig authentication down to the personal leaf signatories - for account_id in signatories.keys() { - let sub_transactions_registry_id: TriggerId = format!( - "multisig_transactions_{}_{}", - account_id.signatory(), - account_id.domain() - ) - .parse() - .unwrap(); - - if let Ok(_sub_registry) = host - .query(FindTriggers::new()) - .filter_with(|trigger| 
trigger.id.eq(sub_transactions_registry_id.clone())) - .execute_single() - { - let propose_to_approve_me: InstructionBox = { - let approve_me: InstructionBox = { - let args = MultisigTransactionArgs::Approve(instructions_hash); - ExecuteTrigger::new(trigger_id.clone()) - .with_args(&args) - .into() - }; - let args = MultisigTransactionArgs::Propose([approve_me].to_vec()); - - ExecuteTrigger::new(sub_transactions_registry_id.clone()) - .with_args(&args) - .into() - }; - host.submit(&propose_to_approve_me) - .dbg_expect("should successfully write to sub registry"); - } - } - - let mut block_headers = host.query(FindBlockHeaders).execute().dbg_unwrap(); - let now_ms: u64 = block_headers - .next() - .dbg_unwrap() - .dbg_unwrap() - .creation_time() - .as_millis() - .try_into() - .dbg_unwrap(); - - let (approvals, instructions) = match args { - MultisigTransactionArgs::Propose(instructions) => { - host.query_single(FindTriggerMetadata::new( - trigger_id.clone(), - approvals_metadata_key.clone(), - )) - .expect_err("instructions shouldn't already be proposed"); - - let approvals = BTreeSet::from([signatory.clone()]); - - host.submit(&SetKeyValue::trigger( - trigger_id.clone(), - instructions_metadata_key.clone(), - Json::new(&instructions), - )) - .dbg_unwrap(); - - host.submit(&SetKeyValue::trigger( - trigger_id.clone(), - proposed_at_ms_metadata_key.clone(), - Json::new(&now_ms), - )) - .dbg_unwrap(); - - host.submit(&SetKeyValue::trigger( - trigger_id.clone(), - approvals_metadata_key.clone(), - Json::new(&approvals), - )) - .dbg_unwrap(); - - (approvals, instructions) - } - MultisigTransactionArgs::Approve(_instructions_hash) => { - let mut approvals: BTreeSet = host - .query_single(FindTriggerMetadata::new( - trigger_id.clone(), - approvals_metadata_key.clone(), - )) - .dbg_expect("instructions should be proposed first") - .try_into_any() - .dbg_unwrap(); - - approvals.insert(signatory.clone()); - - host.submit(&SetKeyValue::trigger( - trigger_id.clone(), - 
approvals_metadata_key.clone(), - Json::new(&approvals), - )) - .dbg_unwrap(); - - let instructions: Vec = host - .query_single(FindTriggerMetadata::new( - trigger_id.clone(), - instructions_metadata_key.clone(), - )) - .dbg_unwrap() - .try_into_any() - .dbg_unwrap(); - - (approvals, instructions) - } - }; - - let quorum: u16 = host - .query_single(FindTriggerMetadata::new( - trigger_id.clone(), - "quorum".parse().unwrap(), - )) - .dbg_unwrap() - .try_into_any() - .dbg_unwrap(); - - let is_authenticated = quorum - <= signatories - .into_iter() - .filter(|(id, _)| approvals.contains(&id)) - .map(|(_, weight)| weight as u16) - .sum(); - - let is_expired = { - let proposed_at_ms: u64 = host - .query_single(FindTriggerMetadata::new( - trigger_id.clone(), - proposed_at_ms_metadata_key.clone(), - )) - .dbg_unwrap() - .try_into_any() - .dbg_unwrap(); - - let transaction_ttl_ms: u64 = host - .query_single(FindTriggerMetadata::new( - trigger_id.clone(), - "transaction_ttl_ms".parse().unwrap(), - )) - .dbg_unwrap() - .try_into_any() - .dbg_unwrap(); - - proposed_at_ms.saturating_add(transaction_ttl_ms) < now_ms - }; - - if is_authenticated || is_expired { - // Cleanup approvals and instructions - host.submit(&RemoveKeyValue::trigger( - trigger_id.clone(), - approvals_metadata_key, - )) - .dbg_unwrap(); - host.submit(&RemoveKeyValue::trigger( - trigger_id.clone(), - proposed_at_ms_metadata_key, - )) - .dbg_unwrap(); - host.submit(&RemoveKeyValue::trigger( - trigger_id.clone(), - instructions_metadata_key, - )) - .dbg_unwrap(); - - if !is_expired { - // Execute instructions proposal which collected enough approvals - for isi in instructions { - host.submit(&isi).dbg_unwrap(); - } - } - } -} diff --git a/wasm/samples/executor_custom_instructions_complex/src/lib.rs b/wasm/samples/executor_custom_instructions_complex/src/lib.rs index f75bc03538c..3d6e3f425e1 100644 --- a/wasm/samples/executor_custom_instructions_complex/src/lib.rs +++ 
b/wasm/samples/executor_custom_instructions_complex/src/lib.rs @@ -23,14 +23,14 @@ use iroha_executor::{ static ALLOC: GlobalDlmalloc = GlobalDlmalloc; #[derive(Visit, Execute, Entrypoints)] -#[visit(custom(visit_custom))] +#[visit(custom(visit_custom_instruction))] struct Executor { host: Iroha, context: iroha_executor::prelude::Context, verdict: Result, } -fn visit_custom(executor: &mut Executor, isi: &CustomInstruction) { +fn visit_custom_instruction(executor: &mut Executor, isi: &CustomInstruction) { let Ok(isi) = CustomInstructionExpr::try_from(isi.payload()) else { deny!(executor, "Failed to parse custom instruction"); }; diff --git a/wasm/samples/executor_custom_instructions_simple/src/lib.rs b/wasm/samples/executor_custom_instructions_simple/src/lib.rs index 9a5db335edc..1f9203eb862 100644 --- a/wasm/samples/executor_custom_instructions_simple/src/lib.rs +++ b/wasm/samples/executor_custom_instructions_simple/src/lib.rs @@ -16,14 +16,14 @@ use iroha_executor::{data_model::isi::CustomInstruction, prelude::*}; static ALLOC: GlobalDlmalloc = GlobalDlmalloc; #[derive(Visit, Execute, Entrypoints)] -#[visit(custom(visit_custom))] +#[visit(custom(visit_custom_instruction))] struct Executor { host: Iroha, context: Context, verdict: Result, } -fn visit_custom(executor: &mut Executor, isi: &CustomInstruction) { +fn visit_custom_instruction(executor: &mut Executor, isi: &CustomInstruction) { let Ok(isi) = CustomInstructionBox::try_from(isi.payload()) else { deny!(executor, "Failed to parse custom instruction"); }; From a8cf1c16357824b58757ad1793c2c91a8452b9d6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Marin=20Ver=C5=A1i=C4=87?= Date: Tue, 19 Nov 2024 07:52:13 +0100 Subject: [PATCH 2/5] feat: add /peers API endpoint to torii (#5235) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Marin Veršić --- crates/iroha/tests/status_response.rs | 2 +- crates/iroha_core/src/lib.rs | 2 +- crates/iroha_core/src/metrics.rs | 12 
++- .../src/smartcontracts/isi/world.rs | 8 +- crates/iroha_core/src/state.rs | 66 +++++++-------- crates/iroha_core/src/sumeragi/main_loop.rs | 6 +- crates/iroha_core/src/sumeragi/mod.rs | 4 +- crates/iroha_schema_gen/src/lib.rs | 21 ++++- crates/iroha_telemetry/src/metrics.rs | 10 +-- crates/iroha_torii/src/lib.rs | 21 +++-- crates/iroha_torii/src/routing.rs | 8 +- crates/iroha_torii_const/src/lib.rs | 2 + docs/source/references/schema.json | 81 +++++++++++++++++++ .../common/schemas/get_status_response.json | 4 +- 14 files changed, 175 insertions(+), 72 deletions(-) diff --git a/crates/iroha/tests/status_response.rs b/crates/iroha/tests/status_response.rs index 41e4982cff3..b6e1e9526d7 100644 --- a/crates/iroha/tests/status_response.rs +++ b/crates/iroha/tests/status_response.rs @@ -7,7 +7,7 @@ use tokio::task::spawn_blocking; fn status_eq_excluding_uptime_and_queue(lhs: &Status, rhs: &Status) -> bool { lhs.peers == rhs.peers && lhs.blocks == rhs.blocks - && lhs.txs_accepted == rhs.txs_accepted + && lhs.txs_approved == rhs.txs_approved && lhs.txs_rejected == rhs.txs_rejected && lhs.view_changes == rhs.view_changes } diff --git a/crates/iroha_core/src/lib.rs b/crates/iroha_core/src/lib.rs index 29ba08689ff..1019560ed63 100644 --- a/crates/iroha_core/src/lib.rs +++ b/crates/iroha_core/src/lib.rs @@ -38,7 +38,7 @@ pub const TX_RETRIEVAL_INTERVAL: Duration = Duration::from_millis(100); pub type IrohaNetwork = iroha_p2p::NetworkHandle; /// Ids of peers. -pub type PeersIds = UniqueVec; +pub type Peers = UniqueVec; /// Type of `Sender` which should be used for channels of `Event` messages. 
pub type EventsSender = broadcast::Sender; diff --git a/crates/iroha_core/src/metrics.rs b/crates/iroha_core/src/metrics.rs index e9ccc1829ae..2d080e288e8 100644 --- a/crates/iroha_core/src/metrics.rs +++ b/crates/iroha_core/src/metrics.rs @@ -3,6 +3,7 @@ use std::{num::NonZeroUsize, sync::Arc, time::SystemTime}; use eyre::{Result, WrapErr as _}; +use iroha_data_model::peer::Peer; use iroha_telemetry::metrics::Metrics; use mv::storage::StorageReadOnly; use parking_lot::Mutex; @@ -76,12 +77,12 @@ impl MetricsReporter { }; block_index += 1; let block_txs_rejected = block.errors().count() as u64; - let block_txs_accepted = block.transactions().count() as u64 - block_txs_rejected; + let block_txs_approved = block.transactions().count() as u64 - block_txs_rejected; self.metrics .txs .with_label_values(&["accepted"]) - .inc_by(block_txs_accepted); + .inc_by(block_txs_approved); self.metrics .txs .with_label_values(&["rejected"]) @@ -89,7 +90,7 @@ impl MetricsReporter { self.metrics .txs .with_label_values(&["total"]) - .inc_by(block_txs_accepted + block_txs_rejected); + .inc_by(block_txs_approved + block_txs_rejected); self.metrics.block_height.inc(); } *lastest_block_height = block_index; @@ -147,4 +148,9 @@ impl MetricsReporter { pub fn metrics(&self) -> &Metrics { &self.metrics } + + /// Last known online peers + pub fn online_peers(&self) -> Vec { + self.network.online_peers(|x| x.iter().cloned().collect()) + } } diff --git a/crates/iroha_core/src/smartcontracts/isi/world.rs b/crates/iroha_core/src/smartcontracts/isi/world.rs index 7e2d18c2508..38d44dd6868 100644 --- a/crates/iroha_core/src/smartcontracts/isi/world.rs +++ b/crates/iroha_core/src/smartcontracts/isi/world.rs @@ -39,8 +39,7 @@ pub mod isi { let peer_id = self.object; let world = &mut state_transaction.world; - if let PushResult::Duplicate(duplicate) = world.trusted_peers_ids.push(peer_id.clone()) - { + if let PushResult::Duplicate(duplicate) = world.peers.push(peer_id.clone()) { return 
Err(RepetitionError { instruction: InstructionType::Register, id: IdBox::PeerId(duplicate), @@ -63,11 +62,11 @@ pub mod isi { ) -> Result<(), Error> { let peer_id = self.object; let world = &mut state_transaction.world; - let Some(index) = world.trusted_peers_ids.iter().position(|id| id == &peer_id) else { + let Some(index) = world.peers.iter().position(|id| id == &peer_id) else { return Err(FindError::Peer(peer_id).into()); }; - world.trusted_peers_ids.remove(index); + world.peers.remove(index); world.emit_events(Some(PeerEvent::Removed(peer_id))); @@ -513,6 +512,7 @@ pub mod query { Ok(state_ro .world() .peers() + .into_iter() .filter(move |peer| filter.applies(peer)) .cloned()) } diff --git a/crates/iroha_core/src/state.rs b/crates/iroha_core/src/state.rs index d77a6c41dc7..e549c393dd6 100644 --- a/crates/iroha_core/src/state.rs +++ b/crates/iroha_core/src/state.rs @@ -56,7 +56,7 @@ use crate::{ }, wasm, Execute, }, - PeersIds, + Peers, }; /// The global entity consisting of `domains`, `triggers` and etc. @@ -65,8 +65,8 @@ use crate::{ pub struct World { /// Iroha on-chain parameters. pub(crate) parameters: Cell, - /// Identifications of discovered trusted peers. - pub(crate) trusted_peers_ids: Cell, + /// Identifications of discovered peers. + pub(crate) peers: Cell, /// Registered domains. pub(crate) domains: Storage, /// Registered accounts. @@ -93,8 +93,8 @@ pub struct World { pub struct WorldBlock<'world> { /// Iroha on-chain parameters. pub parameters: CellBlock<'world, Parameters>, - /// Identifications of discovered trusted peers. - pub(crate) trusted_peers_ids: CellBlock<'world, PeersIds>, + /// Identifications of discovered peers. + pub(crate) peers: CellBlock<'world, Peers>, /// Registered domains. pub(crate) domains: StorageBlock<'world, DomainId, Domain>, /// Registered accounts. @@ -123,8 +123,8 @@ pub struct WorldBlock<'world> { pub struct WorldTransaction<'block, 'world> { /// Iroha on-chain parameters. 
pub(crate) parameters: CellTransaction<'block, 'world, Parameters>, - /// Identifications of discovered trusted peers. - pub(crate) trusted_peers_ids: CellTransaction<'block, 'world, PeersIds>, + /// Identifications of discovered peers. + pub(crate) peers: CellTransaction<'block, 'world, Peers>, /// Registered domains. pub(crate) domains: StorageTransaction<'block, 'world, DomainId, Domain>, /// Registered accounts. @@ -162,8 +162,8 @@ struct TransactionEventBuffer<'block> { pub struct WorldView<'world> { /// Iroha on-chain parameters. pub(crate) parameters: CellView<'world, Parameters>, - /// Identifications of discovered trusted peers. - pub(crate) trusted_peers_ids: CellView<'world, PeersIds>, + /// Identifications of discovered peers. + pub(crate) peers: CellView<'world, Peers>, /// Registered domains. pub(crate) domains: StorageView<'world, DomainId, Domain>, /// Registered accounts. @@ -303,7 +303,7 @@ impl World { Self::default() } - /// Creates a [`World`] with these [`Domain`]s and trusted [`PeerId`]s. + /// Creates a [`World`] with these [`Domain`]s and [`Peer`]s. pub fn with(domains: D, accounts: A, asset_definitions: Ad) -> Self where D: IntoIterator, @@ -313,7 +313,7 @@ impl World { Self::with_assets(domains, accounts, asset_definitions, []) } - /// Creates a [`World`] with these [`Domain`]s and trusted [`PeerId`]s. + /// Creates a [`World`] with these [`Domain`]s and [`Peer`]s. 
pub fn with_assets( domains: D, accounts: A, @@ -352,7 +352,7 @@ impl World { pub fn block(&self) -> WorldBlock { WorldBlock { parameters: self.parameters.block(), - trusted_peers_ids: self.trusted_peers_ids.block(), + peers: self.peers.block(), domains: self.domains.block(), accounts: self.accounts.block(), asset_definitions: self.asset_definitions.block(), @@ -371,7 +371,7 @@ impl World { pub fn block_and_revert(&self) -> WorldBlock { WorldBlock { parameters: self.parameters.block_and_revert(), - trusted_peers_ids: self.trusted_peers_ids.block_and_revert(), + peers: self.peers.block_and_revert(), domains: self.domains.block_and_revert(), accounts: self.accounts.block_and_revert(), asset_definitions: self.asset_definitions.block_and_revert(), @@ -390,7 +390,7 @@ impl World { pub fn view(&self) -> WorldView { WorldView { parameters: self.parameters.view(), - trusted_peers_ids: self.trusted_peers_ids.view(), + peers: self.peers.view(), domains: self.domains.view(), accounts: self.accounts.view(), asset_definitions: self.asset_definitions.view(), @@ -409,7 +409,7 @@ impl World { #[allow(missing_docs)] pub trait WorldReadOnly { fn parameters(&self) -> &Parameters; - fn trusted_peers_ids(&self) -> &PeersIds; + fn peers(&self) -> &Peers; fn domains(&self) -> &impl StorageReadOnly; fn accounts(&self) -> &impl StorageReadOnly; fn asset_definitions(&self) -> &impl StorageReadOnly; @@ -635,17 +635,6 @@ pub trait WorldReadOnly { fn asset_total_amount(&self, definition_id: &AssetDefinitionId) -> Result { Ok(self.asset_definition(definition_id)?.total_quantity) } - - /// Get an immutable iterator over the [`PeerId`]s. - fn peers(&self) -> impl ExactSizeIterator { - self.trusted_peers_ids().iter() - } - - /// Returns reference for trusted peer ids - #[inline] - fn peers_ids(&self) -> &PeersIds { - self.trusted_peers_ids() - } } macro_rules! impl_world_ro { @@ -654,8 +643,8 @@ macro_rules! 
impl_world_ro { fn parameters(&self) -> &Parameters { &self.parameters } - fn trusted_peers_ids(&self) -> &PeersIds { - &self.trusted_peers_ids + fn peers(&self) -> &Peers { + &self.peers } fn domains(&self) -> &impl StorageReadOnly { &self.domains @@ -700,7 +689,7 @@ impl<'world> WorldBlock<'world> { pub fn trasaction(&mut self) -> WorldTransaction<'_, 'world> { WorldTransaction { parameters: self.parameters.transaction(), - trusted_peers_ids: self.trusted_peers_ids.transaction(), + peers: self.peers.transaction(), domains: self.domains.transaction(), accounts: self.accounts.transaction(), asset_definitions: self.asset_definitions.transaction(), @@ -723,7 +712,7 @@ impl<'world> WorldBlock<'world> { // NOTE: intentionally destruct self not to forget commit some fields let Self { parameters, - trusted_peers_ids, + peers, domains, accounts, asset_definitions, @@ -747,7 +736,7 @@ impl<'world> WorldBlock<'world> { asset_definitions.commit(); accounts.commit(); domains.commit(); - trusted_peers_ids.commit(); + peers.commit(); parameters.commit(); } } @@ -758,7 +747,7 @@ impl WorldTransaction<'_, '_> { // NOTE: intentionally destruct self not to forget commit some fields let Self { parameters, - trusted_peers_ids, + peers, domains, accounts, asset_definitions, @@ -781,7 +770,7 @@ impl WorldTransaction<'_, '_> { asset_definitions.apply(); accounts.apply(); domains.apply(); - trusted_peers_ids.apply(); + peers.apply(); parameters.apply(); events_buffer.events_created_in_transaction = 0; } @@ -1864,7 +1853,7 @@ pub(crate) mod deserialize { M: MapAccess<'de>, { let mut parameters = None; - let mut trusted_peers_ids = None; + let mut peers = None; let mut domains = None; let mut accounts = None; let mut asset_definitions = None; @@ -1881,8 +1870,8 @@ pub(crate) mod deserialize { "parameters" => { parameters = Some(map.next_value()?); } - "trusted_peers_ids" => { - trusted_peers_ids = Some(map.next_value()?); + "peers" => { + peers = Some(map.next_value()?); } "domains" => { 
domains = Some(map.next_value()?); @@ -1925,8 +1914,7 @@ pub(crate) mod deserialize { Ok(World { parameters: parameters .ok_or_else(|| serde::de::Error::missing_field("parameters"))?, - trusted_peers_ids: trusted_peers_ids - .ok_or_else(|| serde::de::Error::missing_field("trusted_peers_ids"))?, + peers: peers.ok_or_else(|| serde::de::Error::missing_field("peers"))?, domains: domains .ok_or_else(|| serde::de::Error::missing_field("domains"))?, accounts: accounts @@ -1955,7 +1943,7 @@ pub(crate) mod deserialize { "World", &[ "parameters", - "trusted_peers_ids", + "peers", "domains", "roles", "account_permissions", diff --git a/crates/iroha_core/src/sumeragi/main_loop.rs b/crates/iroha_core/src/sumeragi/main_loop.rs index efdfd30db89..f94df318ed5 100644 --- a/crates/iroha_core/src/sumeragi/main_loop.rs +++ b/crates/iroha_core/src/sumeragi/main_loop.rs @@ -273,7 +273,7 @@ impl Sumeragi { } // NOTE: By this time genesis block is executed and list of trusted peers is updated - self.topology = Topology::new(state_block.world.trusted_peers_ids.clone()); + self.topology = Topology::new(state_block.world.peers.clone()); self.commit_block(block, state_block); return Ok(()); } @@ -318,7 +318,7 @@ impl Sumeragi { ); // NOTE: By this time genesis block is executed and list of trusted peers is updated - self.topology = Topology::new(state_block.world.trusted_peers_ids.clone()); + self.topology = Topology::new(state_block.world.peers.clone()); let genesis = genesis .commit(&self.topology) @@ -343,7 +343,7 @@ impl Sumeragi { let prev_role = self.role(); self.topology - .block_committed(state_block.world.peers().cloned()); + .block_committed(state_block.world.peers().clone()); let state_events = state_block.apply_without_execution(&block, self.topology.as_ref().to_owned()); diff --git a/crates/iroha_core/src/sumeragi/mod.rs b/crates/iroha_core/src/sumeragi/mod.rs index c3a6f51e711..0da7911713f 100644 --- a/crates/iroha_core/src/sumeragi/mod.rs +++ 
b/crates/iroha_core/src/sumeragi/mod.rs @@ -118,10 +118,10 @@ impl SumeragiHandle { .expect("INTERNAL BUG: Invalid block stored in Kura"); if block.as_ref().header().is_genesis() { - *topology = Topology::new(state_block.world.trusted_peers_ids.clone()); + *topology = Topology::new(state_block.world.peers.clone()); } - topology.block_committed(state_block.world.peers().cloned()); + topology.block_committed(state_block.world.peers().clone()); state_block .apply_without_execution(&block, topology.as_ref().to_owned()) diff --git a/crates/iroha_schema_gen/src/lib.rs b/crates/iroha_schema_gen/src/lib.rs index 0c13c559cc4..b0fd0fe0b40 100644 --- a/crates/iroha_schema_gen/src/lib.rs +++ b/crates/iroha_schema_gen/src/lib.rs @@ -45,10 +45,9 @@ pub fn build_schemas() -> MetaMap { } schemas! { - // Transaction - SignedTransaction, + Peer, - // Query + response + SignedTransaction, SignedQuery, QueryResponse, @@ -265,6 +264,8 @@ types!( InstructionType, InvalidParameterError, IpfsPath, + Ipv6Addr, + Ipv4Addr, Json, Level, Log, @@ -322,6 +323,7 @@ types!( PeerEvent, PeerEventFilter, PeerEventSet, + Peer, PeerId, PeerPredicateBox, Permission, @@ -406,6 +408,10 @@ types!( SingularQueryOutputBox, SmartContractParameter, SmartContractParameters, + SocketAddr, + SocketAddrHost, + SocketAddrV4, + SocketAddrV6, Sorting, String, StringPredicateBox, @@ -490,6 +496,8 @@ types!( WasmExecutionFail, WasmSmartContract, + [u16; 8], + [u8; 4], [u8; 32], u16, u32, @@ -546,7 +554,12 @@ pub mod complete_data_model { Level, }; pub use iroha_genesis::{GenesisWasmAction, GenesisWasmTrigger, WasmPath}; - pub use iroha_primitives::{const_vec::ConstVec, conststr::ConstString, json::Json}; + pub use iroha_primitives::{ + addr::{Ipv4Addr, Ipv6Addr, SocketAddr, SocketAddrHost, SocketAddrV4, SocketAddrV6}, + const_vec::ConstVec, + conststr::ConstString, + json::Json, + }; pub use iroha_schema::Compact; } diff --git a/crates/iroha_telemetry/src/metrics.rs b/crates/iroha_telemetry/src/metrics.rs index 
0bf373ea4df..096f5ae57f8 100644 --- a/crates/iroha_telemetry/src/metrics.rs +++ b/crates/iroha_telemetry/src/metrics.rs @@ -54,9 +54,9 @@ pub struct Status { /// Number of committed blocks (blockchain height) #[codec(compact)] pub blocks: u64, - /// Number of accepted transactions + /// Number of approved transactions #[codec(compact)] - pub txs_accepted: u64, + pub txs_approved: u64, /// Number of rejected transactions #[codec(compact)] pub txs_rejected: u64, @@ -76,7 +76,7 @@ impl> From<&T> for Status { Self { peers: val.connected_peers.get(), blocks: val.block_height.get(), - txs_accepted: val.txs.with_label_values(&["accepted"]).get(), + txs_approved: val.txs.with_label_values(&["accepted"]).get(), txs_rejected: val.txs.with_label_values(&["rejected"]).get(), uptime: Uptime(Duration::from_millis(val.uptime_since_genesis_ms.get())), view_changes: val @@ -249,7 +249,7 @@ mod test { Status { peers: 4, blocks: 5, - txs_accepted: 31, + txs_approved: 31, txs_rejected: 3, uptime: Uptime(Duration::new(5, 937_000_000)), view_changes: 2, @@ -268,7 +268,7 @@ mod test { { "peers": 4, "blocks": 5, - "txs_accepted": 31, + "txs_approved": 31, "txs_rejected": 3, "uptime": { "secs": 5, diff --git a/crates/iroha_torii/src/lib.rs b/crates/iroha_torii/src/lib.rs index 20d3a4f53c7..fbf8f61f00e 100644 --- a/crates/iroha_torii/src/lib.rs +++ b/crates/iroha_torii/src/lib.rs @@ -105,6 +105,13 @@ impl Torii { let kiso = self.kiso.clone(); move || routing::handle_get_configuration(kiso) }), + ) + .route( + uri::API_VERSION, + get({ + let state = self.state.clone(); + move || routing::handle_version(state) + }), ); #[cfg(feature = "telemetry")] @@ -122,6 +129,13 @@ impl Torii { } }), ) + .route( + uri::PEERS, + get({ + let metrics_reporter = self.metrics_reporter.clone(); + move || core::future::ready(routing::handle_peers(&metrics_reporter)) + }), + ) .route( uri::STATUS, get({ @@ -137,13 +151,6 @@ impl Torii { let metrics_reporter = self.metrics_reporter.clone(); move || 
core::future::ready(routing::handle_metrics(&metrics_reporter)) }), - ) - .route( - uri::API_VERSION, - get({ - let state = self.state.clone(); - move || routing::handle_version(state) - }), ); #[cfg(feature = "schema")] diff --git a/crates/iroha_torii/src/routing.rs b/crates/iroha_torii/src/routing.rs index 5fa43f72c05..f1190cc7e9a 100644 --- a/crates/iroha_torii/src/routing.rs +++ b/crates/iroha_torii/src/routing.rs @@ -246,7 +246,6 @@ pub mod event { } #[iroha_futures::telemetry_future] -#[cfg(feature = "telemetry")] pub async fn handle_version(state: Arc) -> String { use iroha_version::Version; @@ -274,6 +273,13 @@ pub fn handle_metrics(metrics_reporter: &MetricsReporter) -> Result { .map_err(Error::Prometheus) } +#[cfg(feature = "telemetry")] +pub fn handle_peers(metrics_reporter: &MetricsReporter) -> Response { + update_metrics_gracefully(metrics_reporter); + let peers = metrics_reporter.online_peers(); + axum::Json(peers).into_response() +} + #[cfg(feature = "telemetry")] #[allow(clippy::unnecessary_wraps)] pub fn handle_status( diff --git a/crates/iroha_torii_const/src/lib.rs b/crates/iroha_torii_const/src/lib.rs index 0330e4e52bf..1a1b500f025 100644 --- a/crates/iroha_torii_const/src/lib.rs +++ b/crates/iroha_torii_const/src/lib.rs @@ -14,6 +14,8 @@ pub mod uri { pub const CONSENSUS: &str = "/consensus"; /// Health URI is used to handle incoming Healthcheck requests. pub const HEALTH: &str = "/health"; + /// Peers URI is used to find all peers in the network + pub const PEERS: &str = "/peers"; /// The URI used for block synchronization. pub const BLOCK_SYNC: &str = "/block/sync"; /// The web socket uri used to subscribe to block and transactions statuses. 
diff --git a/docs/source/references/schema.json b/docs/source/references/schema.json index dc3c8ea26b3..a3d775a6900 100644 --- a/docs/source/references/schema.json +++ b/docs/source/references/schema.json @@ -228,12 +228,24 @@ } ] }, + "Array": { + "Array": { + "type": "u16", + "len": 8 + } + }, "Array": { "Array": { "type": "u8", "len": 32 } }, + "Array": { + "Array": { + "type": "u8", + "len": 4 + } + }, "Asset": { "Struct": [ { @@ -2364,6 +2376,8 @@ ] }, "IpfsPath": "String", + "Ipv4Addr": "Array", + "Ipv6Addr": "Array", "Json": "String", "Level": { "Enum": [ @@ -2902,6 +2916,18 @@ } ] }, + "Peer": { + "Struct": [ + { + "name": "address", + "type": "SocketAddr" + }, + { + "name": "id", + "type": "PeerId" + } + ] + }, "PeerEvent": { "Enum": [ { @@ -4221,6 +4247,61 @@ } ] }, + "SocketAddr": { + "Enum": [ + { + "tag": "Ipv4", + "discriminant": 0, + "type": "SocketAddrV4" + }, + { + "tag": "Ipv6", + "discriminant": 1, + "type": "SocketAddrV6" + }, + { + "tag": "Host", + "discriminant": 2, + "type": "SocketAddrHost" + } + ] + }, + "SocketAddrHost": { + "Struct": [ + { + "name": "host", + "type": "String" + }, + { + "name": "port", + "type": "u16" + } + ] + }, + "SocketAddrV4": { + "Struct": [ + { + "name": "ip", + "type": "Ipv4Addr" + }, + { + "name": "port", + "type": "u16" + } + ] + }, + "SocketAddrV6": { + "Struct": [ + { + "name": "ip", + "type": "Ipv6Addr" + }, + { + "name": "port", + "type": "u16" + } + ] + }, "SortedMap": { "Map": { "key": "AccountId", diff --git a/pytests/iroha_torii_tests/common/schemas/get_status_response.json b/pytests/iroha_torii_tests/common/schemas/get_status_response.json index efc5ccdccbc..9388361a18a 100644 --- a/pytests/iroha_torii_tests/common/schemas/get_status_response.json +++ b/pytests/iroha_torii_tests/common/schemas/get_status_response.json @@ -8,7 +8,7 @@ "blocks": { "type": "integer" }, - "txs_accepted": { + "txs_approved": { "type": "integer" }, "txs_rejected": { @@ -33,5 +33,5 @@ "type": "integer" } }, - "required": 
["peers", "blocks", "txs_accepted", "txs_rejected", "uptime", "view_changes", "queue_size"] + "required": ["peers", "blocks", "txs_approved", "txs_rejected", "uptime", "view_changes", "queue_size"] } From 6df8a84ba86b7da5c7a0580517ce6e8a61b40de9 Mon Sep 17 00:00:00 2001 From: Dmitry Murzin Date: Tue, 19 Nov 2024 12:17:38 +0300 Subject: [PATCH 3/5] refactor: Simplify revoking permission in default executor (#5239) Signed-off-by: Dmitry Murzin --- crates/iroha_executor/src/default/mod.rs | 109 +++++------------------ crates/iroha_executor/src/permission.rs | 27 ++++++ 2 files changed, 47 insertions(+), 89 deletions(-) diff --git a/crates/iroha_executor/src/default/mod.rs b/crates/iroha_executor/src/default/mod.rs index 7a8479a8ee3..26d354a3f5d 100644 --- a/crates/iroha_executor/src/default/mod.rs +++ b/crates/iroha_executor/src/default/mod.rs @@ -166,7 +166,7 @@ pub mod domain { use super::*; use crate::permission::{ - account::is_account_owner, accounts_permissions, domain::is_domain_owner, roles_permissions, + account::is_account_owner, domain::is_domain_owner, revoke_permissions, }; pub fn visit_register_domain( @@ -202,30 +202,13 @@ pub mod domain { .is_owned_by(&executor.context().authority, executor.host()) } { - let mut err = None; - for (owner_id, permission) in accounts_permissions(executor.host()) { - if is_permission_domain_associated(&permission, domain_id) { - let isi = &Revoke::account_permission(permission, owner_id.clone()); - - if let Err(error) = executor.host().submit(isi) { - err = Some(error); - break; - } - } - } - if let Some(err) = err { + let err = revoke_permissions(executor, |permission| { + is_permission_domain_associated(permission, domain_id) + }); + if let Err(err) = err { deny!(executor, err); } - for (role_id, permission) in roles_permissions(executor.host()) { - if is_permission_domain_associated(&permission, domain_id) { - let isi = &Revoke::role_permission(permission, role_id.clone()); - - if let Err(err) = 
executor.host().submit(isi) { - deny!(executor, err); - } - } - } execute!(executor, isi); } deny!(executor, "Can't unregister domain"); @@ -389,7 +372,7 @@ pub mod account { }; use super::*; - use crate::permission::{account::is_account_owner, accounts_permissions, roles_permissions}; + use crate::permission::{account::is_account_owner, revoke_permissions}; pub fn visit_register_account( executor: &mut V, @@ -441,30 +424,13 @@ pub mod account { .is_owned_by(&executor.context().authority, executor.host()) } { - let mut err = None; - for (owner_id, permission) in accounts_permissions(executor.host()) { - if is_permission_account_associated(&permission, account_id) { - let isi = &Revoke::account_permission(permission, owner_id.clone()); - - if let Err(error) = executor.host().submit(isi) { - err = Some(error); - break; - } - } - } - if let Some(err) = err { + let err = revoke_permissions(executor, |permission| { + is_permission_account_associated(permission, account_id) + }); + if let Err(err) = err { deny!(executor, err); } - for (role_id, permission) in roles_permissions(executor.host()) { - if is_permission_account_associated(&permission, account_id) { - let isi = &Revoke::role_permission(permission, role_id.clone()); - - if let Err(err) = executor.host().submit(isi) { - deny!(executor, err); - } - } - } execute!(executor, isi); } deny!(executor, "Can't unregister another account"); @@ -580,8 +546,7 @@ pub mod asset_definition { use super::*; use crate::permission::{ - account::is_account_owner, accounts_permissions, - asset_definition::is_asset_definition_owner, roles_permissions, + account::is_account_owner, asset_definition::is_asset_definition_owner, revoke_permissions, }; pub fn visit_register_asset_definition( @@ -638,30 +603,13 @@ pub mod asset_definition { .is_owned_by(&executor.context().authority, executor.host()) } { - let mut err = None; - for (owner_id, permission) in accounts_permissions(executor.host()) { - if 
is_permission_asset_definition_associated(&permission, asset_definition_id) { - let isi = &Revoke::account_permission(permission, owner_id.clone()); - - if let Err(error) = executor.host().submit(isi) { - err = Some(error); - break; - } - } - } - if let Some(err) = err { + let err = revoke_permissions(executor, |permission| { + is_permission_asset_definition_associated(permission, asset_definition_id) + }); + if let Err(err) = err { deny!(executor, err); } - for (role_id, permission) in roles_permissions(executor.host()) { - if is_permission_asset_definition_associated(&permission, asset_definition_id) { - let isi = &Revoke::role_permission(permission, role_id.clone()); - - if let Err(err) = executor.host().submit(isi) { - deny!(executor, err); - } - } - } execute!(executor, isi); } deny!( @@ -1367,7 +1315,7 @@ pub mod trigger { use super::*; use crate::permission::{ - accounts_permissions, domain::is_domain_owner, roles_permissions, trigger::is_trigger_owner, + domain::is_domain_owner, revoke_permissions, trigger::is_trigger_owner, }; pub fn visit_register_trigger( @@ -1420,30 +1368,13 @@ pub mod trigger { .is_owned_by(&executor.context().authority, executor.host()) } { - let mut err = None; - for (owner_id, permission) in accounts_permissions(executor.host()) { - if is_permission_trigger_associated(&permission, trigger_id) { - let isi = &Revoke::account_permission(permission, owner_id.clone()); - - if let Err(error) = executor.host().submit(isi) { - err = Some(error); - break; - } - } - } - if let Some(err) = err { + let err = revoke_permissions(executor, |permission| { + is_permission_trigger_associated(permission, trigger_id) + }); + if let Err(err) = err { deny!(executor, err); } - for (role_id, permission) in roles_permissions(executor.host()) { - if is_permission_trigger_associated(&permission, trigger_id) { - let isi = &Revoke::role_permission(permission, role_id.clone()); - if let Err(err) = executor.host().submit(isi) { - deny!(executor, err); - } - } - } 
- execute!(executor, isi); } deny!( diff --git a/crates/iroha_executor/src/permission.rs b/crates/iroha_executor/src/permission.rs index 7460b5e0df4..e5ef2a51b6c 100644 --- a/crates/iroha_executor/src/permission.rs +++ b/crates/iroha_executor/src/permission.rs @@ -10,6 +10,7 @@ use crate::{ data_model::{executor::Result, permission::Permission as PermissionObject, prelude::*}, prelude::*, }, + Execute, }; /// Declare permission types of current module. Use it with a full path to the permission. @@ -1084,3 +1085,29 @@ pub(crate) fn roles_permissions(host: &Iroha) -> impl Iterator( + executor: &mut V, + condition: impl Fn(&PermissionObject) -> bool, +) -> Result<(), ValidationFail> { + for (owner_id, permission) in accounts_permissions(executor.host()) { + if condition(&permission) { + let isi = Revoke::account_permission(permission, owner_id.clone()); + + executor.host().submit(&isi)?; + } + } + + for (role_id, permission) in roles_permissions(executor.host()) { + if condition(&permission) { + let isi = Revoke::role_permission(permission, role_id.clone()); + + executor.host().submit(&isi)?; + } + } + + Ok(()) +} From eb5be5bae979b125ea9f9aa930220e179f0ad9aa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Marin=20Ver=C5=A1i=C4=87?= Date: Wed, 20 Nov 2024 14:58:49 +0100 Subject: [PATCH 4/5] chore(schema): export missing permissions (#5249) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Marin Veršić --- crates/iroha_schema_gen/src/lib.rs | 19 +++++++++++++++++++ docs/source/references/schema.json | 17 +++++++++++++++++ 2 files changed, 36 insertions(+) diff --git a/crates/iroha_schema_gen/src/lib.rs b/crates/iroha_schema_gen/src/lib.rs index b0fd0fe0b40..82fa07d5d93 100644 --- a/crates/iroha_schema_gen/src/lib.rs +++ b/crates/iroha_schema_gen/src/lib.rs @@ -64,16 +64,23 @@ pub fn build_schemas() -> MetaMap { // Default permissions permission::peer::CanManagePeers, + + permission::domain::CanRegisterDomain, 
permission::domain::CanUnregisterDomain, permission::domain::CanModifyDomainMetadata, + permission::account::CanRegisterAccount, permission::account::CanUnregisterAccount, permission::account::CanModifyAccountMetadata, + permission::asset_definition::CanRegisterAssetDefinition, permission::asset_definition::CanUnregisterAssetDefinition, permission::asset_definition::CanModifyAssetDefinitionMetadata, + permission::asset::CanRegisterAssetWithDefinition, permission::asset::CanUnregisterAssetWithDefinition, + permission::asset::CanMintAssetWithDefinition, + permission::asset::CanBurnAssetWithDefinition, permission::asset::CanTransferAssetWithDefinition, permission::asset::CanRegisterAsset, permission::asset::CanUnregisterAsset, @@ -81,13 +88,16 @@ pub fn build_schemas() -> MetaMap { permission::asset::CanBurnAsset, permission::asset::CanTransferAsset, permission::asset::CanModifyAssetMetadata, + permission::parameter::CanSetParameters, permission::role::CanManageRoles, + permission::trigger::CanRegisterTrigger, permission::trigger::CanExecuteTrigger, permission::trigger::CanUnregisterTrigger, permission::trigger::CanModifyTrigger, permission::trigger::CanModifyTriggerMetadata, + permission::executor::CanUpgradeExecutor, // Multi-signature operations @@ -595,6 +605,7 @@ mod tests { insert_into_test_map!(Compact); insert_into_test_map!(iroha_executor_data_model::permission::peer::CanManagePeers); + insert_into_test_map!(iroha_executor_data_model::permission::domain::CanRegisterDomain); insert_into_test_map!(iroha_executor_data_model::permission::domain::CanUnregisterDomain); insert_into_test_map!( iroha_executor_data_model::permission::domain::CanModifyDomainMetadata @@ -617,6 +628,12 @@ mod tests { insert_into_test_map!( iroha_executor_data_model::permission::asset::CanUnregisterAssetWithDefinition ); + insert_into_test_map!( + iroha_executor_data_model::permission::asset::CanMintAssetWithDefinition + ); + insert_into_test_map!( + 
iroha_executor_data_model::permission::asset::CanBurnAssetWithDefinition + ); insert_into_test_map!( iroha_executor_data_model::permission::asset::CanTransferAssetWithDefinition ); @@ -626,8 +643,10 @@ mod tests { insert_into_test_map!(iroha_executor_data_model::permission::asset::CanBurnAsset); insert_into_test_map!(iroha_executor_data_model::permission::asset::CanTransferAsset); insert_into_test_map!(iroha_executor_data_model::permission::asset::CanModifyAssetMetadata); + insert_into_test_map!(iroha_executor_data_model::permission::parameter::CanSetParameters); insert_into_test_map!(iroha_executor_data_model::permission::role::CanManageRoles); + insert_into_test_map!(iroha_executor_data_model::permission::trigger::CanRegisterTrigger); insert_into_test_map!(iroha_executor_data_model::permission::trigger::CanExecuteTrigger); insert_into_test_map!(iroha_executor_data_model::permission::trigger::CanUnregisterTrigger); diff --git a/docs/source/references/schema.json b/docs/source/references/schema.json index a3d775a6900..49dbe788535 100644 --- a/docs/source/references/schema.json +++ b/docs/source/references/schema.json @@ -810,6 +810,14 @@ } ] }, + "CanBurnAssetWithDefinition": { + "Struct": [ + { + "name": "asset_definition", + "type": "AssetDefinitionId" + } + ] + }, "CanExecuteTrigger": { "Struct": [ { @@ -828,6 +836,14 @@ } ] }, + "CanMintAssetWithDefinition": { + "Struct": [ + { + "name": "asset_definition", + "type": "AssetDefinitionId" + } + ] + }, "CanModifyAccountMetadata": { "Struct": [ { @@ -908,6 +924,7 @@ } ] }, + "CanRegisterDomain": null, "CanRegisterTrigger": { "Struct": [ { From 88363049caa856639aaf37528879212269b1dae5 Mon Sep 17 00:00:00 2001 From: Mykhailo Lohachov Date: Thu, 21 Nov 2024 19:42:29 +0900 Subject: [PATCH 5/5] feat: Add listen timeouts to iroha cli (#5241) * feat: add timeouts to client cli Signed-off-by: Lohachov Mykhailo * chore: remove unused macro Signed-off-by: Lohachov Mykhailo * test: add tests for cli Signed-off-by: Lohachov 
Mykhailo * fix: test codecover Signed-off-by: Lohachov Mykhailo * fix: remove unused tests Signed-off-by: Lohachov Mykhailo * chore: newlines Signed-off-by: Lohachov Mykhailo * chore: make args global Signed-off-by: Lohachov Mykhailo * chore: update docker-compose.single.yml Signed-off-by: Mykhailo Lohachov * fix: use float for duration Signed-off-by: Lohachov Mykhailo * fix: use milis in events streaming Signed-off-by: Lohachov Mykhailo * chore: fix compose Signed-off-by: Lohachov Mykhailo * fix: use humantime Signed-off-by: Lohachov Mykhailo * test: add integration test Signed-off-by: Lohachov Mykhailo * chore: fmt Signed-off-by: Lohachov Mykhailo * chore: fmt Signed-off-by: Lohachov Mykhailo * chore: update python test dependencies Signed-off-by: Lohachov Mykhailo --------- Signed-off-by: Lohachov Mykhailo Signed-off-by: Mykhailo Lohachov --- Cargo.lock | 2 + crates/iroha_cli/Cargo.toml | 2 + crates/iroha_cli/src/main.rs | 110 +++++++-- pytests/iroha_cli_tests/poetry.lock | 222 ++++++++++-------- .../iroha_cli_tests/test/events/__init__.py | 0 .../iroha_cli_tests/test/events/conftest.py | 11 + .../test/events/test_listen_events.py | 19 ++ 7 files changed, 241 insertions(+), 125 deletions(-) create mode 100644 pytests/iroha_cli_tests/test/events/__init__.py create mode 100644 pytests/iroha_cli_tests/test/events/conftest.py create mode 100644 pytests/iroha_cli_tests/test/events/test_listen_events.py diff --git a/Cargo.lock b/Cargo.lock index 2f5fc18928a..5392550e46e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2952,6 +2952,7 @@ dependencies = [ "erased-serde", "error-stack", "eyre", + "futures", "humantime", "iroha", "iroha_primitives", @@ -2960,6 +2961,7 @@ dependencies = [ "serde_json", "supports-color 2.1.0", "thiserror", + "tokio", "vergen", ] diff --git a/crates/iroha_cli/Cargo.toml b/crates/iroha_cli/Cargo.toml index b843a2dccc2..f92581e2073 100644 --- a/crates/iroha_cli/Cargo.toml +++ b/crates/iroha_cli/Cargo.toml @@ -40,6 +40,8 @@ serde = { workspace = 
true } serde_json = { workspace = true } erased-serde = "0.4.5" supports-color = { workspace = true } +tokio = { workspace = true, features = ["rt"] } +futures = { workspace = true } [build-dependencies] vergen = { version = "8.3.1", default-features = false } diff --git a/crates/iroha_cli/src/main.rs b/crates/iroha_cli/src/main.rs index c0fb1e9761e..8e80d366888 100644 --- a/crates/iroha_cli/src/main.rs +++ b/crates/iroha_cli/src/main.rs @@ -5,14 +5,17 @@ use std::{ io::{stdin, stdout}, path::PathBuf, str::FromStr, + time::Duration, }; use erased_serde::Serialize; use error_stack::{fmt::ColorMode, IntoReportCompat, ResultExt}; use eyre::{eyre, Error, Result, WrapErr}; +use futures::TryStreamExt; use iroha::{client::Client, config::Config, data_model::prelude::*}; use iroha_primitives::json::Json; use thiserror::Error; +use tokio::runtime::Runtime; /// Re-usable clap `--metadata ` (`-m`) argument. /// Should be combined with `#[command(flatten)]` attr. @@ -100,7 +103,6 @@ enum Subcommand { #[clap(subcommand)] Peer(peer::Args), /// The subcommand related to event streaming - #[clap(subcommand)] Events(events::Args), /// The subcommand related to Wasm Wasm(wasm::Args), @@ -305,9 +307,18 @@ mod events { use super::*; + #[derive(clap::Args, Debug, Clone, Copy)] + pub struct Args { + /// Wait timeout + #[clap(short, long, global = true)] + timeout: Option, + #[clap(subcommand)] + command: Command, + } + /// Get event stream from Iroha peer #[derive(clap::Subcommand, Debug, Clone, Copy)] - pub enum Args { + enum Command { /// Gets block pipeline events BlockPipeline, /// Gets transaction pipeline events @@ -322,24 +333,53 @@ mod events { impl RunArgs for Args { fn run(self, context: &mut dyn RunContext) -> Result<()> { - match self { - Args::TransactionPipeline => listen(TransactionEventFilter::default(), context), - Args::BlockPipeline => listen(BlockEventFilter::default(), context), - Args::Data => listen(DataEventFilter::Any, context), - Args::ExecuteTrigger => 
listen(ExecuteTriggerEventFilter::new(), context), - Args::TriggerCompleted => listen(TriggerCompletedEventFilter::new(), context), + let timeout: Option = self.timeout.map(Into::into); + + match self.command { + Command::TransactionPipeline => { + listen(TransactionEventFilter::default(), context, timeout) + } + Command::BlockPipeline => listen(BlockEventFilter::default(), context, timeout), + Command::Data => listen(DataEventFilter::Any, context, timeout), + Command::ExecuteTrigger => { + listen(ExecuteTriggerEventFilter::new(), context, timeout) + } + Command::TriggerCompleted => { + listen(TriggerCompletedEventFilter::new(), context, timeout) + } } } } - fn listen(filter: impl Into, context: &mut dyn RunContext) -> Result<()> { + fn listen( + filter: impl Into, + context: &mut dyn RunContext, + timeout: Option, + ) -> Result<()> { let filter = filter.into(); let client = context.client_from_config(); - eprintln!("Listening to events with filter: {filter:?}"); - client - .listen_for_events([filter]) - .wrap_err("Failed to listen for events.")? - .try_for_each(|event| context.print_data(&event?))?; + + if let Some(timeout) = timeout { + eprintln!("Listening to events with filter: {filter:?} and timeout: {timeout:?}"); + let rt = Runtime::new().wrap_err("Failed to create runtime.")?; + rt.block_on(async { + let mut stream = client + .listen_for_events_async([filter]) + .await + .expect("Failed to listen for events."); + while let Ok(event) = tokio::time::timeout(timeout, stream.try_next()).await { + context.print_data(&event?)?; + } + eprintln!("Timeout period has expired."); + Result::<()>::Ok(()) + })?; + } else { + eprintln!("Listening to events with filter: {filter:?}"); + client + .listen_for_events([filter]) + .wrap_err("Failed to listen for events.")? 
+ .try_for_each(|event| context.print_data(&event?))?; + } Ok(()) } } @@ -354,22 +394,47 @@ mod blocks { pub struct Args { /// Block height from which to start streaming blocks height: NonZeroU64, + + /// Wait timeout + #[clap(short, long)] + timeout: Option, } impl RunArgs for Args { fn run(self, context: &mut dyn RunContext) -> Result<()> { - let Args { height } = self; - listen(height, context) + let Args { height, timeout } = self; + let timeout: Option = timeout.map(Into::into); + listen(height, context, timeout) } } - fn listen(height: NonZeroU64, context: &mut dyn RunContext) -> Result<()> { + fn listen( + height: NonZeroU64, + context: &mut dyn RunContext, + timeout: Option, + ) -> Result<()> { let client = context.client_from_config(); - eprintln!("Listening to blocks from height: {height}"); - client - .listen_for_blocks(height) - .wrap_err("Failed to listen for blocks.")? - .try_for_each(|event| context.print_data(&event?))?; + if let Some(timeout) = timeout { + eprintln!("Listening to blocks from height: {height} and timeout: {timeout:?}"); + let rt = Runtime::new().wrap_err("Failed to create runtime.")?; + rt.block_on(async { + let mut stream = client + .listen_for_blocks_async(height) + .await + .expect("Failed to listen for blocks."); + while let Ok(event) = tokio::time::timeout(timeout, stream.try_next()).await { + context.print_data(&event?)?; + } + eprintln!("Timeout period has expired."); + Result::<()>::Ok(()) + })?; + } else { + eprintln!("Listening to blocks from height: {height}"); + client + .listen_for_blocks(height) + .wrap_err("Failed to listen for blocks.")? 
+ .try_for_each(|event| context.print_data(&event?))?; + } Ok(()) } } @@ -1377,6 +1442,7 @@ mod multisig { Ok(()) } } + #[cfg(test)] mod tests { use super::*; diff --git a/pytests/iroha_cli_tests/poetry.lock b/pytests/iroha_cli_tests/poetry.lock index 787e5b695dc..ccce5c38515 100644 --- a/pytests/iroha_cli_tests/poetry.lock +++ b/pytests/iroha_cli_tests/poetry.lock @@ -1,29 +1,29 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. [[package]] name = "allure-pytest" -version = "2.13.2" +version = "2.13.5" description = "Allure pytest integration" optional = false python-versions = "*" files = [ - {file = "allure-pytest-2.13.2.tar.gz", hash = "sha256:22243159e8ec81ce2b5254b4013802198821b1b42f118f69d4a289396607c7b3"}, - {file = "allure_pytest-2.13.2-py3-none-any.whl", hash = "sha256:17de9dbee7f61c8e66a5b5e818b00e419dbcea44cb55c24319401ba813220690"}, + {file = "allure-pytest-2.13.5.tar.gz", hash = "sha256:0ef8e1790c44a988db6b83c4d4f5e91451e2c4c8ea10601dfa88528d23afcf6e"}, + {file = "allure_pytest-2.13.5-py3-none-any.whl", hash = "sha256:94130bac32964b78058e62cf4b815ad97a5ac82a065e6dd2d43abac2be7640fc"}, ] [package.dependencies] -allure-python-commons = "2.13.2" +allure-python-commons = "2.13.5" pytest = ">=4.5.0" [[package]] name = "allure-python-commons" -version = "2.13.2" -description = "Common module for integrate allure with python-based frameworks" +version = "2.13.5" +description = "('Contains the API for end users as well as helper functions and classes to build Allure adapters for Python test frameworks',)" optional = false python-versions = ">=3.6" files = [ - {file = "allure-python-commons-2.13.2.tar.gz", hash = "sha256:8a03681330231b1deadd86b97ff68841c6591320114ae638570f1ed60d7a2033"}, - {file = "allure_python_commons-2.13.2-py3-none-any.whl", hash = "sha256:2bb3646ec3fbf5b36d178a5e735002bc130ae9f9ba80f080af97d368ba375051"}, 
+ {file = "allure-python-commons-2.13.5.tar.gz", hash = "sha256:a232e7955811f988e49a4c1dd6c16cce7e9b81d0ea0422b1e5654d3254e2caf3"}, + {file = "allure_python_commons-2.13.5-py3-none-any.whl", hash = "sha256:8b0e837b6e32d810adec563f49e1d04127a5b6770e0232065b7cb09b9953980d"}, ] [package.dependencies] @@ -32,33 +32,33 @@ pluggy = ">=0.4.0" [[package]] name = "astroid" -version = "3.3.4" +version = "3.3.5" description = "An abstract syntax tree for Python with inference support." optional = false python-versions = ">=3.9.0" files = [ - {file = "astroid-3.3.4-py3-none-any.whl", hash = "sha256:5eba185467253501b62a9f113c263524b4f5d55e1b30456370eed4cdbd6438fd"}, - {file = "astroid-3.3.4.tar.gz", hash = "sha256:e73d0b62dd680a7c07cb2cd0ce3c22570b044dd01bd994bc3a2dd16c6cbba162"}, + {file = "astroid-3.3.5-py3-none-any.whl", hash = "sha256:a9d1c946ada25098d790e079ba2a1b112157278f3fb7e718ae6a9252f5835dc8"}, + {file = "astroid-3.3.5.tar.gz", hash = "sha256:5cfc40ae9f68311075d27ef68a4841bdc5cc7f6cf86671b49f00607d30188e2d"}, ] [[package]] name = "attrs" -version = "23.2.0" +version = "24.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, + {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, + {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, ] [package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-mypy = ["mypy (>=1.6)", 
"pytest-mypy-plugins"] -tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "black" @@ -106,63 +106,78 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "cffi" -version = "1.16.0" +version = "1.17.1" description = "Foreign Function Interface for Python calling C code." 
optional = false python-versions = ">=3.8" files = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = 
"cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, - {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, - {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, - {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, - {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = 
"cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = 
"cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] [package.dependencies] @@ -244,13 +259,13 @@ test-randomorder = ["pytest-randomly"] [[package]] name = "dill" -version = "0.3.8" +version = "0.3.9" description = "serialize all of Python" optional = false python-versions = ">=3.8" files = [ - {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"}, - {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"}, + {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, + {file = "dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, ] [package.extras] @@ -273,13 +288,13 @@ testing = ["hatch", "pre-commit", "pytest", "tox"] [[package]] name = "faker" -version = "30.8.2" +version = "33.0.0" description = "Faker is a Python package that generates fake data for you." 
optional = false python-versions = ">=3.8" files = [ - {file = "Faker-30.8.2-py3-none-any.whl", hash = "sha256:4a82b2908cd19f3bba1a4da2060cc4eb18a40410ccdf9350d071d79dc92fe3ce"}, - {file = "faker-30.8.2.tar.gz", hash = "sha256:aa31b52cdae3673d6a78b4857c7bcdc0e98f201a5cb77d7827fa9e6b5876da94"}, + {file = "Faker-33.0.0-py3-none-any.whl", hash = "sha256:68e5580cb6b4226710886e595eabc13127149d6e71e9d1db65506a7fbe2c7fce"}, + {file = "faker-33.0.0.tar.gz", hash = "sha256:9b01019c1ddaf2253ca2308c0472116e993f4ad8fc9905f82fa965e0c6f932e9"}, ] [package.dependencies] @@ -403,13 +418,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.2" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] [[package]] @@ -425,18 +440,19 @@ files = [ [[package]] name = "platformdirs" -version = "4.2.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "4.3.6" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, - {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] [[package]] name = "pluggy" @@ -455,24 +471,24 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pycodestyle" -version = "2.12.0" +version = "2.12.1" description = "Python style guide checker" optional = false python-versions = ">=3.8" files = [ - {file = "pycodestyle-2.12.0-py2.py3-none-any.whl", hash = "sha256:949a39f6b86c3e1515ba1787c2022131d165a8ad271b11370a8819aa070269e4"}, - {file = "pycodestyle-2.12.0.tar.gz", hash = "sha256:442f950141b4f43df752dd303511ffded3a04c2b6fb7f65980574f0c31e6e79c"}, + {file = "pycodestyle-2.12.1-py2.py3-none-any.whl", hash = "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3"}, + {file = "pycodestyle-2.12.1.tar.gz", hash = "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521"}, ] [[package]] name = "pycparser" -version = "2.21" +version = "2.22" description = "C parser in Python" optional = false -python-versions = ">=2.7, !=3.0.*, 
!=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.8" files = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] [[package]] @@ -555,13 +571,13 @@ testing = ["filelock"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -605,13 +621,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = 
"sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [metadata] diff --git a/pytests/iroha_cli_tests/test/events/__init__.py b/pytests/iroha_cli_tests/test/events/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/pytests/iroha_cli_tests/test/events/conftest.py b/pytests/iroha_cli_tests/test/events/conftest.py new file mode 100644 index 00000000000..4570484926a --- /dev/null +++ b/pytests/iroha_cli_tests/test/events/conftest.py @@ -0,0 +1,11 @@ +from .. import ( + GIVEN_currently_authorized_account, +) + +import allure # type: ignore +import pytest + + +@pytest.fixture(scope="function", autouse=True) +def events_test_setup(): + allure.dynamic.feature("Events") diff --git a/pytests/iroha_cli_tests/test/events/test_listen_events.py b/pytests/iroha_cli_tests/test/events/test_listen_events.py new file mode 100644 index 00000000000..f43b27a002a --- /dev/null +++ b/pytests/iroha_cli_tests/test/events/test_listen_events.py @@ -0,0 +1,19 @@ +import allure # type: ignore +import pytest + +from ...src.iroha_cli import iroha_cli, have, iroha + + +@pytest.fixture(scope="function", autouse=True) +def story_account_transfers_domain(): + allure.dynamic.story("Account streams events") + + +@allure.label("sdk_test_id", "stream_data_events_timeouts") +def test_stream_data_events_timeouts(GIVEN_currently_authorized_account): + with allure.step( + f"WHEN {GIVEN_currently_authorized_account} streams block-pipeline events with timeout " + ): + iroha_cli.execute("events data --timeout 1s") + + iroha_cli.should(have.error("Timeout period has expired.\n"))