diff --git a/CHANGELOG.rst b/CHANGELOG.rst index a8296282804..60ce2894198 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -171,7 +171,7 @@ Version bumps - Update docker publish target to ``hyperledger/iroha2``. - Updates the workflow to match ``main``. - Update API spec and fix health endpoint. -- Update Rust to 1.54. +- Update Rust to 1.54. - Update ``ursa`` version from 0.3.5 to 0.3.6. - Update workflows to new runners. - Update dockerfile for caching and faster CI builds. @@ -269,7 +269,7 @@ Other - 1451 Remove ``MAX_FAULTY_PEERS`` parameter. - Add handler for getting specific block hash. - Add new query FindTransactionByHash. -- 1185 Rename the crate from ``iroha`` to ``iroha_core``, update the path. +- 1185 Rename the crate from ``iroha`` to ``iroha_core``, update the path. - Fix logs and general improvements. - 1150 Introduce ``KURA_BLOCKS_PER_STORAGE_FILE`` setting which defaults to 1000 and defines the number of blocks per each new created datafile. @@ -760,4 +760,3 @@ Other - Replace deprecated param ``max_rounds_delay`` with ``proposal_creation_timeout``. Update sample config file to have not deprecated DB connection params. 
- diff --git a/cli/src/torii/routing.rs b/cli/src/torii/routing.rs index 9795263de17..bab16e86959 100644 --- a/cli/src/torii/routing.rs +++ b/cli/src/torii/routing.rs @@ -19,7 +19,10 @@ use iroha_core::{ wsv::WorldTrait, }; use iroha_crypto::SignatureOf; -use iroha_data_model::{prelude::*, query}; +use iroha_data_model::{ + prelude::*, + query::{self, SignedQueryRequest}, +}; #[cfg(feature = "telemetry")] use iroha_telemetry::metrics::Status; use parity_scale_codec::{Decode, Encode}; diff --git a/client/src/client.rs b/client/src/client.rs index 30c9c651c76..91e3ec5491c 100644 --- a/client/src/client.rs +++ b/client/src/client.rs @@ -13,7 +13,7 @@ use eyre::{eyre, Result, WrapErr}; use http_default::WebSocketStream; use iroha_config::{GetConfiguration, PostConfiguration}; use iroha_crypto::{HashOf, KeyPair}; -use iroha_data_model::prelude::*; +use iroha_data_model::{prelude::*, query::SignedQueryRequest}; use iroha_logger::prelude::*; use iroha_telemetry::metrics::Status; use iroha_version::prelude::*; diff --git a/core/src/smartcontracts/isi/asset.rs b/core/src/smartcontracts/isi/asset.rs index fff5d2b5072..23670a7f39a 100644 --- a/core/src/smartcontracts/isi/asset.rs +++ b/core/src/smartcontracts/isi/asset.rs @@ -343,7 +343,7 @@ pub mod isi { /// Asset-related query implementations. 
pub mod query { - use eyre::{Result, WrapErr}; + use eyre::{Result, WrapErr as _}; use iroha_logger::prelude::*; use super::*; diff --git a/core/src/smartcontracts/isi/query.rs b/core/src/smartcontracts/isi/query.rs index 201f6217506..b7bb3056791 100644 --- a/core/src/smartcontracts/isi/query.rs +++ b/core/src/smartcontracts/isi/query.rs @@ -122,8 +122,13 @@ impl ValidQuery for QueryBox { FindTransactionByHash(query) => query.execute_into_value(wsv), FindPermissionTokensByAccountId(query) => query.execute_into_value(wsv), FindAssetDefinitionKeyValueByIdAndKey(query) => query.execute_into_value(wsv), + FindAllActiveTriggerIds(query) => query.execute_into_value(wsv), + FindTriggerById(query) => query.execute_into_value(wsv), + FindTriggerKeyValueByIdAndKey(query) => query.execute_into_value(wsv), FindAllRoles(query) => query.execute_into_value(wsv), + FindAllRoleIds(query) => query.execute_into_value(wsv), FindRolesByAccountId(query) => query.execute_into_value(wsv), + FindRoleByRoleId(query) => query.execute_into_value(wsv), } } } diff --git a/core/src/smartcontracts/isi/triggers.rs b/core/src/smartcontracts/isi/triggers.rs index b1eda42481b..5da7c75a54c 100644 --- a/core/src/smartcontracts/isi/triggers.rs +++ b/core/src/smartcontracts/isi/triggers.rs @@ -136,3 +136,61 @@ pub mod isi { } } } + +pub mod query { + //! Queries associated to triggers. 
+ use iroha_logger::prelude::*; + + use super::*; + use crate::{ + prelude::*, + smartcontracts::{isi::prelude::WorldTrait, query::Error, Evaluate as _, FindError}, + }; + + impl ValidQuery for FindAllActiveTriggerIds { + #[log] + #[metrics(+"find_all_active_triggers")] + fn execute(&self, wsv: &WorldStateView) -> Result { + Ok(wsv.world.triggers.clone().into()) + } + } + + impl ValidQuery for FindTriggerById { + #[log] + #[metrics(+"find_trigger_by_id")] + fn execute(&self, wsv: &WorldStateView) -> Result { + let id = self + .id + .evaluate(wsv, &Context::new()) + .map_err(|e| Error::Evaluate(format!("Failed to evaluate trigger id. {}", e)))?; + let action = wsv.world.triggers.get(&id)?; + + // TODO: Should we redact the metadata if the account is not the technical account/owner? + Ok(Trigger { + id, + action: action.clone(), + }) + } + } + + impl ValidQuery for FindTriggerKeyValueByIdAndKey { + #[log] + #[metrics(+"find_trigger_key_value_by_id_and_key")] + fn execute(&self, wsv: &WorldStateView) -> Result { + let id = self + .id + .evaluate(wsv, &Context::new()) + .map_err(|e| Error::Evaluate(format!("Failed to evaluate trigger id. {}", e)))?; + let action = wsv.world.triggers.get(&id)?; + let key = self + .key + .evaluate(wsv, &Context::new()) + .map_err(|e| Error::Evaluate(format!("Failed to evaluate key. {}", e)))?; + action + .metadata + .get(&key) + .map(Clone::clone) + .ok_or_else(|| FindError::MetadataKey(key).into()) + } + } +} diff --git a/core/src/smartcontracts/isi/world.rs b/core/src/smartcontracts/isi/world.rs index c060ffba1bc..43714ab26e4 100644 --- a/core/src/smartcontracts/isi/world.rs +++ b/core/src/smartcontracts/isi/world.rs @@ -1,5 +1,7 @@ //! `World`-related ISI implementations. 
+use iroha_telemetry::metrics; + use super::prelude::*; use crate::prelude::*; @@ -7,7 +9,6 @@ use crate::prelude::*; pub mod isi { use eyre::Result; use iroha_data_model::prelude::*; - use iroha_telemetry::metrics; use super::*; @@ -202,6 +203,7 @@ pub mod query { impl ValidQuery for FindAllRoles { #[log] + #[metrics(+"find_all_roles")] fn execute(&self, wsv: &WorldStateView) -> Result { Ok(wsv .world @@ -212,8 +214,39 @@ pub mod query { } } + impl ValidQuery for FindAllRoleIds { + #[log] + #[metrics(+"find_all_role_ids")] + fn execute(&self, wsv: &WorldStateView) -> Result { + Ok(wsv + .world + .roles + .iter() + // To me, this should probably be a method, not a field. + .map(|role| role.id().clone()) + .collect()) + } + } + + impl ValidQuery for FindRoleByRoleId { + #[log] + #[metrics(+"find_role_by_role_id")] + fn execute(&self, wsv: &WorldStateView) -> Result { + let role_id = self + .id + .evaluate(wsv, &Context::new()) + .map_err(|e| Error::Evaluate(e.to_string()))?; + + wsv.world.roles.get(&role_id).map_or_else( + || Err(Error::Find(Box::new(FindError::Role(role_id)))), + |role_ref| Ok(role_ref.clone()), + ) + } + } + impl ValidQuery for FindAllPeers { #[log] + #[metrics("find_all_peers")] fn execute(&self, wsv: &WorldStateView) -> Result { Ok(wsv.peers()) } diff --git a/core/src/triggers.rs b/core/src/triggers.rs index fb217baf21f..ff55d452bc6 100644 --- a/core/src/triggers.rs +++ b/core/src/triggers.rs @@ -24,6 +24,14 @@ pub struct TriggerSet( DashMap, // TODO: Consider tree structures. ); +impl From for Vec { + fn from(TriggerSet(map): TriggerSet) -> Self { + map.iter() + .map(|reference| reference.key().clone()) + .collect() + } +} + impl TriggerSet { /// Add another trigger to the [`TriggerSet`]. /// @@ -46,13 +54,8 @@ impl TriggerSet { /// /// # Errors /// - If [`TriggerSet`] doesn't contain the trigger with the given `id`. 
- pub fn get( - &self, - id: &trigger::Id, - ) -> Result + '_, smartcontracts::Error> { - self.0 - .get(id) - .ok_or_else(|| smartcontracts::Error::Find(Box::new(FindError::Trigger(id.clone())))) + pub fn get(&self, id: &trigger::Id) -> Result + '_, FindError> { + self.0.get(id).ok_or_else(|| FindError::Trigger(id.clone())) } /// Remove a trigger from the [`TriggerSet`]. @@ -76,6 +79,13 @@ impl TriggerSet { self.0.contains_key(key) } + /// Forward the internal immutable iterator. + pub fn iter( + &self, + ) -> dashmap::iter::Iter { + self.0.iter() + } + /// Modify repetitions of the hook identified by [`trigger::Id`]. /// /// # Errors diff --git a/data_model/src/query.rs b/data_model/src/query.rs index 9915eeecc43..92ba1fee5a6 100644 --- a/data_model/src/query.rs +++ b/data_model/src/query.rs @@ -14,7 +14,9 @@ use iroha_version::prelude::*; use parity_scale_codec::{Decode, Encode}; use serde::{Deserialize, Serialize}; -use self::{account::*, asset::*, domain::*, peer::*, permissions::*, role::*, transaction::*}; +use self::{ + account::*, asset::*, domain::*, peer::*, permissions::*, role::*, transaction::*, trigger::*, +}; use crate::{account::Account, pagination::Pagination, Identifiable, Value}; /// Sized container for all possible Queries. @@ -80,8 +82,18 @@ pub enum QueryBox { FindTransactionByHash(FindTransactionByHash), /// [`FindPermissionTokensByAccountId`] variant. FindPermissionTokensByAccountId(FindPermissionTokensByAccountId), + /// [`FindAllActiveTriggerIds`] variant. + FindAllActiveTriggerIds(FindAllActiveTriggerIds), + /// [`FindTriggerById`] variant. + FindTriggerById(FindTriggerById), + /// [`FindTriggerKeyValueByIdAndKey`] variant. + FindTriggerKeyValueByIdAndKey(FindTriggerKeyValueByIdAndKey), /// [`FindAllRoles`] variant. FindAllRoles(FindAllRoles), + /// [`FindAllRoleIds`] variant. + FindAllRoleIds(FindAllRoleIds), + /// [`FindRoleByRoleId`] variant. + FindRoleByRoleId(FindRoleByRoleId), /// [`FindRolesByAccountId`] variant. 
FindRolesByAccountId(FindRolesByAccountId), } @@ -219,7 +231,52 @@ pub mod role { type Output = Vec; } - /// `FindRolesByAccountId` Iroha Query will find an `Role`s for a specified account. + /// `FindAllRoleIds` Iroha Query will find Ids of all `Role`s presented. + #[derive( + Debug, + Clone, + Copy, + Default, + PartialEq, + Eq, + PartialOrd, + Ord, + Decode, + Encode, + Deserialize, + Serialize, + IntoSchema, + )] + pub struct FindAllRoleIds; + + impl Query for FindAllRoleIds { + type Output = Vec<::Id>; + } + + /// `FindRoleByRoleId` Iroha Query to find the [`Role`] which has the given [`Id`] + #[derive( + Debug, + Clone, + PartialEq, + Eq, + PartialOrd, + Ord, + Decode, + Encode, + Deserialize, + Serialize, + IntoSchema, + )] + pub struct FindRoleByRoleId { + /// `Id` of the `Role` to find + pub id: EvaluatesTo<::Id>, + } + + impl Query for FindRoleByRoleId { + type Output = Role; + } + + /// `FindRolesByAccountId` Iroha Query will find all [`Role`]s for a specified account. #[derive( Debug, Clone, @@ -235,16 +292,16 @@ pub mod role { )] pub struct FindRolesByAccountId { /// `Id` of an account to find. - pub id: EvaluatesTo, + pub id: EvaluatesTo<::Id>, } impl Query for FindRolesByAccountId { - type Output = Vec; + type Output = Vec<::Id>; } /// The prelude re-exports most commonly used traits, structs and macros from this module. pub mod prelude { - pub use super::{FindAllRoles, FindRolesByAccountId}; + pub use super::{FindAllRoleIds, FindAllRoles, FindRoleByRoleId, FindRolesByAccountId}; } } @@ -1047,6 +1104,95 @@ pub mod peer { } } +pub mod trigger { + //! Trigger-related queries. + #[cfg(not(feature = "std"))] + use alloc::{format, string::String, vec::Vec}; + + use iroha_schema::prelude::*; + use parity_scale_codec::{Decode, Encode}; + use serde::{Deserialize, Serialize}; + + use super::Query; + use crate::{expression::EvaluatesTo, trigger::Trigger, Identifiable, Name, Value}; + + /// Find all currently active (as in not disabled and/or expired) + /// trigger IDs. 
+ #[derive( + Debug, + Clone, + Copy, + Default, + PartialEq, + Eq, + PartialOrd, + Ord, + Decode, + Encode, + Deserialize, + Serialize, + IntoSchema, + )] + pub struct FindAllActiveTriggerIds; + + impl Query for FindAllActiveTriggerIds { + type Output = Vec<::Id>; + } + + /// Find Trigger given its ID. + #[derive( + Debug, + Clone, + PartialEq, + Eq, + PartialOrd, + Ord, + Decode, + Encode, + Deserialize, + Serialize, + IntoSchema, + )] + pub struct FindTriggerById { + /// The Identification of the trigger to be found. + pub id: EvaluatesTo<::Id>, + } + + impl Query for FindTriggerById { + type Output = Trigger; + } + + #[derive( + Debug, + Clone, + PartialEq, + Eq, + PartialOrd, + Ord, + Decode, + Encode, + Deserialize, + Serialize, + IntoSchema, + )] + /// Find Trigger's metadata key-value pairs. + pub struct FindTriggerKeyValueByIdAndKey { + /// The Identification of the trigger to be found. + pub id: EvaluatesTo<::Id>, + /// The key inside the metadata dictionary to be returned. + pub key: EvaluatesTo, + } + + impl Query for FindTriggerKeyValueByIdAndKey { + type Output = Value; + } + + pub mod prelude { + //! Prelude Re-exports most commonly used traits, structs and macros from this crate. + pub use super::{FindAllActiveTriggerIds, FindTriggerById, FindTriggerKeyValueByIdAndKey}; + } +} + pub mod transaction { //! Queries related to `Transaction`. 
@@ -1139,8 +1285,8 @@ pub mod transaction { pub mod prelude { pub use super::{ account::prelude::*, asset::prelude::*, domain::prelude::*, peer::prelude::*, - permissions::prelude::*, role::prelude::*, transaction::*, PaginatedQueryResult, Query, - QueryBox, QueryResult, SignedQueryRequest, VersionedPaginatedQueryResult, + permissions::prelude::*, role::prelude::*, transaction::*, trigger::prelude::*, + PaginatedQueryResult, Query, QueryBox, QueryResult, VersionedPaginatedQueryResult, VersionedQueryResult, }; #[cfg(feature = "warp")] diff --git a/data_model/src/trigger.rs b/data_model/src/trigger.rs index a989ff0bf65..ac1a7651488 100644 --- a/data_model/src/trigger.rs +++ b/data_model/src/trigger.rs @@ -2,7 +2,7 @@ #[cfg(not(feature = "std"))] use alloc::{format, string::String, vec::Vec}; -use core::{cmp::Ordering, str::FromStr}; +use core::{cmp::Ordering, fmt, str::FromStr}; use iroha_schema::IntoSchema; use parity_scale_codec::{Decode, Encode}; @@ -22,8 +22,6 @@ pub struct Trigger { pub id: ::Id, /// Action to be performed when the trigger matches. pub action: Action, - /// Metadata of this account as a key-value store. - pub metadata: Metadata, } impl Trigger { @@ -32,18 +30,7 @@ impl Trigger { id: ::Id, action: Action, ) -> ::RegisteredWith { - Self { - id, - action, - metadata: Metadata::new(), - } - } - - /// Add [`Metadata`] to the trigger replacing previously defined - #[must_use] - pub fn with_metadata(mut self, metadata: Metadata) -> Self { - self.metadata = metadata; - self + Self { id, action } } } @@ -78,6 +65,8 @@ pub struct Action { pub technical_account: super::account::Id, /// Defines events which trigger the `Action` pub filter: EventFilter, + /// Metadata used as persistent storage for trigger data. + pub metadata: Metadata, } impl Action { @@ -94,8 +83,16 @@ impl Action { // TODO: At this point the technical account is meaningless. 
technical_account, filter, + metadata: Metadata::new(), } } + + /// Add [`Metadata`] to the trigger replacing previously defined + #[must_use] + pub fn with_metadata(mut self, metadata: Metadata) -> Self { + self.metadata = metadata; + self + } } impl PartialOrd for Action { @@ -165,6 +162,12 @@ pub struct Id { pub name: Name, } +impl fmt::Display for Id { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.name.fmt(f) + } +} + impl Id { /// Construct [`Id`], while performing lenght checks and acceptable character validation. /// diff --git a/permissions_validators/src/private_blockchain/query.rs b/permissions_validators/src/private_blockchain/query.rs index 381f69e6b3d..a4006d51bf4 100644 --- a/permissions_validators/src/private_blockchain/query.rs +++ b/permissions_validators/src/private_blockchain/query.rs @@ -29,8 +29,44 @@ impl IsAllowed for OnlyAccountsDomain { FindAllDomains(_) => { Err("Only access to the domain of the account is permitted.".to_owned()) } - FindAllRoles(_) => Ok(()), - FindAllPeers(_) => Ok(()), + FindAllRoles(_) => { + Err("Only access to roles of the same domain is permitted.".to_owned()) + } + FindAllRoleIds(_) => Ok(()), // In case you need to debug the permissions. + FindRoleByRoleId(_) => { + Err("Only access to roles of the same domain is permitted.".to_owned()) + } + FindAllPeers(_) => Ok(()), // Can be obtained in other ways, so why hide it. 
+ FindAllActiveTriggerIds(_) => Ok(()), + // Private blockchains should have debugging too, hence + // all accounts should also be able to list the active trigger IDs. + FindTriggerById(query) => { + let id = query + .id + .evaluate(wsv, &context) + .map_err(|e| e.to_string())?; + let trigger = wsv.world.triggers.get(&id).map_err(|err| err.to_string())?; + if trigger.technical_account == *authority { + Ok(()) + } else { + Err("Cannot access Trigger if you're not the technical account.".to_owned()) + } + } + FindTriggerKeyValueByIdAndKey(query) => { + let id = query + .id + .evaluate(wsv, &context) + .map_err(|e| e.to_string())?; + let trigger = wsv.world.triggers.get(&id).map_err(|err| err.to_string())?; + if trigger.technical_account == *authority { + Ok(()) + } else { + Err( + "Cannot access Trigger internal state if you're not the technical account." + .to_owned(), + ) + } + } FindAccountById(query) => { let account_id = query .id @@ -247,22 +283,65 @@ impl IsAllowed for OnlyAccountsData { let context = Context::new(); match query { FindAccountsByName(_) - | FindAccountsByDomainId(_) - | FindAllAccounts(_) - | FindAllAssetsDefinitions(_) - | FindAssetsByAssetDefinitionId(_) - | FindAssetsByDomainId(_) - | FindAssetsByName(_) - | FindAllDomains(_) - | FindDomainById(_) - | FindDomainKeyValueByIdAndKey(_) - | FindAssetsByDomainIdAndAssetDefinitionId(_) - | FindAssetDefinitionKeyValueByIdAndKey(_) - | FindAllAssets(_) => { - Err("Only access to the assets of the same domain is permitted.".to_owned()) + | FindAccountsByDomainId(_) + | FindAllAccounts(_) => { + Err("Other accounts are private.".to_owned()) + } + | FindAllDomains(_) + | FindDomainById(_) + | FindDomainKeyValueByIdAndKey(_) => { + Err("Only access to your account's data is permitted.".to_owned()) + }, + FindAssetsByDomainIdAndAssetDefinitionId(_) + | FindAssetsByName(_) // TODO: I think this is a mistake. 
+ | FindAssetsByDomainId(_) + | FindAllAssetsDefinitions(_) + | FindAssetsByAssetDefinitionId(_) + | FindAssetDefinitionKeyValueByIdAndKey(_) + | FindAllAssets(_) => { + Err("Only access to the assets of your account is permitted.".to_owned()) + } + FindAllRoles(_) | FindAllRoleIds(_) | FindRoleByRoleId(_) => { + Err("Only access to roles of the same account is permitted.".to_owned()) + }, + | FindAllActiveTriggerIds(_) => { + Err("Only access to the triggers of the same account is permitted.".to_owned()) + } + FindAllPeers(_) => { + Err("Only access to your account-local data is permitted.".to_owned()) + } + FindTriggerById(query) => { + // TODO: should differentiate between global and domain-local triggers. + let id = query + .id + .evaluate(wsv, &context) + .map_err(|e| e.to_string())?; + if let Ok(trigger) = wsv.world.triggers.get(&id) { + if trigger.technical_account == *authority { + return Ok(()); + } + } + Err(format!( + "A trigger with the specified Id: {} is not accessible to you", + id + )) + } + FindTriggerKeyValueByIdAndKey(query) => { + // TODO: should differentiate between global and domain-local triggers. 
+ let id = query + .id + .evaluate(wsv, &context) + .map_err(|e| e.to_string())?; + if let Ok(trigger) = wsv.world.triggers.get(&id) { + if trigger.technical_account == *authority { + return Ok(()); + } + } + Err(format!( + "A trigger with the specified Id: {} is not accessible to you", + id + )) } - FindAllRoles(_) => Ok(()), - FindAllPeers(_) => Ok(()), FindAccountById(query) => { let account_id = query .id @@ -272,8 +351,9 @@ impl IsAllowed for OnlyAccountsData { Ok(()) } else { Err(format!( - "Cannot access account {} as only access to your own account is permitted..", - account_id + "Cannot access account {} as only access to your own account, {} is permitted..", + account_id, + authority )) } } diff --git a/tools/kagami/Cargo.toml b/tools/kagami/Cargo.toml index 6d4d6e49c5a..618da8daaaf 100644 --- a/tools/kagami/Cargo.toml +++ b/tools/kagami/Cargo.toml @@ -20,6 +20,7 @@ dev-telemetry = ["iroha_core/dev-telemetry"] [dependencies] + iroha_core = { version = "=2.0.0-pre-rc.4", path = "../../core" } iroha_crypto = { version = "=2.0.0-pre-rc.4", path = "../../crypto" } iroha_config = { version = "=2.0.0-pre-rc.4", path = "../../config" } @@ -27,6 +28,7 @@ iroha_data_model = { version = "=2.0.0-pre-rc.4", path = "../../data_model" } iroha_schema_gen = { version = "=2.0.0-pre-rc.4", path = "../../schema/gen" } iroha_schema = { version = "=2.0.0-pre-rc.4", path = "../../schema" } iroha_permissions_validators = { version = "=2.0.0-pre-rc.4", path = "../../permissions_validators" } + iroha = { path = "../../cli" } color-eyre = "0.5.11" diff --git a/tools/parity_scale_decoder/src/generate_map.rs b/tools/parity_scale_decoder/src/generate_map.rs index 843e7b8b5f9..428b413b8ae 100644 --- a/tools/parity_scale_decoder/src/generate_map.rs +++ b/tools/parity_scale_decoder/src/generate_map.rs @@ -4,7 +4,7 @@ use std::collections::BTreeSet; use iroha_core::*; use iroha_crypto::*; -use iroha_data_model::{prelude::*, *}; +use iroha_data_model::{prelude::*, 
query::SignedQueryRequest, *}; use iroha_schema::IntoSchema; use iroha_version::*; @@ -145,6 +145,8 @@ pub fn generate_map() -> DumpDecodedMap { FindAllParameters, FindAllPeers, FindAllRoles, + FindAllRoleIds, + FindAllActiveTriggerIds, FindAssetById, FindAssetDefinitionKeyValueByIdAndKey, FindAssetKeyValueByIdAndKey, @@ -158,8 +160,11 @@ pub fn generate_map() -> DumpDecodedMap { FindDomainKeyValueByIdAndKey, FindPermissionTokensByAccountId, FindRolesByAccountId, + FindRoleByRoleId, FindTransactionByHash, FindTransactionsByAccountId, + FindTriggerKeyValueByIdAndKey, + FindTriggerById, GenesisDomain, GrantBox, Greater, @@ -288,7 +293,6 @@ pub fn generate_map() -> DumpDecodedMap { VersionedPaginatedQueryResult, WasmExecutionFail, Where, - [u8; 32], account::NewAccount, asset::Mintable, block::BlockHeader, @@ -324,6 +328,8 @@ pub fn generate_map() -> DumpDecodedMap { expression::EvaluatesTo>, expression::EvaluatesTo, expression::EvaluatesTo, + expression::EvaluatesTo, + expression::EvaluatesTo, fixed::FixNum, fixed::Fixed, i64, @@ -347,6 +353,7 @@ pub fn generate_map() -> DumpDecodedMap { u32, u64, u8, + [u8; 32], }; map.insert( diff --git a/tools/parity_scale_decoder/src/main.rs b/tools/parity_scale_decoder/src/main.rs index 6df7e237779..5418dd8228b 100644 --- a/tools/parity_scale_decoder/src/main.rs +++ b/tools/parity_scale_decoder/src/main.rs @@ -240,6 +240,10 @@ mod tests { #[test] fn decode_trigger_sample() { + // This test is extremely awkward to update. There are no + // instructions for how to do so, and I'm willing to bet that + // any of the community members who want to adjust the + // triggers will not know what to do. 
decode_sample( "trigger.bin", String::from("iroha_data_model::trigger::Trigger"), @@ -301,16 +305,16 @@ mod tests { ), ), ), - }, - metadata: Metadata { - map: {}, + metadata: Metadata { + map: {}, + }, }, } "###, ); } - #[allow(clippy::unwrap_used)] + #[allow(clippy::expect_used)] fn decode_sample(sample_path: &str, type_id: String, expected_output: &str) { let mut binary = PathBuf::from(env!("CARGO_MANIFEST_DIR")); binary.push("samples/"); @@ -323,8 +327,10 @@ mod tests { let map = generate_map(); let decoder = Decoder::new(args, &map); let mut buf = Vec::new(); - decoder.decode(&mut buf).unwrap(); - - assert_eq!(String::from_utf8(buf).unwrap(), expected_output); + decoder.decode(&mut buf).expect("Decoding failed"); + let actual = String::from_utf8(buf).expect("valid UTF-8"); + // Predictably, the string-based comparison is white-space sensitive. + println!("{}\n{}", actual, expected_output); + assert_eq!(actual, expected_output); } }