From ad00930fbcad52758f35fad519472dc3606a75fd Mon Sep 17 00:00:00 2001
From: Daniil
Date: Fri, 22 Apr 2022 22:07:03 +0300
Subject: [PATCH] [feature] #2099: Add WASM integration test based on Orillion use-case (#2122)

Signed-off-by: Daniil Polyakov
---
 .github/workflows/iroha2-dev-pr-static.yml | 4 +-
 Dockerfile.ci | 1 +
 cli/src/samples.rs | 9 ++
 cli/src/stream.rs | 2 +-
 client/Cargo.toml | 2 +-
 client/build.rs | 49 +++++++++
 .../Cargo.toml | 26 +++++
 .../src/lib.rs | 70 ++++++++++++
 .../integration/triggers/time_trigger.rs | 101 +++++++++++++++++-
 core/src/smartcontracts/wasm.rs | 27 ++++-
 core/src/sumeragi/fault.rs | 2 +-
 core/src/sumeragi/network_topology.rs | 2 +-
 core/src/wsv.rs | 8 +-
 docs/source/references/config.md | 21 ++++
 futures/src/lib.rs | 2 +-
 wasm/Cargo.toml | 4 +
 wasm/derive/src/lib.rs | 1 +
 wasm/src/debug.rs | 56 ++++++++++
 wasm/src/lib.rs | 20 ++++
 19 files changed, 392 insertions(+), 15 deletions(-)
 create mode 100644 client/build.rs
 create mode 100644 client/tests/integration/create_nft_for_every_user_smartcontract/Cargo.toml
 create mode 100644 client/tests/integration/create_nft_for_every_user_smartcontract/src/lib.rs
 create mode 100644 wasm/src/debug.rs

diff --git a/.github/workflows/iroha2-dev-pr-static.yml b/.github/workflows/iroha2-dev-pr-static.yml
index ac5e48d78a7..2def0484fe6 100644
--- a/.github/workflows/iroha2-dev-pr-static.yml
+++ b/.github/workflows/iroha2-dev-pr-static.yml
@@ -24,10 +24,10 @@ jobs:
       - name: Format check
         run: cargo +nightly-2022-04-20 fmt --all -- --check
       - name: Static analysis without features
-        run: cargo lints clippy --workspace --benches --tests --examples --quiet --no-default-features
+        run: cargo +nightly-2022-04-20 lints clippy --workspace --benches --tests --examples --quiet --no-default-features
         if: always()
       - name: Static analysis with all features enabled
-        run: cargo lints clippy --workspace --benches --tests --examples --quiet --all-features
+        run: cargo +nightly-2022-04-20 lints clippy --workspace --benches --tests --examples --quiet --all-features
         if: always()
      - name: Verify iroha_data_model still supports no_std
        run: cargo nono check --package iroha_data_model --no-default-features
diff --git a/Dockerfile.ci b/Dockerfile.ci
index e35c8efcfe1..cdc17e2882b 100644
--- a/Dockerfile.ci
+++ b/Dockerfile.ci
@@ -51,6 +51,7 @@ RUN git clone https://github.com/rui314/mold.git; \
 RUN rustup component add llvm-tools-preview clippy; \
     rustup target add wasm32-unknown-unknown; \
     rustup install --profile default nightly-2022-04-20; \
+    rustup component add rust-src --toolchain nightly-2022-04-20-x86_64-unknown-linux-gnu; \
     cargo install cargo-lints cargo-nono webassembly-test-runner grcov

 RUN curl -fsSL https://get.docker.com -o get-docker.sh; \
diff --git a/cli/src/samples.rs b/cli/src/samples.rs
index a1d6bda4384..2f6ce4a682b 100644
--- a/cli/src/samples.rs
+++ b/cli/src/samples.rs
@@ -7,7 +7,9 @@ use iroha_core::{
     genesis::config::GenesisConfiguration,
     kura::config::KuraConfiguration,
     queue::Configuration as QueueConfiguration,
+    smartcontracts::wasm::config::Configuration as WasmConfiguration,
     sumeragi::config::{SumeragiConfiguration, TrustedPeers},
+    wsv::config::Configuration as WsvConfiguration,
 };
 use iroha_crypto::{KeyPair, PublicKey};
 use iroha_data_model::peer::Id as PeerId;
@@ -101,6 +103,13 @@ pub fn get_config(trusted_peers: HashSet, key_pair: Option) ->
             account_private_key: Some(private_key),
             ..GenesisConfiguration::default()
         },
+        wsv: WsvConfiguration {
+            wasm_runtime_config: WasmConfiguration {
+                fuel_limit: 10_000_000,
+                ..WasmConfiguration::default()
+            },
+            ..WsvConfiguration::default()
+        },
         ..Configuration::default()
     }
 }
diff --git a/cli/src/stream.rs b/cli/src/stream.rs
index 15c96287551..f9c94dd2d33 100644
--- a/cli/src/stream.rs
+++ b/cli/src/stream.rs
@@ -121,7 +121,7 @@ pub trait Stream:
 impl StreamMessage for warp::ws::Message {
     fn binary(source: Vec<u8>) -> Self {
-        Self::binary(source)
+        warp::ws::Message::binary(source)
     }

     fn as_bytes(&self) -> &[u8] {
diff --git a/client/Cargo.toml b/client/Cargo.toml
index 813dbc80645..3dacc8a6cd9 100644
--- a/client/Cargo.toml
+++ b/client/Cargo.toml
@@ -37,7 +37,7 @@ base64 = "0.13.0"

 [dev-dependencies]
-iroha_core = { version = "=2.0.0-pre-rc.3", path = "../core", features = ["dev-telemetry", "telemetry"] }
+iroha_core = { version = "=2.0.0-pre-rc.3", path = "../core", features = ["dev-telemetry", "telemetry"]}
 iroha_permissions_validators = { version = "=2.0.0-pre-rc.3", path = "../permissions_validators" }
 iroha = { path = "../cli", features = ["dev-telemetry", "telemetry"] }
diff --git a/client/build.rs b/client/build.rs
new file mode 100644
index 00000000000..94d2e1aed79
--- /dev/null
+++ b/client/build.rs
@@ -0,0 +1,49 @@
+//! Build script which builds the smartcontract used in tests
+//!
+//! Technically this script is used only for testing purposes, but the current cargo implementation
+//! doesn't allow running a build script only for tests or getting info about the current profile from it.
+//! See [cargo issue #4001](https://github.com/rust-lang/cargo/issues/4001)
+
+use std::{env, path::Path, process::Command};
+
+#[allow(clippy::expect_used)]
+fn main() {
+    let manifest_dir =
+        env::var("CARGO_MANIFEST_DIR").expect("Expected `CARGO_MANIFEST_DIR` environment variable");
+    let smartcontract_path =
+        Path::new(&manifest_dir).join("tests/integration/create_nft_for_every_user_smartcontract");
+    let out_dir = env::var_os("OUT_DIR").expect("Expected `OUT_DIR` environment variable");
+
+    println!("cargo:rerun-if-changed=..");
+
+    let fmt = Command::new("cargo")
+        // Removing environment variable to avoid
+        // `error: infinite recursion detected` when running `cargo lints`
+        .env_remove("RUST_RECURSION_COUNT")
+        .current_dir(smartcontract_path.clone())
+        .args(&["+nightly-2022-04-20", "fmt", "--all"])
+        .status()
+        .expect("Failed to run `cargo fmt` on smartcontract");
+    assert!(fmt.success(), "Can't format smartcontract");
+
+    let build = Command::new("cargo")
+        // Removing environment variable to avoid
+        // `error: infinite recursion detected` when running `cargo lints`
+        .env_remove("RUST_RECURSION_COUNT")
+        .env("CARGO_TARGET_DIR", out_dir)
+        .current_dir(smartcontract_path)
+        .args(&[
+            "+nightly-2022-04-20",
+            "build",
+            "--release",
+            "-Z",
+            "build-std",
+            "-Z",
+            "build-std-features=panic_immediate_abort",
+            "--target",
+            "wasm32-unknown-unknown",
+        ])
+        .status()
+        .expect("Failed to run `cargo build` on smartcontract");
+    assert!(build.success(), "Can't build smartcontract")
+}
diff --git a/client/tests/integration/create_nft_for_every_user_smartcontract/Cargo.toml b/client/tests/integration/create_nft_for_every_user_smartcontract/Cargo.toml
new file mode 100644
index 00000000000..d8864eb7e7f
--- /dev/null
+++ b/client/tests/integration/create_nft_for_every_user_smartcontract/Cargo.toml
@@ -0,0 +1,26 @@
+[package]
+name = "create_nft_for_every_user_smartcontract"
+version = "2.0.0-pre-rc.3"
+authors = ["Iroha 2 team "]
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[lib]
+# Smartcontract should be linked dynamically so that it may link to functions exported
+# from the host environment. Also, the host environment executes the smartcontract by
+# calling the function which the smartcontract exports (the entry point of execution)
+crate-type = ['cdylib']
+
+# Empty workspace to fix "current package believes it's in a workspace when it's not"
+[workspace]
+
+[profile.release]
+strip = "debuginfo" # Remove debugging info from the binary
+panic = "abort"     # Panics are transcribed to Traps when compiling for wasm anyways
+lto = true          # Link-time optimization produces a notable decrease in binary size
+opt-level = "z"     # Optimize for size vs speed with "s"/"z" (removes vectorization)
+codegen-units = 1   # Further reduces binary size but increases compilation time
+
+[dependencies]
+iroha_wasm = { path = "../../../../wasm", features = ["debug"] }
diff --git a/client/tests/integration/create_nft_for_every_user_smartcontract/src/lib.rs b/client/tests/integration/create_nft_for_every_user_smartcontract/src/lib.rs
new file mode 100644
index 00000000000..6c0d2a9b30b
--- /dev/null
+++ b/client/tests/integration/create_nft_for_every_user_smartcontract/src/lib.rs
@@ -0,0 +1,70 @@
+//! Smartcontract which creates a new NFT for every user
+//!
+//! This module isn't included in the build tree,
+//! but is instead built by `client/build.rs`
+
+#![no_std]
+#![no_main]
+#![allow(clippy::all)]
+
+extern crate alloc;
+
+use alloc::{format, string::ToString, vec::Vec};
+use core::str::FromStr;
+
+use iroha_wasm::{data_model::prelude::*, DebugUnwrapExt, Execute};
+
+#[iroha_wasm::iroha_wasm]
+fn smartcontract_entry_point(_account_id: AccountId) {
+    let query = QueryBox::FindAllAccounts(FindAllAccounts {});
+    let accounts: Vec<Account> = query.execute().try_into().dbg_unwrap();
+
+    let limits = MetadataLimits::new(256, 256);
+
+    for account in accounts {
+        let mut metadata = Metadata::new();
+        let name = format!(
+            "nft_for_{}_in_{}",
+            account.id().name,
+            account.id().domain_id
+        )
+        .parse()
+        .dbg_unwrap();
+        metadata
+            .insert_with_limits(name, true.into(), limits)
+            .dbg_unwrap();
+
+        let nft_id = generate_new_nft_id(account.id());
+        let nft_definition = AssetDefinition::store(nft_id.clone())
+            .mintable_once()
+            .with_metadata(metadata)
+            .build();
+        let account_nft_id = <Asset as Identifiable>::Id::new(nft_id, account.id().clone());
+
+        Instruction::Register(RegisterBox::new(nft_definition)).execute();
+        Instruction::SetKeyValue(SetKeyValueBox::new(
+            account_nft_id,
+            Name::from_str("has_this_nft").dbg_unwrap(),
+            Value::Bool(true),
+        ))
+        .execute();
+    }
+}
+
+fn generate_new_nft_id(account_id: &<Account as Identifiable>::Id) -> AssetDefinitionId {
+    let query = QueryBox::FindAssetsByAccountId(FindAssetsByAccountId::new(account_id.clone()));
+    let assets: Vec<Asset> = query.execute().try_into().dbg_unwrap();
+
+    let new_number = assets
+        .into_iter()
+        .filter(|asset| asset.id().definition_id.to_string().starts_with("nft_"))
+        .count()
+        + 1;
+
+    format!(
+        "nft_number_{}_for_{}#{}",
+        new_number, account_id.name, account_id.domain_id
+    )
+    .parse()
+    .dbg_unwrap()
+}
diff --git a/client/tests/integration/triggers/time_trigger.rs b/client/tests/integration/triggers/time_trigger.rs
index b390a857cbb..85590873ab9 100644
--- a/client/tests/integration/triggers/time_trigger.rs
+++ b/client/tests/integration/triggers/time_trigger.rs
@@ -1,11 +1,11 @@
 #![allow(clippy::restriction)]

-use std::{str::FromStr as _, time::Duration};
+use std::{fs, str::FromStr as _, time::Duration};

-use eyre::Result;
+use eyre::{Context, Result};
 use iroha_client::client::{self, Client};
 use iroha_core::block::DEFAULT_CONSENSUS_ESTIMATION_MS;
-use iroha_data_model::prelude::*;
+use iroha_data_model::{prelude::*, transaction::WasmSmartContract};
 use test_network::{Peer as TestPeer, *};

 /// Macro to abort compilation, if `e` isn't `true`
@@ -52,7 +52,12 @@ fn time_trigger_execution_count_error_should_be_less_than_10_percent() -> Result
     ));
     test_client.submit(register_trigger)?;

-    submit_sample_isi_on_every_block_commit(&mut test_client, &account_id, 3)?;
+    submit_sample_isi_on_every_block_commit(
+        &mut test_client,
+        &account_id,
+        Duration::from_secs(1),
+        3,
+    )?;

     std::thread::sleep(Duration::from_millis(DEFAULT_CONSENSUS_ESTIMATION_MS));
     let finish_time = current_time();
@@ -101,6 +106,7 @@ fn change_asset_metadata_after_1_sec() -> Result<()> {
     submit_sample_isi_on_every_block_commit(
         &mut test_client,
         &account_id,
+        Duration::from_secs(1),
         usize::try_from(PERIOD_MS / DEFAULT_CONSENSUS_ESTIMATION_MS + 1)?,
     )?;

@@ -163,6 +169,90 @@ fn pre_commit_trigger_should_be_executed() -> Result<()> {
     Ok(())
 }

+#[test]
+fn mint_nft_for_every_user_every_1_sec() -> Result<()> {
+    const TRIGGER_PERIOD_MS: u64 = 1000;
+    const EXPECTED_COUNT: u64 = 4;
+
+    let (_rt, _peer, mut test_client) = <TestPeer>::start_test_with_runtime();
+    wait_for_genesis_committed(&vec![test_client.clone()], 0);
+
+    let alice_id = "alice@wonderland"
+        .parse::<<Account as Identifiable>::Id>()
+        .expect("Valid");
+
+    let accounts: Vec<AccountId> = vec![
+        alice_id.clone(),
+        "mad_hatter@wonderland".parse().expect("Valid"),
+        "cheshire_cat@wonderland".parse().expect("Valid"),
+        "caterpillar@wonderland".parse().expect("Valid"),
+        "white_rabbit@wonderland".parse().expect("Valid"),
+    ];
+
+    // Registering accounts
+    let register_accounts = accounts
+        .iter()
+        .skip(1) // Alice has already been registered in genesis
+        .cloned()
+        .map(|account_id| RegisterBox::new(Account::new(account_id, [])).into())
+        .collect::<Vec<_>>();
+    test_client.submit_all_blocking(register_accounts)?;
+
+    // Reading wasm smartcontract
+    let wasm = fs::read(concat!(
+        env!("OUT_DIR"),
+        "/wasm32-unknown-unknown/release/create_nft_for_every_user_smartcontract.wasm"
+    ))
+    .wrap_err("Can't read smartcontract")?;
+    println!("wasm size is {} bytes", wasm.len());
+
+    // Registering trigger
+    let start_time = current_time();
+    let schedule =
+        TimeSchedule::starting_at(start_time).with_period(Duration::from_millis(TRIGGER_PERIOD_MS));
+    let register_trigger = RegisterBox::new(Trigger::new(
+        "mint_nft_for_all".parse()?,
+        Action::new(
+            Executable::Wasm(WasmSmartContract { raw_data: wasm }),
+            Repeats::Indefinitely,
+            alice_id.clone(),
+            EventFilter::Time(TimeEventFilter(ExecutionTime::Schedule(schedule))),
+        ),
+    ));
+    test_client.submit(register_trigger)?;
+
+    // Time trigger will be executed on block commits, so we have to produce some transactions
+    submit_sample_isi_on_every_block_commit(
+        &mut test_client,
+        &alice_id,
+        Duration::from_millis(TRIGGER_PERIOD_MS),
+        usize::try_from(EXPECTED_COUNT)?,
+    )?;
+
+    // Checking results
+    for account_id in accounts {
+        let start_pattern = "nft_number_";
+        let end_pattern = format!("_for_{}#{}", account_id.name, account_id.domain_id);
+        let assets = test_client.request(client::asset::by_account_id(account_id.clone()))?;
+        let count: u64 = assets
+            .into_iter()
+            .filter(|asset| {
+                let s = asset.id().definition_id.to_string();
+                s.starts_with(&start_pattern) && s.ends_with(&end_pattern)
+            })
+            .count()
+            .try_into()
+            .expect("`usize` should always fit in `u64`");
+
+        assert!(
+            count >= EXPECTED_COUNT,
+            "{account_id} has {count} NFT, but at least {EXPECTED_COUNT} expected",
+        );
+    }
+
+    Ok(())
+}
+
 /// Get asset numeric value
 fn get_asset_value(client: &mut Client, asset_id: AssetId) -> Result {
     let asset = client.request(client::asset::by_id(asset_id))?;
@@ -173,6 +263,7 @@ fn get_asset_value(client: &mut Client, asset_id: AssetId) -> Result
 fn submit_sample_isi_on_every_block_commit(
     test_client: &mut Client,
     account_id: &AccountId,
+    timeout: Duration,
     times: usize,
 ) -> Result<()> {
     let block_filter =
@@ -189,7 +280,7 @@ fn submit_sample_isi_on_every_block_commit(
         })
         .take(times)
     {
-        std::thread::sleep(Duration::from_secs(1));
+        std::thread::sleep(timeout);
         // ISI just to create a new block
         let sample_isi = SetKeyValueBox::new(
             account_id.clone(),
diff --git a/core/src/smartcontracts/wasm.rs b/core/src/smartcontracts/wasm.rs
index 125d768c154..e78e04a335c 100644
--- a/core/src/smartcontracts/wasm.rs
+++ b/core/src/smartcontracts/wasm.rs
@@ -35,6 +35,8 @@ pub const WASM_MAIN_FN_NAME: &str = "_iroha_wasm_main";
 pub const EXECUTE_ISI_FN_NAME: &str = "execute_instruction";
 /// Name of the imported function to execute queries
 pub const EXECUTE_QUERY_FN_NAME: &str = "execute_query";
+/// Name of the imported function to debug print an object
+pub const DBG_FN_NAME: &str = "dbg";

 /// `WebAssembly` execution error type
 #[derive(Debug, thiserror::Error)]
@@ -312,7 +314,7 @@ impl<'wrld, W: WorldTrait> Runtime<'wrld, W> {
     /// # Warning
     ///
     /// This function doesn't take ownership of the provided allocation
-    /// but it does tranasfer ownership of the result to the caller
+    /// but it does transfer ownership of the result to the caller
     ///
     /// # Errors
     ///
@@ -344,15 +346,36 @@ impl<'wrld, W: WorldTrait> Runtime<'wrld, W> {
         Ok(())
     }

+    /// Host-defined function which prints the given string. When calling this function, the module
+    /// serializes the string to linear memory and provides the offset and length as parameters
+    ///
+    /// # Warning
+    ///
+    /// This function doesn't take ownership of the provided allocation
+    ///
+    /// # Errors
+    ///
+    /// If string decoding fails
+    #[allow(clippy::print_stdout)]
+    fn dbg(mut caller: Caller>, offset: WasmUsize, len: WasmUsize) -> Result<(), Trap> {
+        let memory = Self::get_memory(&mut caller)?;
+        let string_mem_range = offset as usize..(offset + len) as usize;
+        let mut string_bytes = &memory.data(&caller)[string_mem_range];
+        let s = String::decode(&mut string_bytes).map_err(|error| Trap::new(error.to_string()))?;
+        println!("{s}");
+        Ok(())
+    }
+
     fn create_linker(engine: &Engine) -> Result>, Error> {
         let mut linker = Linker::new(engine);

         linker
             .func_wrap("iroha", EXECUTE_ISI_FN_NAME, Self::execute_instruction)
+            .and_then(|l| l.func_wrap("iroha", EXECUTE_QUERY_FN_NAME, Self::execute_query))
             .map_err(Error::Initialization)?;

         linker
-            .func_wrap("iroha", EXECUTE_QUERY_FN_NAME, Self::execute_query)
+            .func_wrap("iroha", DBG_FN_NAME, Self::dbg)
             .map_err(Error::Initialization)?;

         Ok(linker)
diff --git a/core/src/sumeragi/fault.rs b/core/src/sumeragi/fault.rs
index 143fa6431f4..6d5e25a4987 100644
--- a/core/src/sumeragi/fault.rs
+++ b/core/src/sumeragi/fault.rs
@@ -752,7 +752,7 @@ impl
         }

         if let Err(error) = self.wsv.apply(block.clone()).await {
-            warn!(%error, %block_hash, "Failed to apply block on WSV");
+            warn!(?error, %block_hash, "Failed to apply block on WSV");
         }
         let previous_role = self.topology.role(&self.peer_id);
         self.topology.apply_block(block_hash);
diff --git a/core/src/sumeragi/network_topology.rs b/core/src/sumeragi/network_topology.rs
index a98f85f9f93..870779f72b6 100644
--- a/core/src/sumeragi/network_topology.rs
+++ b/core/src/sumeragi/network_topology.rs
@@ -361,7 +361,7 @@ impl Topology {

     /// Sorted peers that this topology has.
     pub fn sorted_peers(&self) -> &[PeerId] {
-        &self.sorted_peers[..]
+        &*self.sorted_peers
     }

     /// Block hash on which this topology is based.
diff --git a/core/src/wsv.rs b/core/src/wsv.rs
index 5c776e7e5bb..9798cf60497 100644
--- a/core/src/wsv.rs
+++ b/core/src/wsv.rs
@@ -199,7 +199,8 @@ impl WorldStateView {
                 })?;
             }
             Executable::Wasm(bytes) => {
-                let mut wasm_runtime = wasm::Runtime::new()?;
+                let mut wasm_runtime =
+                    wasm::Runtime::from_configuration(self.config.wasm_runtime_config)?;
                 wasm_runtime.execute(self, authority, bytes)?;
             }
         }
@@ -853,6 +854,8 @@ pub mod config {
     use iroha_data_model::{metadata::Limits as MetadataLimits, LengthLimits};
     use serde::{Deserialize, Serialize};

+    use crate::smartcontracts::wasm;
+
     const DEFAULT_METADATA_LIMITS: MetadataLimits =
         MetadataLimits::new(2_u32.pow(20), 2_u32.pow(12));
     const DEFAULT_IDENT_LENGTH_LIMITS: LengthLimits = LengthLimits::new(1, 2_u32.pow(7));
@@ -872,6 +875,8 @@ pub mod config {
         pub domain_metadata_limits: MetadataLimits,
         /// [`LengthLimits`] for the number of chars in identifiers that can be stored in the WSV.
         pub ident_length_limits: LengthLimits,
+        /// [`WASM Runtime`](wasm::Runtime) configuration
+        pub wasm_runtime_config: wasm::config::Configuration,
     }

     impl Default for Configuration {
@@ -882,6 +887,7 @@ pub mod config {
                 account_metadata_limits: DEFAULT_METADATA_LIMITS,
                 domain_metadata_limits: DEFAULT_METADATA_LIMITS,
                 ident_length_limits: DEFAULT_IDENT_LENGTH_LIMITS,
+                wasm_runtime_config: wasm::config::Configuration::default(),
             }
         }
     }
diff --git a/docs/source/references/config.md b/docs/source/references/config.md
index 29efede4cf8..99c1da03540 100644
--- a/docs/source/references/config.md
+++ b/docs/source/references/config.md
@@ -97,6 +97,10 @@ The following is the default configuration used by Iroha.
     "IDENT_LENGTH_LIMITS": {
       "min": 1,
       "max": 128
+    },
+    "WASM_RUNTIME_CONFIG": {
+      "FUEL_LIMIT": 1000000,
+      "MAX_MEMORY": 524288000
     }
   },
   "NETWORK": {
@@ -766,6 +770,10 @@ Has type `WorldStateViewConfiguration`. Can be configured via environment variab
   "IDENT_LENGTH_LIMITS": {
     "max": 128,
     "min": 1
+  },
+  "WASM_RUNTIME_CONFIG": {
+    "FUEL_LIMIT": 1000000,
+    "MAX_MEMORY": 524288000
   }
 }
 ```
@@ -835,3 +843,16 @@ Has type `LengthLimits`. Can be configured via environment variable `WSV_IDENT_L
 }
 ```

+### `wsv.wasm_runtime_config`
+
+[`WASM Runtime`](wasm::Runtime) configuration
+
+Has type `wasm::config::Configuration`. Can be configured via environment variable `WSV_WASM_RUNTIME_CONFIG`
+
+```json
+{
+  "FUEL_LIMIT": 1000000,
+  "MAX_MEMORY": 524288000
+}
+```
+
diff --git a/futures/src/lib.rs b/futures/src/lib.rs
index ea3dce5a6e2..e1977ffd206 100644
--- a/futures/src/lib.rs
+++ b/futures/src/lib.rs
@@ -122,8 +122,8 @@ impl Future for TelemetryFuture {
         let id = self.id;
         let now = Instant::now();

-        // SAFETY: This is safe because `future` is a field of pinned structure and therefore is also pinned
         #[allow(unsafe_code)]
+        // SAFETY: This is safe because `future` is a field of pinned structure and therefore is also pinned
         let future = unsafe { self.map_unchecked_mut(|telemetry| &mut telemetry.future) };
         let result = future.poll(cx);

diff --git a/wasm/Cargo.toml b/wasm/Cargo.toml
index 62d2d21d0c0..e29e9acb699 100644
--- a/wasm/Cargo.toml
+++ b/wasm/Cargo.toml
@@ -17,6 +17,10 @@ members = [
     "derive",
 ]

+[features]
+# Enables debugging tools such as `dbg()` and `DebugUnwrapExt`
+debug = []
+
 [dependencies]
 iroha_data_model = { version = "=2.0.0-pre-rc.3", path = "../data_model", default-features = false }
 iroha_wasm_derive = { path = "derive" }
diff --git a/wasm/derive/src/lib.rs b/wasm/derive/src/lib.rs
index 6d236229694..92c1ef2ce1a 100644
--- a/wasm/derive/src/lib.rs
+++ b/wasm/derive/src/lib.rs
@@ -34,6 +34,7 @@ pub fn iroha_wasm(_: TokenStream, item: TokenStream) -> TokenStream {
             #fn_name(iroha_wasm::_decode_from_raw::(ptr, len))
         }

+        #[allow(clippy::needless_pass_by_value)]
         #(#attrs)*
         #vis #sig
         #block
diff --git a/wasm/src/debug.rs b/wasm/src/debug.rs
new file mode 100644
index 00000000000..1f43fccaf51
--- /dev/null
+++ b/wasm/src/debug.rs
@@ -0,0 +1,56 @@
+use core::fmt::Debug;
+
+use super::*;
+
+/// Print `obj` in debug representation to stdout
+pub fn dbg<T: Debug + ?Sized>(obj: &T) {
+    #[cfg(not(test))]
+    use host::dbg as host_dbg;
+    #[cfg(test)]
+    use tests::_dbg as host_dbg;
+
+    let s = format!("{:?}", obj);
+    // Safety: `host_dbg` doesn't take ownership of its pointer parameter
+    unsafe { encode_and_execute(&s, host_dbg) }
+}
+
+/// Extension implemented for `Result` and `Option` to provide unwrapping with an error message,
+/// because the basic `unwrap()` does not print the error due to specific panic handling in the WASM Runtime
+pub trait DebugUnwrapExt {
+    type Output;
+
+    /// Just like `unwrap()` but prints the error message before the panic
+    fn dbg_unwrap(self) -> Self::Output;
+}
+
+impl<T, E: Debug> DebugUnwrapExt for Result<T, E> {
+    type Output = T;
+
+    #[allow(clippy::panic)]
+    fn dbg_unwrap(self) -> Self::Output {
+        match self {
+            Ok(out) => out,
+            Err(err) => {
+                dbg(&format!(
+                    "WASM execution panicked at `called Result::dbg_unwrap()` on an `Err` value: {err:?}",
+                ));
+                panic!("");
+            }
+        }
+    }
+}
+
+impl<T> DebugUnwrapExt for Option<T> {
+    type Output = T;
+
+    #[allow(clippy::panic)]
+    fn dbg_unwrap(self) -> Self::Output {
+        match self {
+            Some(out) => out,
+            None => {
+                dbg("WASM execution panicked at 'called `Option::dbg_unwrap()` on a `None` value'");
+                panic!("");
+            }
+        }
+    }
+}
diff --git a/wasm/src/lib.rs b/wasm/src/lib.rs
index db9f6d1a4c8..a712cfc3074 100644
--- a/wasm/src/lib.rs
+++ b/wasm/src/lib.rs
@@ -21,6 +21,11 @@ pub use iroha_data_model as data_model;
 pub use iroha_wasm_derive::iroha_wasm;
 use parity_scale_codec::{Decode, Encode};

+#[cfg(feature = "debug")]
+mod debug;
+#[cfg(feature = "debug")]
+pub use debug::*;
+
 #[global_allocator]
 static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;

@@ -65,6 +70,7 @@ impl Execute for data_model::isi::Instruction {
         unsafe { encode_and_execute(self, host_execute_instruction) };
     }
 }
+
 impl Execute for data_model::query::QueryBox {
     type Result = Value;

@@ -112,6 +118,16 @@ mod host {
     /// This function doesn't take ownership of the provided allocation
     /// but it does transfer ownership of the result to the caller
     pub(super) fn execute_instruction(ptr: WasmUsize, len: WasmUsize);
+
+    /// Prints a string to the standard output by providing the offset and length
+    /// into WebAssembly's linear memory where the string is stored
+    ///
+    /// # Warning
+    ///
+    /// This function doesn't take ownership of the provided allocation
+    /// and does not return a result to the caller
+    #[cfg(feature = "debug")]
+    pub(super) fn dbg(ptr: WasmUsize, len: WasmUsize);
     }
 }

@@ -265,6 +281,10 @@ mod tests {
         assert_eq!(get_test_instruction(), instruction.unwrap());
     }

+    #[cfg(feature = "debug")]
+    #[no_mangle]
+    pub(super) unsafe extern "C" fn _dbg(_ptr: WasmUsize, _len: WasmUsize) {}
+
     #[no_mangle]
     pub(super) unsafe extern "C" fn _iroha_wasm_execute_query_mock(
         ptr: WasmUsize,