Enhance TxDatum generation
jpraynaud committed Feb 17, 2023
1 parent 310eedc commit c58daba
Showing 3 changed files with 130 additions and 27 deletions.
4 changes: 3 additions & 1 deletion mithril-common/src/chain_observer/mod.rs
@@ -10,4 +10,6 @@ pub use cli_observer::{CardanoCliChainObserver, CardanoCliRunner};
#[cfg(any(test, feature = "test_only"))]
pub use fake_observer::FakeObserver;
pub use interface::{ChainObserver, ChainObserverError};
pub use model::{ChainAddress, TxDatum};
pub use model::{
ChainAddress, TxDatum, TxDatumBuilder, TxDatumError, TxDatumFieldTypeName, TxDatumFieldValue,
};
132 changes: 113 additions & 19 deletions mithril-common/src/chain_observer/model.rs
@@ -1,31 +1,33 @@
use serde::Serialize;
use serde_json::Value;
use std::{collections::HashMap, error::Error as StdError};
use strum_macros::Display;
use thiserror::Error;

/// [ChainAddress] represents an on chain address
/// [ChainAddress] represents an on chain address.
pub type ChainAddress = String;

/// [TxDatum] related errors.
#[derive(Debug, Error)]
pub enum TxDatumError {
/// Generic [TxDatum] error.
#[error("general error {0}")]
_General(Box<dyn StdError + Sync + Send>),

/// Error raised when the content could not be parsed.
#[error("could not parse content: {0}")]
#[error("could not parse tx datum: {0}")]
InvalidContent(Box<dyn StdError + Sync + Send>),

/// Error raised when building the tx datum failed.
#[error("could not build tx datum: {0}")]
Build(serde_json::Error),
}

/// [TxDatum] represents transaction Datum
/// [TxDatum] represents transaction Datum.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TxDatum(pub String);

impl TxDatum {
/// Retrieves the nth field of the datum with given type
pub fn get_nth_field_by_type(
&self,
type_name: &str,
type_name: &TxDatumFieldTypeName,
index: usize,
) -> Result<Value, Box<dyn StdError>> {
let tx_datum_raw = &self.0;
@@ -48,7 +50,7 @@ impl TxDatum {
// 3- Filter the vec (keep the ones that match the given type), and retrieve the nth entry of this filtered vec
let field_value = fields
.iter()
.filter(|&field| field.get(type_name).is_some())
.filter(|&field| field.get(type_name.to_string()).is_some())
.nth(index)
.ok_or_else(|| {
TxDatumError::InvalidContent(
@@ -58,19 +60,111 @@
.into(),
)
})?
.get(type_name)
.get(type_name.to_string())
.unwrap();

Ok(field_value.to_owned())
}
}

/// [TxDatumFieldTypeName] represents a field type name of TxDatum.
#[derive(Debug, Serialize, Hash, PartialEq, Eq, Display)]
#[serde(rename_all = "lowercase")]
#[strum(serialize_all = "lowercase")]
pub enum TxDatumFieldTypeName {
/// Bytes datum field type name.
Bytes,
/// Integer datum field type name.
#[allow(dead_code)]
Int,
}

/// [TxDatumFieldValue] represents a field value of TxDatum.
#[derive(Debug, Serialize)]
#[serde(untagged)]
pub enum TxDatumFieldValue {
/// Bytes datum field value.
Bytes(String),
/// Integer datum field value.
#[allow(dead_code)]
Int(u32),
}

/// [TxDatumBuilder] is a [TxDatum] builder utility.
#[derive(Debug, Serialize)]
pub struct TxDatumBuilder {
constructor: usize,
fields: Vec<HashMap<TxDatumFieldTypeName, TxDatumFieldValue>>,
}

impl TxDatumBuilder {
/// [TxDatumBuilder] factory
pub fn new() -> Self {
Self {
constructor: 0,
fields: Vec::new(),
}
}

/// Add a field to the builder
pub fn add_field(
&mut self,
field_type: TxDatumFieldTypeName,
field_value: TxDatumFieldValue,
) -> &mut TxDatumBuilder {
let mut field = HashMap::new();
field.insert(field_type, field_value);
self.fields.push(field);

self
}

/// Build a [TxDatum]
pub fn build(&self) -> Result<TxDatum, TxDatumError> {
Ok(TxDatum(
serde_json::to_string(&self).map_err(TxDatumError::Build)?,
))
}
}

impl Default for TxDatumBuilder {
fn default() -> Self {
Self::new()
}
}

#[cfg(test)]
mod test {
use super::*;

fn dummy_tx_datum() -> TxDatum {
TxDatum("{\"constructor\":0,\"fields\":[{\"bytes\":\"bytes0\"}, {\"int\":0}, {\"int\":1}, {\"bytes\":\"bytes1\"}, {\"bytes\":\"bytes2\"}, {\"int\":2}]}".to_string())
let mut tx_datum_builder = TxDatumBuilder::new();
let tx_datum = tx_datum_builder
.add_field(
TxDatumFieldTypeName::Bytes,
TxDatumFieldValue::Bytes("bytes0".to_string()),
)
.add_field(TxDatumFieldTypeName::Int, TxDatumFieldValue::Int(0))
.add_field(TxDatumFieldTypeName::Int, TxDatumFieldValue::Int(1))
.add_field(
TxDatumFieldTypeName::Bytes,
TxDatumFieldValue::Bytes("bytes1".to_string()),
)
.add_field(
TxDatumFieldTypeName::Bytes,
TxDatumFieldValue::Bytes("bytes2".to_string()),
)
.add_field(TxDatumFieldTypeName::Int, TxDatumFieldValue::Int(2))
.build()
.expect("tx_datum build should not fail");
tx_datum
}

#[test]
fn test_build_tx_datum() {
let tx_datum = dummy_tx_datum();
let tx_datum_expected = TxDatum("{\"constructor\":0,\"fields\":[{\"bytes\":\"bytes0\"},{\"int\":0},{\"int\":1},{\"bytes\":\"bytes1\"},{\"bytes\":\"bytes2\"},{\"int\":2}]}".to_string());
assert_eq!(tx_datum_expected, tx_datum);
}

#[test]
@@ -79,29 +173,29 @@ mod test {
assert_eq!(
"bytes0",
tx_datum
.get_nth_field_by_type("bytes", 0)
.get_nth_field_by_type(&TxDatumFieldTypeName::Bytes, 0)
.unwrap()
.as_str()
.unwrap()
);
assert_eq!(
"bytes1",
tx_datum
.get_nth_field_by_type("bytes", 1)
.get_nth_field_by_type(&TxDatumFieldTypeName::Bytes, 1)
.unwrap()
.as_str()
.unwrap()
);
assert_eq!(
"bytes2",
tx_datum
.get_nth_field_by_type("bytes", 2)
.get_nth_field_by_type(&TxDatumFieldTypeName::Bytes, 2)
.unwrap()
.as_str()
.unwrap()
);
tx_datum
.get_nth_field_by_type("bytes", 100)
.get_nth_field_by_type(&TxDatumFieldTypeName::Bytes, 100)
.expect_err("should have returned an error");
}

@@ -111,29 +205,29 @@
assert_eq!(
0,
tx_datum
.get_nth_field_by_type("int", 0)
.get_nth_field_by_type(&TxDatumFieldTypeName::Int, 0)
.unwrap()
.as_u64()
.unwrap()
);
assert_eq!(
1,
tx_datum
.get_nth_field_by_type("int", 1)
.get_nth_field_by_type(&TxDatumFieldTypeName::Int, 1)
.unwrap()
.as_u64()
.unwrap()
);
assert_eq!(
2,
tx_datum
.get_nth_field_by_type("int", 2)
.get_nth_field_by_type(&TxDatumFieldTypeName::Int, 2)
.unwrap()
.as_u64()
.unwrap()
);
tx_datum
.get_nth_field_by_type("int", 100)
.get_nth_field_by_type(&TxDatumFieldTypeName::Int, 100)
.expect_err("should have returned an error");
}
}
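
For readers skimming the diff, the following is a minimal usage sketch, not part of the commit, of how the new builder API fits together. It assumes the crate is consumed as mithril_common and relies on the re-exports added in chain_observer/mod.rs above; the field values ("abc123", 42) are purely illustrative.

use mithril_common::chain_observer::{TxDatumBuilder, TxDatumFieldTypeName, TxDatumFieldValue};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Assemble a datum with one "bytes" field and one "int" field.
    let mut builder = TxDatumBuilder::new();
    let tx_datum = builder
        .add_field(
            TxDatumFieldTypeName::Bytes,
            TxDatumFieldValue::Bytes("abc123".to_string()),
        )
        .add_field(TxDatumFieldTypeName::Int, TxDatumFieldValue::Int(42))
        .build()?;

    // The builder serializes to the same JSON layout the tests assert on:
    // {"constructor":0,"fields":[{"bytes":"abc123"},{"int":42}]}
    println!("{}", tx_datum.0);

    // Fields are read back by type and position.
    let first_bytes = tx_datum.get_nth_field_by_type(&TxDatumFieldTypeName::Bytes, 0)?;
    assert_eq!("abc123", first_bytes.as_str().unwrap());
    Ok(())
}

The benefit of the builder is that callers no longer hand-format the {"constructor":0,"fields":[...]} JSON string, as the era adapter test did before this commit.
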
21 changes: 14 additions & 7 deletions mithril-common/src/era/adapters/cardano_chain.rs
@@ -1,5 +1,5 @@
use crate::{
chain_observer::{ChainAddress, ChainObserver},
chain_observer::{ChainAddress, ChainObserver, TxDatumFieldTypeName},
crypto_helper::{
key_decode_hex, EraMarkersSigner, EraMarkersVerifier, EraMarkersVerifierSignature,
EraMarkersVerifierVerificationKey,
@@ -127,7 +131,11 @@ impl EraReaderAdapter for CardanoChainAdapter {
.await?;
let markers_list = tx_datums
.into_iter()
.filter_map(|datum| datum.get_nth_field_by_type("bytes", 0).ok())
.filter_map(|datum| {
datum
.get_nth_field_by_type(&TxDatumFieldTypeName::Bytes, 0)
.ok()
})
.filter_map(|field_value| field_value.as_str().map(|s| s.to_string()))
.filter_map(|field_value_str| key_decode_hex(&field_value_str).ok())
.filter_map(|era_markers_payload: EraMarkersPayload| {
@@ -144,7 +148,7 @@

#[cfg(test)]
mod test {
use crate::chain_observer::{FakeObserver, TxDatum};
use crate::chain_observer::{FakeObserver, TxDatum, TxDatumBuilder, TxDatumFieldValue};
use crate::crypto_helper::{key_encode_hex, EraMarkersSigner};
use crate::entities::Epoch;

@@ -154,10 +158,13 @@ mod test {
payloads
.into_iter()
.map(|payload| {
TxDatum(format!(
"{{\"constructor\":0,\"fields\":[{{\"bytes\":\"{}\"}}]}}",
key_encode_hex(payload).unwrap()
))
TxDatumBuilder::new()
.add_field(
TxDatumFieldTypeName::Bytes,
TxDatumFieldValue::Bytes(key_encode_hex(payload).unwrap()),
)
.build()
.unwrap()
})
.collect()
}
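
To make the adapter change concrete, here is a hedged sketch, not part of the commit, of the per-datum extraction step that the filter_map chain above performs, under the same mithril_common re-export assumption as the earlier sketch. The helper name extract_first_bytes_field and the "deadbeef" payload are hypothetical.

use mithril_common::chain_observer::{TxDatum, TxDatumBuilder, TxDatumFieldTypeName, TxDatumFieldValue};

/// Mirrors the first two filter_map steps above: take the first "bytes" field
/// of a datum, if any, and return it as an owned String.
fn extract_first_bytes_field(datum: &TxDatum) -> Option<String> {
    datum
        .get_nth_field_by_type(&TxDatumFieldTypeName::Bytes, 0)
        .ok()
        .and_then(|value| value.as_str().map(|s| s.to_string()))
}

fn main() {
    // A datum carrying a hex-encoded payload, built like the test helper above.
    let mut builder = TxDatumBuilder::new();
    let datum = builder
        .add_field(
            TxDatumFieldTypeName::Bytes,
            TxDatumFieldValue::Bytes("deadbeef".to_string()),
        )
        .build()
        .expect("building the datum should not fail");

    // In the adapter, this string is then fed to key_decode_hex to recover
    // the era markers payload.
    assert_eq!(
        Some("deadbeef".to_string()),
        extract_first_bytes_field(&datum)
    );
}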