fix: Upgrade Pallas to fix tx hash mismatch #312

Merged · 2 commits · Jun 4, 2022

47 changes: 25 additions & 22 deletions Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion Cargo.toml
@@ -12,7 +12,7 @@ authors = ["Santiago Carmuega <santiago@carmuega.me>"]
 
 
 [dependencies]
-pallas = "0.9.1"
+pallas = "0.10.0"
 # pallas = { path = "../pallas/pallas" }
 hex = "0.4.3"
 net2 = "0.2.37"

5 changes: 3 additions & 2 deletions src/mapper/collect.rs
@@ -1,5 +1,6 @@
-use pallas::ledger::primitives::alonzo::{
-    AuxiliaryData, Block, Multiasset, TransactionInput, TransactionOutput, Value,
+use pallas::ledger::primitives::{
+    alonzo::{AuxiliaryData, Block, Multiasset, TransactionInput, TransactionOutput, Value},
+    ToHash,
 };
 
 use crate::{

2 changes: 1 addition & 1 deletion src/mapper/map.rs
@@ -8,7 +8,7 @@ use pallas::ledger::primitives::alonzo::{
     TransactionOutput, Value,
 };
 use pallas::ledger::primitives::alonzo::{NetworkId, TransactionBody, TransactionBodyComponent};
-use pallas::ledger::primitives::ToCanonicalJson;
+use pallas::ledger::primitives::{ToCanonicalJson, ToHash};
 
 use pallas::network::miniprotocols::Point;
 use serde_json::{json, Value as JsonValue};

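As in collect.rs above, the only source change in the mapper is bringing the new ToHash trait into scope; the mapper presumably now derives transaction ids through that trait rather than through the old crypto helpers, which is the behavior change behind the "tx hash mismatch" in the PR title. A minimal sketch of that call, assuming pallas 0.10 implements ToHash for the Alonzo TransactionBody (the helper below is illustrative, not part of this diff):

use pallas::ledger::primitives::{alonzo::TransactionBody, ToHash};

// Illustrative helper, not part of this PR. Assumes TransactionBody: ToHash
// in pallas 0.10, so the tx id comes from the library's own hashing of the
// body rather than from a hand-rolled re-serialization.
fn tx_id_hex(body: &TransactionBody) -> String {
    let hash = body.to_hash();
    hex::encode(&hash)
}
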
16 changes: 8 additions & 8 deletions src/mapper/shelley.rs
@@ -1,7 +1,7 @@
-use pallas::ledger::primitives::Fragment;
+use pallas::ledger::primitives::{Fragment, ToHash};
 
 use pallas::ledger::primitives::alonzo::{
-    self, crypto, AuxiliaryData, Block, Certificate, Metadata, Multiasset, TransactionBody,
+    self, AuxiliaryData, Block, Certificate, Metadata, Multiasset, TransactionBody,
     TransactionBodyComponent, TransactionInput, TransactionOutput, TransactionWitnessSet, Value,
 };
 
@@ -272,7 +272,7 @@ impl EventWriter {
 
     #[deprecated(note = "use crawl_from_shelley_cbor instead")]
     pub fn crawl_with_cbor(&self, block: &Block, cbor: &[u8]) -> Result<(), Error> {
-        let hash = crypto::hash_block_header(&block.header);
+        let hash = block.header.to_hash();
 
         let child = self.child_writer(EventContext {
             block_hash: Some(hex::encode(&hash)),
@@ -287,7 +287,7 @@
 
     #[deprecated(note = "use crawl_from_shelley_cbor instead")]
     pub fn crawl(&self, block: &Block) -> Result<(), Error> {
-        let hash = crypto::hash_block_header(&block.header);
+        let hash = block.header.to_hash();
 
         let child = self.child_writer(EventContext {
             block_hash: Some(hex::encode(&hash)),
@@ -305,13 +305,13 @@
     /// Entry-point to start crawling a blocks for events. Meant to be used when
     /// we already have a decoded block (for example, N2C). The raw CBOR is also
    /// passed through in case we need to attach it to outbound events.
-    pub fn crawl_shelley_with_cbor(
+    pub fn crawl_shelley_with_cbor<'b>(
        &self,
-        block: &Block,
-        cbor: &[u8],
+        block: &'b Block,
+        cbor: &'b [u8],
        era: Era,
    ) -> Result<(), Error> {
-        let hash = crypto::hash_block_header(&block.header);
+        let hash = block.header.to_hash();
 
         let child = self.child_writer(EventContext {
             block_hash: Some(hex::encode(&hash)),

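The pattern repeats in all three crawl methods: the free function crypto::hash_block_header is gone, and the header now hashes itself through the ToHash trait imported at the top of the file. A minimal sketch of the new call (the wrapper function is illustrative, not part of this diff):

use pallas::ledger::primitives::{alonzo::Block, ToHash};

// Illustrative wrapper, not part of this PR: compute the block hash the way
// the updated crawl methods do, via ToHash on the header instead of
// crypto::hash_block_header.
fn block_hash_hex(block: &Block) -> String {
    let hash = block.header.to_hash();
    hex::encode(&hash)
}
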
40 changes: 13 additions & 27 deletions src/sources/common.rs
@@ -1,14 +1,9 @@
 use core::fmt;
-use std::{net::TcpStream, ops::Deref, str::FromStr, time::Duration};
+use std::{ops::Deref, str::FromStr, time::Duration};
 
-#[cfg(target_family = "unix")]
-use std::os::unix::net::UnixStream;
-
-use log::info;
-use net2::TcpStreamExt;
 use pallas::network::{
     miniprotocols::{chainsync::TipFinder, run_agent, Point, MAINNET_MAGIC, TESTNET_MAGIC},
-    multiplexer::{Channel, Multiplexer},
+    multiplexer::{bearers::Bearer, StdChannel, StdPlexer},
 };
 use serde::{de::Visitor, Deserializer};
 use serde::{Deserialize, Serialize};
@@ -151,45 +146,36 @@ pub struct RetryPolicy {
     connection_max_backoff: u32,
 }
 
-pub fn setup_multiplexer_attempt(
-    bearer: &BearerKind,
-    address: &str,
-    protocols: &[u16],
-) -> Result<Multiplexer, Error> {
+pub fn setup_multiplexer_attempt(bearer: &BearerKind, address: &str) -> Result<StdPlexer, Error> {
     match bearer {
         BearerKind::Tcp => {
-            let tcp = TcpStream::connect(address)?;
-            tcp.set_nodelay(true)?;
-            tcp.set_keepalive_ms(Some(30_000u32))?;
-
-            Multiplexer::setup(tcp, protocols)
+            let bearer = Bearer::connect_tcp(address)?;
+            Ok(StdPlexer::new(bearer))
         }
         #[cfg(target_family = "unix")]
         BearerKind::Unix => {
-            let unix = UnixStream::connect(address)?;
-
-            Multiplexer::setup(unix, protocols)
+            let unix = Bearer::connect_unix(address)?;
+            Ok(StdPlexer::new(unix))
         }
     }
 }
 
 pub fn setup_multiplexer(
     bearer: &BearerKind,
     address: &str,
-    protocols: &[u16],
     retry: &Option<RetryPolicy>,
-) -> Result<Multiplexer, Error> {
+) -> Result<StdPlexer, Error> {
     match retry {
         Some(policy) => retry::retry_operation(
-            || setup_multiplexer_attempt(bearer, address, protocols),
+            || setup_multiplexer_attempt(bearer, address),
             &retry::Policy {
                 max_retries: policy.connection_max_retries,
                 backoff_unit: Duration::from_secs(1),
                 backoff_factor: 2,
                 max_backoff: Duration::from_secs(policy.connection_max_backoff as u64),
             },
         ),
-        None => setup_multiplexer_attempt(bearer, address, protocols),
+        None => setup_multiplexer_attempt(bearer, address),
     }
 }
 
@@ -241,7 +227,7 @@ pub fn should_finalize(
 }
 
 pub(crate) fn find_end_of_chain(
-    channel: &mut Channel,
+    channel: &mut StdChannel,
     well_known: &ChainWellKnownInfo,
 ) -> Result<Point, crate::Error> {
     let point = Point::Specific(
@@ -251,7 +237,7 @@
 
     let agent = TipFinder::initial(point);
     let agent = run_agent(agent, channel)?;
-    info!("chain point query output: {:?}", agent.output);
+    log::info!("chain point query output: {:?}", agent.output);
 
     match agent.output {
         Some(tip) => Ok(tip.0),
@@ -263,7 +249,7 @@ pub(crate) fn define_start_point(
     intersect: &Option<IntersectArg>,
     since: &Option<PointArg>,
     utils: &Utils,
-    cs_channel: &mut Channel,
+    cs_channel: &mut StdChannel,
 ) -> Result<Option<Vec<Point>>, Error> {
     let cursor = utils.get_cursor_if_any();
 

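Connection setup changes the most here: the source no longer builds a TcpStream by hand (so the net2 keep-alive tweaks and the UnixStream import disappear) and instead asks pallas for a Bearer, which it wraps in a StdPlexer; protocol channels are presumably obtained from the plexer later, which is why the protocols argument is dropped. A rough sketch of the new flow (the wrapper function and the boxed error type are illustrative, not part of this diff):

use pallas::network::multiplexer::{bearers::Bearer, StdPlexer};

// Illustrative wrapper, not part of this PR: in pallas 0.10 the multiplexer
// is built from a Bearer, which owns socket setup, instead of a raw
// TcpStream plus an up-front protocol list.
fn connect_tcp(address: &str) -> Result<StdPlexer, Box<dyn std::error::Error>> {
    let bearer = Bearer::connect_tcp(address)?;
    Ok(StdPlexer::new(bearer))
}
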
58 changes: 33 additions & 25 deletions src/sources/n2c/blocks.rs
@@ -2,50 +2,58 @@ use std::ops::Deref;
 
 use pallas::{
     codec::minicbor::decode,
-    ledger::primitives::{alonzo, byron, probing, Era},
-    network::miniprotocols::{chainsync::BlockContent, Point},
+    ledger::primitives::{alonzo, byron, probing, Era, ToHash},
+    network::miniprotocols::Point,
 };
 
 use crate::Error;
 
-#[derive(Debug)]
-pub(crate) enum MultiEraBlock {
-    Byron(Box<byron::Block>),
-    AlonzoCompatible(Box<alonzo::Block>, Era),
-}
-
-impl TryFrom<BlockContent> for MultiEraBlock {
-    type Error = Error;
+pub(crate) struct CborHolder(Vec<u8>);
 
-    fn try_from(value: BlockContent) -> Result<Self, Self::Error> {
-        let bytes = value.deref();
+impl<'b> CborHolder {
+    pub fn new(bytes: Vec<u8>) -> Self {
+        Self(bytes)
+    }
 
-        match probing::probe_block_cbor_era(bytes) {
+    pub fn parse(&'b self) -> Result<MultiEraBlock<'b>, Error> {
+        let block = match probing::probe_block_cbor_era(&self.0) {
             probing::Outcome::Matched(era) => match era {
                 pallas::ledger::primitives::Era::Byron => {
-                    let block = decode(bytes)?;
-                    Ok(MultiEraBlock::Byron(Box::new(block)))
+                    let block = decode(&self.0)?;
+                    MultiEraBlock::Byron(Box::new(block))
                 }
                 _ => {
-                    let alonzo::BlockWrapper(_, block) = decode(bytes)?;
-                    Ok(MultiEraBlock::AlonzoCompatible(Box::new(block), era))
+                    let alonzo::BlockWrapper(_, block) = decode(&self.0)?;
+                    MultiEraBlock::AlonzoCompatible(Box::new(block), era)
                 }
             },
-            // TODO: we're assuming that the genesis block is Byron-compatible. Is this a safe
+            // TODO: we're assuming that the geenesis block is Byron-compatible. Is this a safe
             // assumption?
             probing::Outcome::GenesisBlock => {
-                let block = decode(bytes)?;
-                Ok(MultiEraBlock::Byron(Box::new(block)))
+                let block = decode(&self.0)?;
+                MultiEraBlock::Byron(Box::new(block))
            }
             probing::Outcome::Inconclusive => {
-                log::error!("CBOR hex for debugging: {}", hex::encode(bytes));
-                Err("can't infer primitive block from cbor, inconclusive probing".into())
+                log::error!("CBOR hex for debugging: {}", hex::encode(&self.0));
+                return Err("can't infer primitive block from cbor, inconclusive probing".into());
            }
-        }
+        };
+
+        Ok(block)
     }
+
+    pub fn cbor(&'b self) -> &'b [u8] {
+        &self.0
+    }
 }
 
-impl MultiEraBlock {
+#[derive(Debug)]
+pub(crate) enum MultiEraBlock<'b> {
+    Byron(Box<byron::Block>),
+    AlonzoCompatible(Box<alonzo::Block<'b>>, Era),
+}
+
+impl MultiEraBlock<'_> {
     pub(crate) fn read_cursor(&self) -> Result<Point, Error> {
         match self {
             MultiEraBlock::Byron(x) => match x.deref() {
@@ -61,7 +69,7 @@ impl MultiEraBlock {
                 }
             },
             MultiEraBlock::AlonzoCompatible(x, _) => {
-                let hash = alonzo::crypto::hash_block_header(&x.header);
+                let hash = x.header.to_hash();
                 Ok(Point::Specific(x.header.header_body.slot, hash.to_vec()))
             }
         }

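This file carries the most structural change in the PR: with pallas 0.10 the decoded alonzo::Block borrows from the CBOR it was parsed from (note the new lifetime on MultiEraBlock), so the block can no longer be built directly from the chain-sync BlockContent; instead the new CborHolder owns the raw bytes and hands out a MultiEraBlock that borrows them. A small usage sketch under that reading (the function is illustrative, not part of this diff; Point and Error are the imports already at the top of this module):

// Illustrative usage, not part of this PR: keep the raw CBOR alive in the
// holder for as long as the parsed block, which borrows from it, is in use.
fn cursor_from_cbor(bytes: Vec<u8>) -> Result<Point, Error> {
    let holder = CborHolder::new(bytes);
    let block = holder.parse()?; // MultiEraBlock<'_> borrowing from `holder`
    block.read_cursor()
}
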