From e8e0b08f17014a3f94734cfc7208d3bc493f756a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Silva?= Date: Fri, 24 Aug 2018 01:14:01 +0100 Subject: [PATCH 01/15] ethcore: kovan: delay activation of strict score validation (#9406) --- ethcore/res/ethereum/kovan.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ethcore/res/ethereum/kovan.json b/ethcore/res/ethereum/kovan.json index 1268de55f26..47f80082f17 100644 --- a/ethcore/res/ethereum/kovan.json +++ b/ethcore/res/ethereum/kovan.json @@ -19,7 +19,7 @@ "0x00a0a24b9f0e5ec7aa4c7389b8302fd0123194de" ] }, - "validateScoreTransition": 1000000, + "validateScoreTransition": 4301764, "validateStepTransition": 1500000, "maximumUncleCountTransition": 5067000, "maximumUncleCount": 0 From 31291ebd3563208afbd40204f1ab74418e05abae Mon Sep 17 00:00:00 2001 From: JohnnySheffield Date: Fri, 24 Aug 2018 05:55:54 +0200 Subject: [PATCH 02/15] nonroot CentOS Docker image (#9280) * Updates CentOS Docker image build process * rename build.Dockerfile --- docker/README.md | 41 +++++++++++++++++++++++++++++- docker/centos/Dockerfile | 46 ++++++++++++++-------------------- docker/centos/Dockerfile.build | 25 ++++++++++++++++++ docker/centos/build.sh | 29 +++++++++++++++++++++ 4 files changed, 113 insertions(+), 28 deletions(-) create mode 100644 docker/centos/Dockerfile.build create mode 100755 docker/centos/build.sh diff --git a/docker/README.md b/docker/README.md index 3b79e8dd754..b2f8374b59e 100644 --- a/docker/README.md +++ b/docker/README.md @@ -1,3 +1,42 @@ -Usage +## Usage ```docker build -f docker/ubuntu/Dockerfile --tag ethcore/parity:branch_or_tag_name .``` + +## Usage - CentOS + +Builds a lightweight non-root Parity docker image: + +``` +git clone https://github.com/paritytech/parity-ethereum.git +cd parity-ethereum +./docker/centos/build.sh +``` + +Fully customised build: +``` +PARITY_IMAGE_REPO=my-personal/parity \ +PARITY_BUILDER_IMAGE_TAG=build-latest \ +PARITY_RUNNER_IMAGE_TAG=centos-parity-experimental \ +./docker/centos/build.sh +``` + + +Default values: +``` +# The image name +PARITY_IMAGE_REPO - parity/parity + +# The tag to be used for builder image, git commit sha will be appended +PARITY_BUILDER_IMAGE_TAG - build + +# The tag to be used for runner image +PARITY_RUNNER_IMAGE_TAG - latest +``` + +All default ports you might use will be exposed: +``` +# secret +# ipfs store ui rpc ws listener discovery +# ↓ ↓ ↓ ↓ ↓ ↓ ↓ +EXPOSE 5001 8082 8083 8180 8545 8546 30303/tcp 30303/udp +``` diff --git a/docker/centos/Dockerfile b/docker/centos/Dockerfile index 7c944001e2c..22a98c003ae 100644 --- a/docker/centos/Dockerfile +++ b/docker/centos/Dockerfile @@ -1,36 +1,28 @@ FROM centos:latest -WORKDIR /build -# install tools and dependencies -RUN yum -y update&& \ - yum install -y git make gcc-c++ gcc file binutils cmake +RUN mkdir -p /opt/parity/data && \ + chmod g+rwX /opt/parity/data && \ + mkdir -p /opt/parity/release -# install rustup -RUN curl -sSf https://static.rust-lang.org/rustup.sh -o rustup.sh &&\ -ls&&\ - sh rustup.sh --disable-sudo +COPY parity/parity /opt/parity/release -# show backtraces -ENV RUST_BACKTRACE 1 +WORKDIR /opt/parity/data -# set compiler -ENV CXX g++ -ENV CC gcc +# exposing default ports +# +# secret +# ipfs store ui rpc ws listener discovery +# ↓ ↓ ↓ ↓ ↓ ↓ ↓ +EXPOSE 5001 8082 8083 8180 8545 8546 30303/tcp 30303/udp -# show tools -RUN rustc -vV && \ -cargo -V && \ -gcc -v &&\ -g++ -v +# switch to non-root user +USER 1001 -# build parity -ADD . 
/build/parity -RUN cd parity&&\ - cargo build --release --verbose && \ - ls /build/parity/target/release/parity && \ - strip /build/parity/target/release/parity +#if no base path provided, assume it's current workdir +CMD ["--base-path","."] +ENTRYPOINT ["/opt/parity/release/parity"] + + + -RUN file /build/parity/target/release/parity -EXPOSE 8080 8545 8180 -ENTRYPOINT ["/build/parity/target/release/parity"] diff --git a/docker/centos/Dockerfile.build b/docker/centos/Dockerfile.build new file mode 100644 index 00000000000..454af403a03 --- /dev/null +++ b/docker/centos/Dockerfile.build @@ -0,0 +1,25 @@ +FROM centos:latest + +WORKDIR /build + +ADD . /build/parity-ethereum + +RUN yum -y update && \ + yum install -y systemd-devel git make gcc-c++ gcc file binutils && \ + curl -L "https://cmake.org/files/v3.12/cmake-3.12.0-Linux-x86_64.tar.gz" -o cmake.tar.gz && \ + tar -xzf cmake.tar.gz && \ + cp -r cmake-3.12.0-Linux-x86_64/* /usr/ && \ + curl https://sh.rustup.rs -sSf | sh -s -- -y && \ + PATH=/root/.cargo/bin:$PATH && \ + RUST_BACKTRACE=1 && \ + rustc -vV && \ + cargo -V && \ + gcc -v && \ + g++ -v && \ + cmake --version && \ + cd parity-ethereum && \ + cargo build --verbose --release --features final && \ + strip /build/parity-ethereum/target/release/parity && \ + file /build/parity-ethereum/target/release/parity + + diff --git a/docker/centos/build.sh b/docker/centos/build.sh new file mode 100755 index 00000000000..7215e745f0e --- /dev/null +++ b/docker/centos/build.sh @@ -0,0 +1,29 @@ +#!/usr/bin/env sh + +# The image name +PARITY_IMAGE_REPO=${PARITY_IMAGE_REPO:-parity/parity} +# The tag to be used for builder image +PARITY_BUILDER_IMAGE_TAG=${PARITY_BUILDER_IMAGE_TAG:-build} +# The tag to be used for runner image +PARITY_RUNNER_IMAGE_TAG=${PARITY_RUNNER_IMAGE_TAG:-latest} + +echo Building $PARITY_IMAGE_REPO:$PARITY_BUILDER_IMAGE_TAG-$(git log -1 --format="%H") +docker build --no-cache -t $PARITY_IMAGE_REPO:$PARITY_BUILDER_IMAGE_TAG-$(git log -1 --format="%H") . -f docker/centos/Dockerfile.build + +echo Creating $PARITY_BUILDER_IMAGE_TAG-$(git log -1 --format="%H"), extracting binary +docker create --name extract $PARITY_IMAGE_REPO:$PARITY_BUILDER_IMAGE_TAG-$(git log -1 --format="%H") +mkdir docker/centos/parity +docker cp extract:/build/parity-ethereum/target/release/parity docker/centos/parity + +echo Building $PARITY_IMAGE_REPO:$PARITY_RUNNER_IMAGE_TAG +docker build --no-cache -t $PARITY_IMAGE_REPO:$PARITY_RUNNER_IMAGE_TAG docker/centos/ -f docker/centos/Dockerfile + +echo Cleaning up ... +rm -rf docker/centos/parity +docker rm -f extract +docker rmi -f $PARITY_IMAGE_REPO:$PARITY_BUILDER_IMAGE_TAG-$(git log -1 --format="%H") + +echo Echoing Parity version: +docker run $PARITY_IMAGE_REPO:$PARITY_RUNNER_IMAGE_TAG --version + +echo Done. 
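As a usage note on the CentOS image above: the runner stage switches to a non-root user (UID 1001), sets `WORKDIR /opt/parity/data`, and defaults to `--base-path .`, so a typical invocation bind-mounts that directory for persistent chain data. A minimal sketch, assuming the default `parity/parity:latest` tag produced by `build.sh` and an illustrative `./parity-data` host directory:

```
# Assumed: image built with the defaults above (parity/parity:latest) and a
# host directory ./parity-data writable by the container user (UID 1001).
mkdir -p parity-data
sudo chown 1001 parity-data

# The entrypoint is the parity binary and the default CMD is `--base-path .`,
# so with /opt/parity/data bind-mounted the chain data lands on the host.
docker run -d \
  -v "$(pwd)/parity-data:/opt/parity/data" \
  -p 8545:8545 \
  -p 30303:30303 -p 30303:30303/udp \
  parity/parity:latest
```

Any extra arguments given after the image name replace the default `--base-path .`, since they override the Dockerfile's CMD.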
From 0b34579b044c36b3c0969b16bb38d94f662a1eef Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 24 Aug 2018 09:42:24 +0100 Subject: [PATCH 03/15] Prevent sync restart if import queue full (#9381) --- ethcore/src/client/client.rs | 36 +++++++++++----------------------- ethcore/src/error.rs | 19 ++++++++++++++++++ ethcore/sync/src/block_sync.rs | 6 +++++- 3 files changed, 35 insertions(+), 26 deletions(-) diff --git a/ethcore/src/client/client.rs b/ethcore/src/client/client.rs index 84bcddfd6db..e8707d6eb7c 100644 --- a/ethcore/src/client/client.rs +++ b/ethcore/src/client/client.rs @@ -16,7 +16,6 @@ use std::collections::{HashSet, BTreeMap, VecDeque}; use std::cmp; -use std::fmt; use std::str::FromStr; use std::sync::atomic::{AtomicUsize, AtomicBool, Ordering as AtomicOrdering}; use std::sync::{Arc, Weak}; @@ -50,13 +49,16 @@ use client::{ }; use encoded; use engines::{EthEngine, EpochTransition, ForkChoice}; -use error::{ImportErrorKind, BlockImportErrorKind, ExecutionError, CallError, BlockError, ImportResult, Error as EthcoreError}; +use error::{ + ImportErrorKind, BlockImportErrorKind, ExecutionError, CallError, BlockError, ImportResult, + QueueError, QueueErrorKind, Error as EthcoreError +}; use vm::{EnvInfo, LastHashes}; use evm::Schedule; use executive::{Executive, Executed, TransactOptions, contract_address}; use factory::{Factories, VmFactory}; use header::{BlockNumber, Header, ExtendedHeader}; -use io::{IoChannel, IoError}; +use io::IoChannel; use log_entry::LocalizedLogEntry; use miner::{Miner, MinerService}; use ethcore_miner::pool::VerifiedTransaction; @@ -2095,7 +2097,7 @@ impl IoClient for Client { let queued = self.queued_ancient_blocks.clone(); let lock = self.ancient_blocks_import_lock.clone(); - match self.queue_ancient_blocks.queue(&self.io_channel.read(), 1, move |client| { + self.queue_ancient_blocks.queue(&self.io_channel.read(), 1, move |client| { trace_time!("import_ancient_block"); // Make sure to hold the lock here to prevent importing out of order. // We use separate lock, cause we don't want to block queueing. @@ -2119,10 +2121,9 @@ impl IoClient for Client { break; } } - }) { - Ok(_) => Ok(hash), - Err(e) => bail!(BlockImportErrorKind::Other(format!("{}", e))), - } + })?; + + Ok(hash) } fn queue_consensus_message(&self, message: Bytes) { @@ -2538,21 +2539,6 @@ mod tests { } } -#[derive(Debug)] -enum QueueError { - Channel(IoError), - Full(usize), -} - -impl fmt::Display for QueueError { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - match *self { - QueueError::Channel(ref c) => fmt::Display::fmt(c, fmt), - QueueError::Full(limit) => write!(fmt, "The queue is full ({})", limit), - } - } -} - /// Queue some items to be processed by IO client. 
struct IoChannelQueue { currently_queued: Arc, @@ -2571,7 +2557,7 @@ impl IoChannelQueue { F: Fn(&Client) + Send + Sync + 'static, { let queue_size = self.currently_queued.load(AtomicOrdering::Relaxed); - ensure!(queue_size < self.limit, QueueError::Full(self.limit)); + ensure!(queue_size < self.limit, QueueErrorKind::Full(self.limit)); let currently_queued = self.currently_queued.clone(); let result = channel.send(ClientIoMessage::execute(move |client| { @@ -2584,7 +2570,7 @@ impl IoChannelQueue { self.currently_queued.fetch_add(count, AtomicOrdering::SeqCst); Ok(()) }, - Err(e) => Err(QueueError::Channel(e)), + Err(e) => bail!(QueueErrorKind::Channel(e)), } } } diff --git a/ethcore/src/error.rs b/ethcore/src/error.rs index 75b2b517503..29ef8961296 100644 --- a/ethcore/src/error.rs +++ b/ethcore/src/error.rs @@ -150,6 +150,24 @@ impl error::Error for BlockError { } } +error_chain! { + types { + QueueError, QueueErrorKind, QueueErrorResultExt, QueueErrorResult; + } + + errors { + #[doc = "Queue is full"] + Full(limit: usize) { + description("Queue is full") + display("The queue is full ({})", limit) + } + } + + foreign_links { + Channel(IoError) #[doc = "Io channel error"]; + } +} + error_chain! { types { ImportError, ImportErrorKind, ImportErrorResultExt, ImportErrorResult; @@ -183,6 +201,7 @@ error_chain! { links { Import(ImportError, ImportErrorKind) #[doc = "Import error"]; + Queue(QueueError, QueueErrorKind) #[doc = "Io channel queue error"]; } foreign_links { diff --git a/ethcore/sync/src/block_sync.rs b/ethcore/sync/src/block_sync.rs index 4c229cd87ae..5dd1bdac244 100644 --- a/ethcore/sync/src/block_sync.rs +++ b/ethcore/sync/src/block_sync.rs @@ -25,7 +25,7 @@ use ethereum_types::H256; use rlp::{self, Rlp}; use ethcore::header::BlockNumber; use ethcore::client::{BlockStatus, BlockId, BlockImportError, BlockImportErrorKind}; -use ethcore::error::{ImportErrorKind, BlockError}; +use ethcore::error::{ImportErrorKind, QueueErrorKind, BlockError}; use sync_io::SyncIo; use blocks::{BlockCollection, SyncBody, SyncHeader}; @@ -513,6 +513,10 @@ impl BlockDownloader { debug!(target: "sync", "Block temporarily invalid, restarting sync"); break; }, + Err(BlockImportError(BlockImportErrorKind::Queue(QueueErrorKind::Full(limit)), _)) => { + debug!(target: "sync", "Block import queue full ({}), restarting sync", limit); + break; + }, Err(e) => { debug!(target: "sync", "Bad block {:?} : {:?}", h, e); bad = true; From b87c7cac5468649973fc8672f120052357062385 Mon Sep 17 00:00:00 2001 From: Marek Kotewicz Date: Fri, 24 Aug 2018 11:53:31 +0200 Subject: [PATCH 04/15] block view! 
removal in progress (#9397) --- ethcore/src/block.rs | 24 ++++++++-------- ethcore/src/verification/verification.rs | 24 ++++++++-------- ethcore/sync/src/blocks.rs | 36 ++++++++++++------------ ethcore/sync/src/chain/supplier.rs | 10 +++---- ethcore/sync/src/lib.rs | 1 - 5 files changed, 47 insertions(+), 48 deletions(-) diff --git a/ethcore/src/block.rs b/ethcore/src/block.rs index fc2873ba3b9..00da9fa9f99 100644 --- a/ethcore/src/block.rs +++ b/ethcore/src/block.rs @@ -637,10 +637,11 @@ mod tests { use ethereum_types::Address; use std::sync::Arc; use transaction::SignedTransaction; + use verification::queue::kind::blocks::Unverified; /// Enact the block given by `block_bytes` using `engine` on the database `db` with given `parent` block header fn enact_bytes( - block_bytes: &[u8], + block_bytes: Vec, engine: &EthEngine, tracing: bool, db: StateDB, @@ -648,10 +649,10 @@ mod tests { last_hashes: Arc, factories: Factories, ) -> Result { - let block = view!(BlockView, block_bytes); - let header = block.header(); + let block = Unverified::from_rlp(block_bytes)?; + let header = block.header; let transactions: Result, Error> = block - .transactions() + .transactions .into_iter() .map(SignedTransaction::new) .map(|r| r.map_err(Into::into)) @@ -683,8 +684,8 @@ mod tests { b.populate_from(&header); b.push_transactions(transactions)?; - for u in &block.uncles() { - b.push_uncle(u.clone())?; + for u in block.uncles { + b.push_uncle(u)?; } b.close_and_lock() @@ -692,7 +693,7 @@ mod tests { /// Enact the block given by `block_bytes` using `engine` on the database `db` with given `parent` block header. Seal the block aferwards fn enact_and_seal( - block_bytes: &[u8], + block_bytes: Vec, engine: &EthEngine, tracing: bool, db: StateDB, @@ -700,8 +701,9 @@ mod tests { last_hashes: Arc, factories: Factories, ) -> Result { - let header = view!(BlockView, block_bytes).header_view(); - Ok(enact_bytes(block_bytes, engine, tracing, db, parent, last_hashes, factories)?.seal(engine, header.seal())?) + let header = Unverified::from_rlp(block_bytes.clone())?.header; + Ok(enact_bytes(block_bytes, engine, tracing, db, parent, last_hashes, factories)? + .seal(engine, header.seal().to_vec())?) 
} #[test] @@ -731,7 +733,7 @@ mod tests { let orig_db = b.drain().state.drop().1; let db = spec.ensure_db_good(get_temp_state_db(), &Default::default()).unwrap(); - let e = enact_and_seal(&orig_bytes, engine, false, db, &genesis_header, last_hashes, Default::default()).unwrap(); + let e = enact_and_seal(orig_bytes.clone(), engine, false, db, &genesis_header, last_hashes, Default::default()).unwrap(); assert_eq!(e.rlp_bytes(), orig_bytes); @@ -762,7 +764,7 @@ mod tests { let orig_db = b.drain().state.drop().1; let db = spec.ensure_db_good(get_temp_state_db(), &Default::default()).unwrap(); - let e = enact_and_seal(&orig_bytes, engine, false, db, &genesis_header, last_hashes, Default::default()).unwrap(); + let e = enact_and_seal(orig_bytes.clone(), engine, false, db, &genesis_header, last_hashes, Default::default()).unwrap(); let bytes = e.rlp_bytes(); assert_eq!(bytes, orig_bytes); diff --git a/ethcore/src/verification/verification.rs b/ethcore/src/verification/verification.rs index a0ecf963420..b5fa95285d6 100644 --- a/ethcore/src/verification/verification.rs +++ b/ethcore/src/verification/verification.rs @@ -376,7 +376,6 @@ mod tests { use types::log_entry::{LogEntry, LocalizedLogEntry}; use rlp; use triehash::ordered_trie_root; - use views::BlockView; fn check_ok(result: Result<(), Error>) { result.unwrap_or_else(|e| panic!("Block verification failed: {:?}", e)); @@ -420,10 +419,10 @@ mod tests { } pub fn insert(&mut self, bytes: Bytes) { - let number = view!(BlockView, &bytes).header_view().number(); - let hash = view!(BlockView, &bytes).header_view().hash(); - self.blocks.insert(hash.clone(), bytes); - self.numbers.insert(number, hash.clone()); + let header = Unverified::from_rlp(bytes.clone()).unwrap().header; + let hash = header.hash(); + self.blocks.insert(hash, bytes); + self.numbers.insert(header.number(), hash); } } @@ -460,11 +459,11 @@ mod tests { /// Get the familial details concerning a block. 
fn block_details(&self, hash: &H256) -> Option { self.blocks.get(hash).map(|bytes| { - let header = view!(BlockView, bytes).header(); + let header = Unverified::from_rlp(bytes.to_vec()).unwrap().header; BlockDetails { number: header.number(), - total_difficulty: header.difficulty().clone(), - parent: header.parent_hash().clone(), + total_difficulty: *header.difficulty(), + parent: *header.parent_hash(), children: Vec::new(), is_finalized: false, } @@ -501,9 +500,9 @@ mod tests { } fn family_test(bytes: &[u8], engine: &EthEngine, bc: &BC) -> Result<(), Error> where BC: BlockProvider { - let view = view!(BlockView, bytes); - let header = view.header(); - let transactions: Vec<_> = view.transactions() + let block = Unverified::from_rlp(bytes.to_vec()).unwrap(); + let header = block.header; + let transactions: Vec<_> = block.transactions .into_iter() .map(SignedTransaction::new) .collect::>()?; @@ -520,7 +519,7 @@ mod tests { let block = PreverifiedBlock { header, transactions, - uncles: view.uncles(), + uncles: block.uncles, bytes: bytes.to_vec(), }; @@ -533,7 +532,6 @@ mod tests { } fn unordered_test(bytes: &[u8], engine: &EthEngine) -> Result<(), Error> { - use verification::queue::kind::blocks::Unverified; let un = Unverified::from_rlp(bytes.to_vec())?; verify_block_unordered(un, engine, false)?; Ok(()) diff --git a/ethcore/sync/src/blocks.rs b/ethcore/sync/src/blocks.rs index a502cee9c55..3815084f8f1 100644 --- a/ethcore/sync/src/blocks.rs +++ b/ethcore/sync/src/blocks.rs @@ -31,6 +31,7 @@ known_heap_size!(0, HeaderId); type SmallHashVec = SmallVec<[H256; 1]>; +#[derive(PartialEq, Debug, Clone)] pub struct SyncHeader { pub bytes: Bytes, pub header: BlockHeader, @@ -578,7 +579,6 @@ mod test { use ethcore::client::{TestBlockChainClient, EachBlockWith, BlockId, BlockChainClient}; use ethcore::header::BlockNumber; use ethcore::verification::queue::kind::blocks::Unverified; - use ethcore::views::HeaderView; use rlp::*; fn is_empty(bc: &BlockCollection) -> bool { @@ -614,9 +614,9 @@ mod test { let blocks: Vec<_> = (0..nblocks) .map(|i| (&client as &BlockChainClient).block(BlockId::Number(i as BlockNumber)).unwrap().into_inner()) .collect(); - let headers: Vec<_> = blocks.iter().map(|b| Rlp::new(b).at(0).unwrap().as_raw().to_vec()).collect(); - let hashes: Vec<_> = headers.iter().map(|h| view!(HeaderView, h).hash()).collect(); - let heads: Vec<_> = hashes.iter().enumerate().filter_map(|(i, h)| if i % 20 == 0 { Some(h.clone()) } else { None }).collect(); + let headers: Vec<_> = blocks.iter().map(|b| SyncHeader::from_rlp(Rlp::new(b).at(0).unwrap().as_raw().to_vec()).unwrap()).collect(); + let hashes: Vec<_> = headers.iter().map(|h| h.header.hash()).collect(); + let heads: Vec<_> = hashes.iter().enumerate().filter_map(|(i, h)| if i % 20 == 0 { Some(*h) } else { None }).collect(); bc.reset_to(heads); assert!(!bc.is_empty()); assert_eq!(hashes[0], bc.heads[0]); @@ -631,7 +631,7 @@ mod test { assert_eq!(bc.downloading_headers.len(), 1); assert!(bc.drain().is_empty()); - bc.insert_headers(headers[0..6].iter().map(|h| SyncHeader::from_rlp(h.to_vec()).unwrap()).collect()); + bc.insert_headers(headers[0..6].into_iter().map(Clone::clone).collect()); assert_eq!(hashes[5], bc.heads[0]); for h in &hashes[0..6] { bc.clear_header_download(h) @@ -651,9 +651,9 @@ mod test { assert_eq!(hashes[5], h); let (h, _) = bc.needed_headers(6, false).unwrap(); assert_eq!(hashes[20], h); - bc.insert_headers(headers[10..16].iter().map(|h| SyncHeader::from_rlp(h.to_vec()).unwrap()).collect()); + 
bc.insert_headers(headers[10..16].into_iter().map(Clone::clone).collect()); assert!(bc.drain().is_empty()); - bc.insert_headers(headers[5..10].iter().map(|h| SyncHeader::from_rlp(h.to_vec()).unwrap()).collect()); + bc.insert_headers(headers[5..10].into_iter().map(Clone::clone).collect()); assert_eq!( bc.drain().into_iter().map(|b| b.block).collect::>(), blocks[6..16].iter().map(|b| Unverified::from_rlp(b.to_vec()).unwrap()).collect::>() @@ -661,7 +661,7 @@ mod test { assert_eq!(hashes[15], bc.heads[0]); - bc.insert_headers(headers[15..].iter().map(|h| SyncHeader::from_rlp(h.to_vec()).unwrap()).collect()); + bc.insert_headers(headers[15..].into_iter().map(Clone::clone).collect()); bc.drain(); assert!(bc.is_empty()); } @@ -676,16 +676,16 @@ mod test { let blocks: Vec<_> = (0..nblocks) .map(|i| (&client as &BlockChainClient).block(BlockId::Number(i as BlockNumber)).unwrap().into_inner()) .collect(); - let headers: Vec<_> = blocks.iter().map(|b| Rlp::new(b).at(0).unwrap().as_raw().to_vec()).collect(); - let hashes: Vec<_> = headers.iter().map(|h| view!(HeaderView, h).hash()).collect(); - let heads: Vec<_> = hashes.iter().enumerate().filter_map(|(i, h)| if i % 20 == 0 { Some(h.clone()) } else { None }).collect(); + let headers: Vec<_> = blocks.iter().map(|b| SyncHeader::from_rlp(Rlp::new(b).at(0).unwrap().as_raw().to_vec()).unwrap()).collect(); + let hashes: Vec<_> = headers.iter().map(|h| h.header.hash()).collect(); + let heads: Vec<_> = hashes.iter().enumerate().filter_map(|(i, h)| if i % 20 == 0 { Some(*h) } else { None }).collect(); bc.reset_to(heads); - bc.insert_headers(headers[2..22].iter().map(|h| SyncHeader::from_rlp(h.to_vec()).unwrap()).collect()); + bc.insert_headers(headers[2..22].into_iter().map(Clone::clone).collect()); assert_eq!(hashes[0], bc.heads[0]); assert_eq!(hashes[21], bc.heads[1]); assert!(bc.head.is_none()); - bc.insert_headers(headers[0..2].iter().map(|h| SyncHeader::from_rlp(h.to_vec()).unwrap()).collect()); + bc.insert_headers(headers[0..2].into_iter().map(Clone::clone).collect()); assert!(bc.head.is_some()); assert_eq!(hashes[21], bc.heads[0]); } @@ -700,14 +700,14 @@ mod test { let blocks: Vec<_> = (0..nblocks) .map(|i| (&client as &BlockChainClient).block(BlockId::Number(i as BlockNumber)).unwrap().into_inner()) .collect(); - let headers: Vec<_> = blocks.iter().map(|b| Rlp::new(b).at(0).unwrap().as_raw().to_vec()).collect(); - let hashes: Vec<_> = headers.iter().map(|h| view!(HeaderView, h).hash()).collect(); - let heads: Vec<_> = hashes.iter().enumerate().filter_map(|(i, h)| if i % 20 == 0 { Some(h.clone()) } else { None }).collect(); + let headers: Vec<_> = blocks.iter().map(|b| SyncHeader::from_rlp(Rlp::new(b).at(0).unwrap().as_raw().to_vec()).unwrap()).collect(); + let hashes: Vec<_> = headers.iter().map(|h| h.header.hash()).collect(); + let heads: Vec<_> = hashes.iter().enumerate().filter_map(|(i, h)| if i % 20 == 0 { Some(*h) } else { None }).collect(); bc.reset_to(heads); - bc.insert_headers(headers[1..2].iter().map(|h| SyncHeader::from_rlp(h.to_vec()).unwrap()).collect()); + bc.insert_headers(headers[1..2].into_iter().map(Clone::clone).collect()); assert!(bc.drain().is_empty()); - bc.insert_headers(headers[0..1].iter().map(|h| SyncHeader::from_rlp(h.to_vec()).unwrap()).collect()); + bc.insert_headers(headers[0..1].into_iter().map(Clone::clone).collect()); assert_eq!(bc.drain().len(), 2); } } diff --git a/ethcore/sync/src/chain/supplier.rs b/ethcore/sync/src/chain/supplier.rs index e8a5c93ea3d..e2113b0b1c2 100644 --- a/ethcore/sync/src/chain/supplier.rs 
+++ b/ethcore/sync/src/chain/supplier.rs @@ -307,11 +307,11 @@ mod test { use bytes::Bytes; use rlp::{Rlp, RlpStream}; use super::{*, super::tests::*}; + use blocks::SyncHeader; use ethcore::client::{BlockChainClient, EachBlockWith, TestBlockChainClient}; #[test] fn return_block_headers() { - use ethcore::views::HeaderView; fn make_hash_req(h: &H256, count: usize, skip: usize, reverse: bool) -> Bytes { let mut rlp = RlpStream::new_list(4); rlp.append(h); @@ -329,16 +329,16 @@ mod test { rlp.append(&if reverse {1u32} else {0u32}); rlp.out() } - fn to_header_vec(rlp: ::chain::RlpResponseResult) -> Vec { - Rlp::new(&rlp.unwrap().unwrap().1.out()).iter().map(|r| r.as_raw().to_vec()).collect() + fn to_header_vec(rlp: ::chain::RlpResponseResult) -> Vec { + Rlp::new(&rlp.unwrap().unwrap().1.out()).iter().map(|r| SyncHeader::from_rlp(r.as_raw().to_vec()).unwrap()).collect() } let mut client = TestBlockChainClient::new(); client.add_blocks(100, EachBlockWith::Nothing); let blocks: Vec<_> = (0 .. 100) .map(|i| (&client as &BlockChainClient).block(BlockId::Number(i as BlockNumber)).map(|b| b.into_inner()).unwrap()).collect(); - let headers: Vec<_> = blocks.iter().map(|b| Rlp::new(b).at(0).unwrap().as_raw().to_vec()).collect(); - let hashes: Vec<_> = headers.iter().map(|h| view!(HeaderView, h).hash()).collect(); + let headers: Vec<_> = blocks.iter().map(|b| SyncHeader::from_rlp(Rlp::new(b).at(0).unwrap().as_raw().to_vec()).unwrap()).collect(); + let hashes: Vec<_> = headers.iter().map(|h| h.header.hash()).collect(); let queue = RwLock::new(VecDeque::new()); let ss = TestSnapshotService::new(); diff --git a/ethcore/sync/src/lib.rs b/ethcore/sync/src/lib.rs index 18a185e511c..9fb7da99053 100644 --- a/ethcore/sync/src/lib.rs +++ b/ethcore/sync/src/lib.rs @@ -27,7 +27,6 @@ extern crate ethcore_network_devp2p as devp2p; extern crate parity_bytes as bytes; extern crate ethcore_io as io; extern crate ethcore_transaction as transaction; -#[cfg_attr(test, macro_use)] extern crate ethcore; extern crate ethereum_types; extern crate env_logger; From 5ed25276635f66450925cba3081028a36de5150d Mon Sep 17 00:00:00 2001 From: Alexey Date: Fri, 24 Aug 2018 20:03:46 +0400 Subject: [PATCH 05/15] `gasleft` extern implemented for WASM runtime (kip-6) (#9357) * Wasm gasleft extern added * wasm_gasleft_activation_transition -> kip4_transition * use kip-6 switch * gasleft_panic -> gasleft_fail rename * call_msg_gasleft test added and gas_left agustments because this https://github.com/paritytech/wasm-tests/pull/52 * change .. 
to _ * fix comment for the have_gasleft param * update tests (https://github.com/paritytech/wasm-tests/pull/54/commits/0edbf860ff7ed4b6b6336097ba44836e8c6482dd) --- ethcore/res/wasm-tests | 2 +- ethcore/src/spec/spec.rs | 9 +++ ethcore/vm/src/schedule.rs | 3 + ethcore/wasm/src/env.rs | 9 +++ ethcore/wasm/src/runtime.rs | 10 ++++ ethcore/wasm/src/tests.rs | 109 ++++++++++++++++++++++++++++++++---- json/src/spec/params.rs | 3 + 7 files changed, 134 insertions(+), 11 deletions(-) diff --git a/ethcore/res/wasm-tests b/ethcore/res/wasm-tests index 242b8d8a89e..0edbf860ff7 160000 --- a/ethcore/res/wasm-tests +++ b/ethcore/res/wasm-tests @@ -1 +1 @@ -Subproject commit 242b8d8a89ecb3e11277f0beb8180c95792aac6b +Subproject commit 0edbf860ff7ed4b6b6336097ba44836e8c6482dd diff --git a/ethcore/src/spec/spec.rs b/ethcore/src/spec/spec.rs index a83046a7258..72019811455 100644 --- a/ethcore/src/spec/spec.rs +++ b/ethcore/src/spec/spec.rs @@ -127,6 +127,8 @@ pub struct CommonParams { pub wasm_activation_transition: BlockNumber, /// Number of first block where KIP-4 rules begin. Only has effect if Wasm is activated. pub kip4_transition: BlockNumber, + /// Number of first block where KIP-6 rules begin. Only has effect if Wasm is activated. + pub kip6_transition: BlockNumber, /// Gas limit bound divisor (how much gas limit can change per block) pub gas_limit_bound_divisor: U256, /// Registrar contract address. @@ -195,6 +197,9 @@ impl CommonParams { if block_number >= self.kip4_transition { wasm.have_create2 = true; } + if block_number >= self.kip6_transition { + wasm.have_gasleft = true; + } schedule.wasm = Some(wasm); } } @@ -308,6 +313,10 @@ impl From for CommonParams { BlockNumber::max_value, Into::into ), + kip6_transition: p.kip6_transition.map_or_else( + BlockNumber::max_value, + Into::into + ), } } } diff --git a/ethcore/vm/src/schedule.rs b/ethcore/vm/src/schedule.rs index ec72c4683fd..2d263b63efd 100644 --- a/ethcore/vm/src/schedule.rs +++ b/ethcore/vm/src/schedule.rs @@ -151,6 +151,8 @@ pub struct WasmCosts { pub opcodes_div: u32, /// Whether create2 extern function is activated. pub have_create2: bool, + /// Whether gasleft extern function is activated. 
+ pub have_gasleft: bool, } impl Default for WasmCosts { @@ -169,6 +171,7 @@ impl Default for WasmCosts { opcodes_mul: 3, opcodes_div: 8, have_create2: false, + have_gasleft: false, } } } diff --git a/ethcore/wasm/src/env.rs b/ethcore/wasm/src/env.rs index a9e536f5f1f..fb9e93e0fd0 100644 --- a/ethcore/wasm/src/env.rs +++ b/ethcore/wasm/src/env.rs @@ -49,6 +49,7 @@ pub mod ids { pub const ORIGIN_FUNC: usize = 200; pub const ELOG_FUNC: usize = 210; pub const CREATE2_FUNC: usize = 220; + pub const GASLEFT_FUNC: usize = 230; pub const PANIC_FUNC: usize = 1000; pub const DEBUG_FUNC: usize = 1010; @@ -157,6 +158,11 @@ pub mod signatures { None, ); + pub const GASLEFT: StaticSignature = StaticSignature( + &[], + Some(I64), + ); + pub const GASLIMIT: StaticSignature = StaticSignature( &[I32], None, @@ -207,6 +213,7 @@ pub struct ImportResolver { memory: RefCell>, have_create2: bool, + have_gasleft: bool, } impl ImportResolver { @@ -217,6 +224,7 @@ impl ImportResolver { memory: RefCell::new(None), have_create2: schedule.have_create2, + have_gasleft: schedule.have_gasleft, } } @@ -274,6 +282,7 @@ impl wasmi::ModuleImportResolver for ImportResolver { "origin" => host(signatures::ORIGIN, ids::ORIGIN_FUNC), "elog" => host(signatures::ELOG, ids::ELOG_FUNC), "create2" if self.have_create2 => host(signatures::CREATE2, ids::CREATE2_FUNC), + "gasleft" if self.have_gasleft => host(signatures::GASLEFT, ids::GASLEFT_FUNC), _ => { return Err(wasmi::Error::Instantiation( format!("Export {} not found", field_name), diff --git a/ethcore/wasm/src/runtime.rs b/ethcore/wasm/src/runtime.rs index 1c814ab7cdd..3c5d27d5c19 100644 --- a/ethcore/wasm/src/runtime.rs +++ b/ethcore/wasm/src/runtime.rs @@ -666,6 +666,15 @@ impl<'a> Runtime<'a> { self.return_u256_ptr(args.nth_checked(0)?, difficulty) } + /// Signature: `fn gasleft() -> i64` + pub fn gasleft(&mut self) -> Result { + Ok(RuntimeValue::from( + self.gas_left()? 
* self.ext.schedule().wasm().opcodes_mul as u64 + / self.ext.schedule().wasm().opcodes_div as u64 + ) + ) + } + /// Signature: `fn gaslimit(dest: *mut u8)` pub fn gaslimit(&mut self, args: RuntimeArgs) -> Result<()> { let gas_limit = self.ext.env_info().gas_limit; @@ -782,6 +791,7 @@ mod ext_impl { ORIGIN_FUNC => void!(self.origin(args)), ELOG_FUNC => void!(self.elog(args)), CREATE2_FUNC => some!(self.create2(args)), + GASLEFT_FUNC => some!(self.gasleft()), _ => panic!("env module doesn't provide function at index {}", index), } } diff --git a/ethcore/wasm/src/tests.rs b/ethcore/wasm/src/tests.rs index fdbb545904c..b1a773cb422 100644 --- a/ethcore/wasm/src/tests.rs +++ b/ethcore/wasm/src/tests.rs @@ -303,7 +303,7 @@ fn create() { &FakeCall { call_type: FakeCallType::Create, create_scheme: Some(CreateContractAddress::FromSenderAndCodeHash), - gas: U256::from(52_017), + gas: U256::from(49_674), sender_address: None, receive_address: None, value: Some((1_000_000_000 / 2).into()), @@ -315,7 +315,7 @@ fn create() { &FakeCall { call_type: FakeCallType::Create, create_scheme: Some(CreateContractAddress::FromSenderSaltAndCodeHash(H256::from([5u8].as_ref()))), - gas: U256::from(10_740), + gas: U256::from(6039), sender_address: None, receive_address: None, value: Some((1_000_000_000 / 2).into()), @@ -323,7 +323,7 @@ fn create() { code_address: None, } )); - assert_eq!(gas_left, U256::from(10_675)); + assert_eq!(gas_left, U256::from(5974)); } #[test] @@ -371,6 +371,54 @@ fn call_msg() { assert_eq!(gas_left, U256::from(91_672)); } +// The same as `call_msg`, but send a `pwasm_ethereum::gasleft` +// value as `gas` argument to the inner pwasm_ethereum::call +#[test] +fn call_msg_gasleft() { + ::ethcore_logger::init_log(); + + let sender: Address = "01030507090b0d0f11131517191b1d1f21232527".parse().unwrap(); + let receiver: Address = "0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6".parse().unwrap(); + let contract_address: Address = "0d461d4174b4ae35775c4a342f1e5e1e4e6c4db5".parse().unwrap(); + + let mut params = ActionParams::default(); + params.sender = sender.clone(); + params.address = receiver.clone(); + params.code_address = contract_address.clone(); + params.gas = U256::from(100_000); + params.code = Some(Arc::new(load_sample!("call_gasleft.wasm"))); + params.data = Some(Vec::new()); + + let mut ext = FakeExt::new().with_wasm(); + ext.schedule.wasm.as_mut().unwrap().have_gasleft = true; + ext.balances.insert(receiver.clone(), U256::from(10000000000u64)); + + let gas_left = { + let mut interpreter = wasm_interpreter(params); + let result = interpreter.exec(&mut ext).expect("Interpreter to execute without any errors"); + match result { + GasLeft::Known(gas_left) => gas_left, + GasLeft::NeedsReturn { .. 
} => { panic!("Call test should not return payload"); }, + } + }; + + trace!(target: "wasm", "fake_calls: {:?}", &ext.calls); + assert!(ext.calls.contains( + &FakeCall { + call_type: FakeCallType::Call, + create_scheme: None, + gas: U256::from(91_165), + sender_address: Some(receiver), + receive_address: Some(Address::from([99, 88, 77, 66, 55, 44, 33, 22, 11, 0, 11, 22, 33, 44, 55, 66, 77, 88, 99, 0])), + value: Some(1000000000.into()), + data: vec![129u8, 123, 113, 107, 101, 97], + code_address: Some(Address::from([99, 88, 77, 66, 55, 44, 33, 22, 11, 0, 11, 22, 33, 44, 55, 66, 77, 88, 99, 0])), + } + )); + + assert_eq!(gas_left, U256::from(91_671)); +} + #[test] fn call_code() { ::ethcore_logger::init_log(); @@ -591,7 +639,7 @@ fn math_add() { U256::from_dec_str("1888888888888888888888888888887").unwrap(), (&result[..]).into() ); - assert_eq!(gas_left, U256::from(92_095)); + assert_eq!(gas_left, U256::from(92_072)); } // multiplication @@ -613,7 +661,7 @@ fn math_mul() { U256::from_dec_str("888888888888888888888888888887111111111111111111111111111112").unwrap(), (&result[..]).into() ); - assert_eq!(gas_left, U256::from(91_423)); + assert_eq!(gas_left, U256::from(91_400)); } // subtraction @@ -635,7 +683,7 @@ fn math_sub() { U256::from_dec_str("111111111111111111111111111111").unwrap(), (&result[..]).into() ); - assert_eq!(gas_left, U256::from(92_095)); + assert_eq!(gas_left, U256::from(92_072)); } // subtraction with overflow @@ -677,7 +725,7 @@ fn math_div() { U256::from_dec_str("1125000").unwrap(), (&result[..]).into() ); - assert_eq!(gas_left, U256::from(87_379)); + assert_eq!(gas_left, U256::from(85_700)); } #[test] @@ -705,7 +753,7 @@ fn storage_metering() { }; // 0 -> not 0 - assert_eq!(gas_left, U256::from(72_395)); + assert_eq!(gas_left, U256::from(72_164)); // #2 @@ -724,7 +772,7 @@ fn storage_metering() { }; // not 0 -> not 0 - assert_eq!(gas_left, U256::from(87_395)); + assert_eq!(gas_left, U256::from(87_164)); } // This test checks the ability of wasm contract to invoke @@ -815,6 +863,47 @@ fn externs() { assert_eq!(gas_left, U256::from(90_428)); } +// This test checks the ability of wasm contract to invoke gasleft +#[test] +fn gasleft() { + ::ethcore_logger::init_log(); + + let mut params = ActionParams::default(); + params.gas = U256::from(100_000); + params.code = Some(Arc::new(load_sample!("gasleft.wasm"))); + + let mut ext = FakeExt::new().with_wasm(); + ext.schedule.wasm.as_mut().unwrap().have_gasleft = true; + + let mut interpreter = wasm_interpreter(params); + let result = interpreter.exec(&mut ext).expect("Interpreter to execute without any errors"); + match result { + GasLeft::Known(_) => {}, + GasLeft::NeedsReturn { gas_left, data, .. 
} => { + let gas = LittleEndian::read_u64(data.as_ref()); + assert_eq!(gas, 93_423); + assert_eq!(gas_left, U256::from(93_349)); + }, + } +} + +// This test should fail because +// ext.schedule.wasm.as_mut().unwrap().have_gasleft = false; +#[test] +fn gasleft_fail() { + ::ethcore_logger::init_log(); + + let mut params = ActionParams::default(); + params.gas = U256::from(100_000); + params.code = Some(Arc::new(load_sample!("gasleft.wasm"))); + let mut ext = FakeExt::new().with_wasm(); + let mut interpreter = wasm_interpreter(params); + match interpreter.exec(&mut ext) { + Err(_) => {}, + Ok(_) => panic!("interpreter.exec should return Err if ext.schedule.wasm.have_gasleft = false") + } +} + #[test] fn embedded_keccak() { ::ethcore_logger::init_log(); @@ -873,7 +962,7 @@ fn events() { assert_eq!(&log_entry.data, b"gnihtemos"); assert_eq!(&result, b"gnihtemos"); - assert_eq!(gas_left, U256::from(83_158)); + assert_eq!(gas_left, U256::from(83_161)); } #[test] diff --git a/json/src/spec/params.rs b/json/src/spec/params.rs index cf57e9af456..d3319f7c49e 100644 --- a/json/src/spec/params.rs +++ b/json/src/spec/params.rs @@ -152,6 +152,9 @@ pub struct Params { /// KIP4 activiation block height. #[serde(rename="kip4Transition")] pub kip4_transition: Option, + /// KIP6 activiation block height. + #[serde(rename="kip6Transition")] + pub kip6_transition: Option, } #[cfg(test)] From 7abe9ec4ccb3136052cea56f36022c3f8e9c0fd6 Mon Sep 17 00:00:00 2001 From: Christopher Purta Date: Fri, 24 Aug 2018 09:14:07 -0700 Subject: [PATCH 06/15] Add update docs script to CI (#9219) * Add update docs script to CI Added a script to CI that will use the jsonrpc tool to update rpc documentation then commit and push those to the wiki repo. * fix gitlab ci lint * Only apply jsonrpc docs update on tags * Update gitlab-rpc-docs.sh * Copy correct parity repo to jsonrpc folder Copy correct parity repo to jsonrpc folder before attempting to build docs since the CI runner clones the repo as parity and not parity-ethereum. * Fix JSONRPC docs CI job Update remote config in wiki repo before pushing changes using a github token for authentication. Add message to wiki tag when pushing changes. Use project directory to correctly copy parity code base into the jsonrpc repo for doc generation. 
* Fix set_remote_wiki function call in CI --- .gitlab-ci.yml | 10 +++++++ scripts/gitlab-rpc-docs.sh | 53 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 63 insertions(+) create mode 100755 scripts/gitlab-rpc-docs.sh diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index d73d494b24a..ac3e44892fe 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -2,6 +2,7 @@ stages: - test - push-release - build + - docs variables: RUST_BACKTRACE: "1" RUSTFLAGS: "" @@ -220,6 +221,15 @@ test-rust-nightly: - rust - rust-nightly allow_failure: true +json-rpc-docs: + stage: docs + only: + - tags + image: parity/rust:gitlab-ci + script: + - scripts/gitlab-rpc-docs.sh + tags: + - docs push-release: stage: push-release only: diff --git a/scripts/gitlab-rpc-docs.sh b/scripts/gitlab-rpc-docs.sh new file mode 100755 index 00000000000..de03fc69f26 --- /dev/null +++ b/scripts/gitlab-rpc-docs.sh @@ -0,0 +1,53 @@ +#!/usr/bin/env bash + +clone_repos() { + git clone https://github.com/parity-js/jsonrpc.git jsonrpc + git clone https://github.com/paritytech/wiki.git wiki +} + +build_docs() { + npm install + npm run build:markdown +} + +update_wiki_docs() { + for file in $(ls jsonrpc/docs); do + module_name=${file:0:-3} + mv jsonrpc/docs/$file wiki/JSONRPC-$module_name-module.md + done +} + +set_remote_wiki() { + git config remote.origin.url "https://${GITHUB_TOKEN}@github.com/paritytech/wiki.git" +} + +setup_git() { + git config --global user.email "devops@parity.com" + git config --global user.name "Devops Parity" +} + +commit_files() { + git checkout -b rpcdoc-update-${CI_COMMIT_REF_NAME} + git add . + git commit -m "Update docs to ${CI_COMMIT_REF_NAME}" + git tag -a "${CI_COMMIT_REF_NAME}" -m "Updated to ${CI_COMMIT_REF_NAME}" +} + +upload_files() { + git push --tags +} + +PROJECT_DIR=$(pwd) + +setup_git +cd .. +clone_repos +cp -r $PROJECT_DIR jsonrpc/.parity +cd jsonrpc +build_docs +cd .. +update_wiki_docs +cd wiki +set_remote_wiki +commit_files +upload_files From 9ed43230cac4788bc2a72efb0e3db792109cca82 Mon Sep 17 00:00:00 2001 From: Jim Posen Date: Sat, 25 Aug 2018 14:06:01 -0700 Subject: [PATCH 07/15] Better support for eth_getLogs in light mode (#9186) * Light client on-demand request for headers range. * Cache headers in HeaderWithAncestors response. Also fulfills request locally if all headers are in cache. * LightFetch::logs fetches missing headers on demand. * LightFetch::logs limit the number of headers requested at a time. * LightFetch::logs refactor header fetching logic. * Enforce limit on header range length in light client logs request. * Fix light request tests after struct change. * Respond to review comments. 
--- Cargo.lock | 1 + ethcore/light/src/lib.rs | 2 +- ethcore/light/src/on_demand/mod.rs | 2 + ethcore/light/src/on_demand/request.rs | 198 ++++++++++++++++++ ethcore/light/src/provider.rs | 6 +- rpc/Cargo.toml | 1 + rpc/src/lib.rs | 1 + rpc/src/v1/helpers/errors.rs | 8 + rpc/src/v1/helpers/light_fetch.rs | 275 ++++++++++++++++++------- 9 files changed, 421 insertions(+), 73 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index bbedd2fed82..d02cff5ca48 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2146,6 +2146,7 @@ dependencies = [ "ethstore 0.2.0", "fake-fetch 0.0.1", "fake-hardware-wallet 0.0.1", + "fastmap 0.1.0", "fetch 0.1.0", "futures 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)", "futures-cpupool 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", diff --git a/ethcore/light/src/lib.rs b/ethcore/light/src/lib.rs index e151267a9c9..5510ca4aa58 100644 --- a/ethcore/light/src/lib.rs +++ b/ethcore/light/src/lib.rs @@ -43,7 +43,7 @@ pub mod provider; mod types; pub use self::cache::Cache; -pub use self::provider::Provider; +pub use self::provider::{Provider, MAX_HEADERS_PER_REQUEST}; pub use self::transaction_queue::TransactionQueue; pub use types::request as request; diff --git a/ethcore/light/src/on_demand/mod.rs b/ethcore/light/src/on_demand/mod.rs index 594d0dee400..a78adb1ed99 100644 --- a/ethcore/light/src/on_demand/mod.rs +++ b/ethcore/light/src/on_demand/mod.rs @@ -204,6 +204,8 @@ fn guess_capabilities(requests: &[CheckedRequest]) -> Capabilities { caps.serve_headers = true, CheckedRequest::HeaderByHash(_, _) => caps.serve_headers = true, + CheckedRequest::HeaderWithAncestors(_, _) => + caps.serve_headers = true, CheckedRequest::TransactionIndex(_, _) => {} // hashes yield no info. CheckedRequest::Signal(_, _) => caps.serve_headers = true, diff --git a/ethcore/light/src/on_demand/request.rs b/ethcore/light/src/on_demand/request.rs index c305dea94f9..f3a451e6be7 100644 --- a/ethcore/light/src/on_demand/request.rs +++ b/ethcore/light/src/on_demand/request.rs @@ -16,6 +16,7 @@ //! Request types, verification, and verification errors. +use std::cmp; use std::sync::Arc; use bytes::Bytes; @@ -47,6 +48,8 @@ pub enum Request { HeaderProof(HeaderProof), /// A request for a header by hash. HeaderByHash(HeaderByHash), + /// A request for a header by hash with a range of its ancestors. + HeaderWithAncestors(HeaderWithAncestors), /// A request for the index of a transaction. TransactionIndex(TransactionIndex), /// A request for block receipts. @@ -136,6 +139,7 @@ macro_rules! impl_single { // implement traits for each kind of request. 
impl_single!(HeaderProof, HeaderProof, (H256, U256)); impl_single!(HeaderByHash, HeaderByHash, encoded::Header); +impl_single!(HeaderWithAncestors, HeaderWithAncestors, Vec); impl_single!(TransactionIndex, TransactionIndex, net_request::TransactionIndexResponse); impl_single!(Receipts, BlockReceipts, Vec); impl_single!(Body, Body, encoded::Block); @@ -246,6 +250,7 @@ impl From for HeaderRef { pub enum CheckedRequest { HeaderProof(HeaderProof, net_request::IncompleteHeaderProofRequest), HeaderByHash(HeaderByHash, net_request::IncompleteHeadersRequest), + HeaderWithAncestors(HeaderWithAncestors, net_request::IncompleteHeadersRequest), TransactionIndex(TransactionIndex, net_request::IncompleteTransactionIndexRequest), Receipts(BlockReceipts, net_request::IncompleteReceiptsRequest), Body(Body, net_request::IncompleteBodyRequest), @@ -268,6 +273,16 @@ impl From for CheckedRequest { trace!(target: "on_demand", "HeaderByHash Request, {:?}", net_req); CheckedRequest::HeaderByHash(req, net_req) } + Request::HeaderWithAncestors(req) => { + let net_req = net_request::IncompleteHeadersRequest { + start: req.block_hash.map(Into::into), + skip: 0, + max: req.ancestor_count + 1, + reverse: true, + }; + trace!(target: "on_demand", "HeaderWithAncestors Request, {:?}", net_req); + CheckedRequest::HeaderWithAncestors(req, net_req) + } Request::HeaderProof(req) => { let net_req = net_request::IncompleteHeaderProofRequest { num: req.num().into(), @@ -344,6 +359,7 @@ impl CheckedRequest { match self { CheckedRequest::HeaderProof(_, req) => NetRequest::HeaderProof(req), CheckedRequest::HeaderByHash(_, req) => NetRequest::Headers(req), + CheckedRequest::HeaderWithAncestors(_, req) => NetRequest::Headers(req), CheckedRequest::TransactionIndex(_, req) => NetRequest::TransactionIndex(req), CheckedRequest::Receipts(_, req) => NetRequest::Receipts(req), CheckedRequest::Body(_, req) => NetRequest::Body(req), @@ -399,6 +415,27 @@ impl CheckedRequest { None } + CheckedRequest::HeaderWithAncestors(_, ref req) => { + if req.skip != 1 || !req.reverse { + return None; + } + + if let Some(&net_request::HashOrNumber::Hash(start)) = req.start.as_ref() { + let mut result = Vec::with_capacity(req.max as usize); + let mut hash = start; + let mut cache = cache.lock(); + for _ in 0..req.max { + match cache.block_header(&hash) { + Some(header) => { + hash = header.parent_hash(); + result.push(header); + } + None => return None, + } + } + Some(Response::HeaderWithAncestors(result)) + } else { None } + } CheckedRequest::Receipts(ref check, ref req) => { // empty transactions -> no receipts if check.0.as_ref().ok().map_or(false, |hdr| hdr.receipts_root() == KECCAK_NULL_RLP) { @@ -467,6 +504,7 @@ macro_rules! match_me { match $me { CheckedRequest::HeaderProof($check, $req) => $e, CheckedRequest::HeaderByHash($check, $req) => $e, + CheckedRequest::HeaderWithAncestors($check, $req) => $e, CheckedRequest::TransactionIndex($check, $req) => $e, CheckedRequest::Receipts($check, $req) => $e, CheckedRequest::Body($check, $req) => $e, @@ -496,6 +534,15 @@ impl IncompleteRequest for CheckedRequest { _ => Ok(()), } } + CheckedRequest::HeaderWithAncestors(ref check, ref req) => { + req.check_outputs(&mut f)?; + + // make sure the output given is definitively a hash. 
+ match check.block_hash { + Field::BackReference(r, idx) => f(r, idx, OutputKind::Hash), + _ => Ok(()), + } + } CheckedRequest::TransactionIndex(_, ref req) => req.check_outputs(f), CheckedRequest::Receipts(_, ref req) => req.check_outputs(f), CheckedRequest::Body(_, ref req) => req.check_outputs(f), @@ -524,6 +571,10 @@ impl IncompleteRequest for CheckedRequest { trace!(target: "on_demand", "HeaderByHash request completed {:?}", req); req.complete().map(CompleteRequest::Headers) } + CheckedRequest::HeaderWithAncestors(_, req) => { + trace!(target: "on_demand", "HeaderWithAncestors request completed {:?}", req); + req.complete().map(CompleteRequest::Headers) + } CheckedRequest::TransactionIndex(_, req) => { trace!(target: "on_demand", "TransactionIndex request completed {:?}", req); req.complete().map(CompleteRequest::TransactionIndex) @@ -587,6 +638,9 @@ impl net_request::CheckedRequest for CheckedRequest { CheckedRequest::HeaderByHash(ref prover, _) => expect!((&NetResponse::Headers(ref res), &CompleteRequest::Headers(ref req)) => prover.check_response(cache, &req.start, &res.headers).map(Response::HeaderByHash)), + CheckedRequest::HeaderWithAncestors(ref prover, _) => + expect!((&NetResponse::Headers(ref res), &CompleteRequest::Headers(ref req)) => + prover.check_response(cache, &req.start, &res.headers).map(Response::HeaderWithAncestors)), CheckedRequest::TransactionIndex(ref prover, _) => expect!((&NetResponse::TransactionIndex(ref res), _) => prover.check_response(cache, res).map(Response::TransactionIndex)), @@ -620,6 +674,8 @@ pub enum Response { HeaderProof((H256, U256)), /// Response to a header-by-hash request. HeaderByHash(encoded::Header), + /// Response to a header-by-hash with ancestors request. + HeaderWithAncestors(Vec), /// Response to a transaction-index request. TransactionIndex(net_request::TransactionIndexResponse), /// Response to a receipts request. @@ -661,6 +717,10 @@ pub enum Error { Decoder(::rlp::DecoderError), /// Empty response. Empty, + /// Response data length exceeds request max. + TooManyResults(u64, u64), + /// Response data is incomplete. + TooFewResults(u64, u64), /// Trie lookup error (result of bad proof) Trie(TrieError), /// Bad inclusion proof @@ -677,6 +737,8 @@ pub enum Error { WrongTrieRoot(H256, H256), /// Wrong response kind. WrongKind, + /// Wrong sequence of headers. + WrongHeaderSequence, } impl From<::rlp::DecoderError> for Error { @@ -737,6 +799,65 @@ impl HeaderProof { } } +/// Request for a header by hash with a range of ancestors. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct HeaderWithAncestors { + /// Hash of the last block in the range to fetch. + pub block_hash: Field, + /// Number of headers before the last block to fetch in addition. + pub ancestor_count: u64, +} + +impl HeaderWithAncestors { + /// Check a response for the headers. 
+ pub fn check_response( + &self, + cache: &Mutex<::cache::Cache>, + start: &net_request::HashOrNumber, + headers: &[encoded::Header] + ) -> Result, Error> { + let expected_hash = match (self.block_hash, start) { + (Field::Scalar(ref h), &net_request::HashOrNumber::Hash(ref h2)) => { + if h != h2 { return Err(Error::WrongHash(*h, *h2)) } + *h + } + (_, &net_request::HashOrNumber::Hash(h2)) => h2, + _ => return Err(Error::HeaderByNumber), + }; + + let start_header = headers.first().ok_or(Error::Empty)?; + let start_hash = start_header.hash(); + if start_hash != expected_hash { + return Err(Error::WrongHash(expected_hash, start_hash)); + } + + let expected_len = 1 + cmp::min(self.ancestor_count, start_header.number()); + let actual_len = headers.len() as u64; + match actual_len.cmp(&expected_len) { + cmp::Ordering::Less => + return Err(Error::TooFewResults(expected_len, actual_len)), + cmp::Ordering::Greater => + return Err(Error::TooManyResults(expected_len, actual_len)), + cmp::Ordering::Equal => (), + }; + + for (header, prev_header) in headers.iter().zip(headers[1..].iter()) { + if header.number() != prev_header.number() + 1 || + header.parent_hash() != prev_header.hash() + { + return Err(Error::WrongHeaderSequence) + } + } + + let mut cache = cache.lock(); + for header in headers { + cache.insert_block_header(header.hash(), header.clone()); + } + + Ok(headers.to_vec()) + } +} + /// Request for a header by hash. #[derive(Debug, Clone, PartialEq, Eq)] pub struct HeaderByHash(pub Field); @@ -1045,6 +1166,83 @@ mod tests { assert!(HeaderByHash(hash.into()).check_response(&cache, &hash.into(), &[raw_header]).is_ok()) } + #[test] + fn check_header_with_ancestors() { + let mut last_header_hash = H256::default(); + let mut headers = (0..11).map(|num| { + let mut header = Header::new(); + header.set_number(num); + header.set_parent_hash(last_header_hash); + + last_header_hash = header.hash(); + header + }).collect::>(); + + headers.reverse(); // because responses are in reverse order + + let raw_headers = headers.iter() + .map(|hdr| encoded::Header::new(::rlp::encode(hdr).into_vec())) + .collect::>(); + + let mut invalid_successor = Header::new(); + invalid_successor.set_number(11); + invalid_successor.set_parent_hash(headers[1].hash()); + + let raw_invalid_successor = encoded::Header::new(::rlp::encode(&invalid_successor).into_vec()); + + let cache = Mutex::new(make_cache()); + + let header_with_ancestors = |hash, count| { + HeaderWithAncestors { + block_hash: hash, + ancestor_count: count + } + }; + + // Correct responses + assert!(header_with_ancestors(headers[0].hash().into(), 0) + .check_response(&cache, &headers[0].hash().into(), &raw_headers[0..1]).is_ok()); + assert!(header_with_ancestors(headers[0].hash().into(), 2) + .check_response(&cache, &headers[0].hash().into(), &raw_headers[0..3]).is_ok()); + assert!(header_with_ancestors(headers[0].hash().into(), 10) + .check_response(&cache, &headers[0].hash().into(), &raw_headers[0..11]).is_ok()); + assert!(header_with_ancestors(headers[2].hash().into(), 2) + .check_response(&cache, &headers[2].hash().into(), &raw_headers[2..5]).is_ok()); + assert!(header_with_ancestors(headers[2].hash().into(), 10) + .check_response(&cache, &headers[2].hash().into(), &raw_headers[2..11]).is_ok()); + assert!(header_with_ancestors(invalid_successor.hash().into(), 0) + .check_response(&cache, &invalid_successor.hash().into(), &[raw_invalid_successor.clone()]).is_ok()); + + // Incorrect responses + assert_eq!(header_with_ancestors(invalid_successor.hash().into(), 
0) + .check_response(&cache, &headers[0].hash().into(), &raw_headers[0..1]), + Err(Error::WrongHash(invalid_successor.hash(), headers[0].hash()))); + assert_eq!(header_with_ancestors(headers[0].hash().into(), 0) + .check_response(&cache, &headers[0].hash().into(), &[]), + Err(Error::Empty)); + assert_eq!(header_with_ancestors(headers[0].hash().into(), 10) + .check_response(&cache, &headers[0].hash().into(), &raw_headers[0..10]), + Err(Error::TooFewResults(11, 10))); + assert_eq!(header_with_ancestors(headers[0].hash().into(), 9) + .check_response(&cache, &headers[0].hash().into(), &raw_headers[0..11]), + Err(Error::TooManyResults(10, 11))); + + let response = &[raw_headers[0].clone(), raw_headers[2].clone()]; + assert_eq!(header_with_ancestors(headers[0].hash().into(), 1) + .check_response(&cache, &headers[0].hash().into(), response), + Err(Error::WrongHeaderSequence)); + + let response = &[raw_invalid_successor.clone(), raw_headers[0].clone()]; + assert_eq!(header_with_ancestors(invalid_successor.hash().into(), 1) + .check_response(&cache, &invalid_successor.hash().into(), response), + Err(Error::WrongHeaderSequence)); + + let response = &[raw_invalid_successor.clone(), raw_headers[1].clone()]; + assert_eq!(header_with_ancestors(invalid_successor.hash().into(), 1) + .check_response(&cache, &invalid_successor.hash().into(), response), + Err(Error::WrongHeaderSequence)); + } + #[test] fn check_body() { use rlp::RlpStream; diff --git a/ethcore/light/src/provider.rs b/ethcore/light/src/provider.rs index a066cefb529..90cbe95b63c 100644 --- a/ethcore/light/src/provider.rs +++ b/ethcore/light/src/provider.rs @@ -33,6 +33,9 @@ use transaction_queue::TransactionQueue; use request; +/// Maximum allowed size of a headers request. +pub const MAX_HEADERS_PER_REQUEST: u64 = 512; + /// Defines the operations that a provider for the light subprotocol must fulfill. pub trait Provider: Send + Sync { /// Provide current blockchain info. @@ -54,7 +57,6 @@ pub trait Provider: Send + Sync { /// results within must adhere to the `skip` and `reverse` parameters. 
fn block_headers(&self, req: request::CompleteHeadersRequest) -> Option { use request::HashOrNumber; - const MAX_HEADERS_TO_SEND: u64 = 512; if req.max == 0 { return None } @@ -83,7 +85,7 @@ pub trait Provider: Send + Sync { } }; - let max = ::std::cmp::min(MAX_HEADERS_TO_SEND, req.max); + let max = ::std::cmp::min(MAX_HEADERS_PER_REQUEST, req.max); let headers: Vec<_> = (0u64..max) .map(|x: u64| x.saturating_mul(req.skip.saturating_add(1))) diff --git a/rpc/Cargo.toml b/rpc/Cargo.toml index b896a44aeb7..3122c5e30b3 100644 --- a/rpc/Cargo.toml +++ b/rpc/Cargo.toml @@ -37,6 +37,7 @@ jsonrpc-pubsub = { git = "https://github.com/paritytech/jsonrpc.git", branch = " ethash = { path = "../ethash" } ethcore = { path = "../ethcore", features = ["test-helpers"] } +fastmap = { path = "../util/fastmap" } parity-bytes = { git = "https://github.com/paritytech/parity-common" } parity-crypto = { git = "https://github.com/paritytech/parity-common" } ethcore-devtools = { path = "../devtools" } diff --git a/rpc/src/lib.rs b/rpc/src/lib.rs index 2e731cd3475..2f3f4968ec9 100644 --- a/rpc/src/lib.rs +++ b/rpc/src/lib.rs @@ -44,6 +44,7 @@ extern crate jsonrpc_pubsub; extern crate ethash; extern crate ethcore; +extern crate fastmap; extern crate parity_bytes as bytes; extern crate parity_crypto as crypto; extern crate ethcore_devtools as devtools; diff --git a/rpc/src/v1/helpers/errors.rs b/rpc/src/v1/helpers/errors.rs index 4afd40ff843..33575fcf1f4 100644 --- a/rpc/src/v1/helpers/errors.rs +++ b/rpc/src/v1/helpers/errors.rs @@ -101,6 +101,14 @@ pub fn request_rejected_limit() -> Error { } } +pub fn request_rejected_param_limit(limit: u64, items_desc: &str) -> Error { + Error { + code: ErrorCode::ServerError(codes::REQUEST_REJECTED_LIMIT), + message: format!("Requested data size exceeds limit of {} {}.", limit, items_desc), + data: None, + } +} + pub fn account(error: &str, details: T) -> Error { Error { code: ErrorCode::ServerError(codes::ACCOUNT_ERROR), diff --git a/rpc/src/v1/helpers/light_fetch.rs b/rpc/src/v1/helpers/light_fetch.rs index 51fb0a5f837..6b1ecf493ba 100644 --- a/rpc/src/v1/helpers/light_fetch.rs +++ b/rpc/src/v1/helpers/light_fetch.rs @@ -16,6 +16,7 @@ //! Helpers for fetching blockchain data either from the light client or the network. +use std::cmp; use std::sync::Arc; use ethcore::basic_account::BasicAccount; @@ -31,7 +32,7 @@ use jsonrpc_macros::Trailing; use light::cache::Cache; use light::client::LightChainClient; -use light::cht; +use light::{cht, MAX_HEADERS_PER_REQUEST}; use light::on_demand::{ request, OnDemand, HeaderRef, Request as OnDemandRequest, Response as OnDemandResponse, ExecutionResult, @@ -42,6 +43,7 @@ use sync::LightSync; use ethereum_types::{U256, Address}; use hash::H256; use parking_lot::Mutex; +use fastmap::H256FastMap; use transaction::{Action, Transaction as EthTransaction, SignedTransaction, LocalizedTransaction}; use v1::helpers::{CallRequest as CallRequestHelper, errors, dispatch}; @@ -299,78 +301,67 @@ impl LightFetch { use std::collections::BTreeMap; use jsonrpc_core::futures::stream::{self, Stream}; - // early exit for "to" block before "from" block. 
- let best_number = self.client.chain_info().best_block_number; - let block_number = |id| match id { - BlockId::Earliest => Some(0), - BlockId::Latest => Some(best_number), - BlockId::Hash(h) => self.client.block_header(BlockId::Hash(h)).map(|hdr| hdr.number()), - BlockId::Number(x) => Some(x), - }; - - let (from_block_number, from_block_header) = match self.client.block_header(filter.from_block) { - Some(from) => (from.number(), from), - None => return Either::A(future::err(errors::unknown_block())), - }; + const MAX_BLOCK_RANGE: u64 = 1000; - match block_number(filter.to_block) { - Some(to) if to < from_block_number || from_block_number > best_number - => return Either::A(future::ok(Vec::new())), - Some(_) => (), - _ => return Either::A(future::err(errors::unknown_block())), - } - - let maybe_future = self.sync.with_context(move |ctx| { - // find all headers which match the filter, and fetch the receipts for each one. - // match them with their numbers for easy sorting later. - let bit_combos = filter.bloom_possibilities(); - let receipts_futures: Vec<_> = self.client.ancestry_iter(filter.to_block) - .take_while(|ref hdr| hdr.number() != from_block_number) - .chain(Some(from_block_header)) - .filter(|ref hdr| { - let hdr_bloom = hdr.log_bloom(); - bit_combos.iter().any(|bloom| hdr_bloom.contains_bloom(bloom)) - }) - .map(|hdr| (hdr.number(), hdr.hash(), request::BlockReceipts(hdr.into()))) - .map(|(num, hash, req)| self.on_demand.request(ctx, req).expect(NO_INVALID_BACK_REFS).map(move |x| (num, hash, x))) - .collect(); + let fetcher = self.clone(); + self.headers_range_by_block_id(filter.from_block, filter.to_block, MAX_BLOCK_RANGE) + .and_then(move |mut headers| { + if headers.is_empty() { + return Either::A(future::ok(Vec::new())); + } - // as the receipts come in, find logs within them which match the filter. - // insert them into a BTreeMap to maintain order by number and block index. - stream::futures_unordered(receipts_futures) - .fold(BTreeMap::new(), move |mut matches, (num, hash, receipts)| { - let mut block_index = 0; - for (transaction_index, receipt) in receipts.into_iter().enumerate() { - for (transaction_log_index, log) in receipt.logs.into_iter().enumerate() { - if filter.matches(&log) { - matches.insert((num, block_index), Log { - address: log.address.into(), - topics: log.topics.into_iter().map(Into::into).collect(), - data: log.data.into(), - block_hash: Some(hash.into()), - block_number: Some(num.into()), - // No way to easily retrieve transaction hash, so let's just skip it. - transaction_hash: None, - transaction_index: Some(transaction_index.into()), - log_index: Some(block_index.into()), - transaction_log_index: Some(transaction_log_index.into()), - log_type: "mined".into(), - removed: false, - }); + let on_demand = &fetcher.on_demand; + + let maybe_future = fetcher.sync.with_context(move |ctx| { + // find all headers which match the filter, and fetch the receipts for each one. + // match them with their numbers for easy sorting later. + let bit_combos = filter.bloom_possibilities(); + let receipts_futures: Vec<_> = headers.drain(..) + .filter(|ref hdr| { + let hdr_bloom = hdr.log_bloom(); + bit_combos.iter().any(|bloom| hdr_bloom.contains_bloom(bloom)) + }) + .map(|hdr| (hdr.number(), hdr.hash(), request::BlockReceipts(hdr.into()))) + .map(|(num, hash, req)| on_demand.request(ctx, req).expect(NO_INVALID_BACK_REFS).map(move |x| (num, hash, x))) + .collect(); + + // as the receipts come in, find logs within them which match the filter. 
+ // insert them into a BTreeMap to maintain order by number and block index. + stream::futures_unordered(receipts_futures) + .fold(BTreeMap::new(), move |mut matches, (num, hash, receipts)| { + let mut block_index = 0; + for (transaction_index, receipt) in receipts.into_iter().enumerate() { + for (transaction_log_index, log) in receipt.logs.into_iter().enumerate() { + if filter.matches(&log) { + matches.insert((num, block_index), Log { + address: log.address.into(), + topics: log.topics.into_iter().map(Into::into).collect(), + data: log.data.into(), + block_hash: Some(hash.into()), + block_number: Some(num.into()), + // No way to easily retrieve transaction hash, so let's just skip it. + transaction_hash: None, + transaction_index: Some(transaction_index.into()), + log_index: Some(block_index.into()), + transaction_log_index: Some(transaction_log_index.into()), + log_type: "mined".into(), + removed: false, + }); + } + block_index += 1; + } } - block_index += 1; - } - } - future::ok(matches) - }) // and then collect them into a vector. - .map(|matches| matches.into_iter().map(|(_, v)| v).collect()) - .map_err(errors::on_demand_cancel) - }); - - match maybe_future { - Some(fut) => Either::B(Either::A(fut)), - None => Either::B(Either::B(future::err(errors::network_disabled()))), - } + future::ok(matches) + }) // and then collect them into a vector. + .map(|matches| matches.into_iter().map(|(_, v)| v).collect()) + .map_err(errors::on_demand_cancel) + }); + + match maybe_future { + Some(fut) => Either::B(Either::A(fut)), + None => Either::B(Either::B(future::err(errors::network_disabled()))), + } + }) } // Get a transaction by hash. also returns the index in the block. @@ -448,6 +439,150 @@ impl LightFetch { None => Box::new(future::err(errors::network_disabled())) as Box + Send> } } + + fn headers_range_by_block_id( + &self, + from_block: BlockId, + to_block: BlockId, + max: u64 + ) -> impl Future, Error = Error> { + let fetch_hashes = [from_block, to_block].iter() + .filter_map(|block_id| match block_id { + BlockId::Hash(hash) => Some(hash.clone()), + _ => None, + }) + .collect::>(); + + let best_number = self.client.chain_info().best_block_number; + + let fetcher = self.clone(); + self.headers_by_hash(&fetch_hashes[..]).and_then(move |mut header_map| { + let (from_block_num, to_block_num) = { + let block_number = |id| match id { + &BlockId::Earliest => 0, + &BlockId::Latest => best_number, + &BlockId::Hash(ref h) => + header_map.get(h).map(|hdr| hdr.number()) + .expect("from_block and to_block headers are fetched by hash; this closure is only called on from_block and to_block; qed"), + &BlockId::Number(x) => x, + }; + (block_number(&from_block), block_number(&to_block)) + }; + + if to_block_num < from_block_num { + // early exit for "to" block before "from" block. 
+ return Either::A(future::err(errors::filter_block_not_found(to_block))); + } else if to_block_num - from_block_num >= max { + return Either::A(future::err(errors::request_rejected_param_limit(max, "blocks"))); + } + + let to_header_hint = match to_block { + BlockId::Hash(ref h) => header_map.remove(h), + _ => None, + }; + let headers_fut = fetcher.headers_range(from_block_num, to_block_num, to_header_hint); + Either::B(headers_fut.map(move |headers| { + // Validate from_block if it's a hash + let last_hash = headers.last().map(|hdr| hdr.hash()); + match (last_hash, from_block) { + (Some(h1), BlockId::Hash(h2)) if h1 != h2 => Vec::new(), + _ => headers, + } + })) + }) + } + + fn headers_by_hash(&self, hashes: &[H256]) -> impl Future, Error = Error> { + let mut refs = H256FastMap::with_capacity_and_hasher(hashes.len(), Default::default()); + let mut reqs = Vec::with_capacity(hashes.len()); + + for hash in hashes { + refs.entry(*hash).or_insert_with(|| { + self.make_header_requests(BlockId::Hash(*hash), &mut reqs) + .expect("make_header_requests never fails for BlockId::Hash; qed") + }); + } + + self.send_requests(reqs, move |res| { + let headers = refs.drain() + .map(|(hash, header_ref)| { + let hdr = extract_header(&res, header_ref) + .expect("these responses correspond to requests that header_ref belongs to; \ + qed"); + (hash, hdr) + }) + .collect(); + headers + }) + } + + fn headers_range( + &self, + from_number: u64, + to_number: u64, + to_header_hint: Option + ) -> impl Future, Error = Error> { + let range_length = (to_number - from_number + 1) as usize; + let mut headers: Vec = Vec::with_capacity(range_length); + + let iter_start = match to_header_hint { + Some(hdr) => { + let block_id = BlockId::Hash(hdr.parent_hash()); + headers.push(hdr); + block_id + } + None => BlockId::Number(to_number), + }; + headers.extend(self.client.ancestry_iter(iter_start) + .take_while(|hdr| hdr.number() >= from_number)); + + let fetcher = self.clone(); + future::loop_fn(headers, move |mut headers| { + let remaining = range_length - headers.len(); + if remaining == 0 { + return Either::A(future::ok(future::Loop::Break(headers))); + } + + let mut reqs: Vec = Vec::with_capacity(2); + + let start_hash = if let Some(hdr) = headers.last() { + hdr.parent_hash().into() + } else { + let cht_root = cht::block_to_cht_number(to_number) + .and_then(|cht_num| fetcher.client.cht_root(cht_num as usize)); + + let cht_root = match cht_root { + Some(cht_root) => cht_root, + None => return Either::A(future::err(errors::unknown_block())), + }; + + let header_proof = request::HeaderProof::new(to_number, cht_root) + .expect("HeaderProof::new is Some(_) if cht::block_to_cht_number() is Some(_); \ + this would return above if block_to_cht_number returned None; qed"); + + let idx = reqs.len(); + let hash_ref = Field::back_ref(idx, 0); + reqs.push(header_proof.into()); + + hash_ref + }; + + let max = cmp::min(remaining as u64, MAX_HEADERS_PER_REQUEST); + reqs.push(request::HeaderWithAncestors { + block_hash: start_hash, + ancestor_count: max - 1, + }.into()); + + Either::B(fetcher.send_requests(reqs, |mut res| { + match res.last_mut() { + Some(&mut OnDemandResponse::HeaderWithAncestors(ref mut res_headers)) => + headers.extend(res_headers.drain(..)), + _ => panic!("reqs has at least one entry; each request maps to a response; qed"), + }; + future::Loop::Continue(headers) + })) + }) + } } #[derive(Clone)] From bd3bc5c16837ae3d25bd834e04e2b469fada351d Mon Sep 17 00:00:00 2001 From: "Denis S. 
Soldatov aka General-Beck" Date: Sun, 26 Aug 2018 02:44:08 +0400 Subject: [PATCH 08/15] Updating the CI system (#8765) * Updating the CI system with the publication of releases and binary files on github Signed-off-by: Denis S. Soldatov aka General-Beck * add missed scripts * chmod +x scripts * fix download link for github * rebuilding CI scripts * small fixes * update submodule wasm tests * ci: fix merge leftovers * ci: remove gitlab-next from ci triggers * ci: fix git add in docs script * ci: use nightly instead of master for publish triggers * ci: remove sleep from gitlab config * ci: replace ':' with '-' in gitlab targets * ci: fix recursive copy in docs script --- .gitlab-ci.yml | 452 ++++++++++-------- docker/hub/Dockerfile | 79 +-- scripts/cov.sh | 31 -- scripts/docker-build.sh | 11 - scripts/gitlab-build.sh | 234 --------- scripts/gitlab-push-release.sh | 12 - scripts/gitlab-test.sh | 47 -- scripts/gitlab/build-unix.sh | 35 ++ scripts/gitlab/build-windows.sh | 35 ++ scripts/gitlab/clippy.sh | 7 + scripts/gitlab/coverage.sh | 20 + scripts/gitlab/install-readme.sh | 8 + scripts/gitlab/package-snap.sh | 25 + scripts/gitlab/publish-docker.sh | 22 + scripts/gitlab/publish-snap.sh | 18 + scripts/gitlab/push.sh | 67 +++ scripts/gitlab/rpc-docs.sh | 54 +++ scripts/gitlab/rustfmt.sh | 7 + scripts/{ => gitlab}/safe_curl.sh | 0 scripts/gitlab/sign.cmd | 1 + scripts/gitlab/templates/release-table.md | 16 + .../gitlab/templates/snapcraft.template.yaml | 58 +++ scripts/gitlab/test.sh | 28 ++ snap/snapcraft.yaml | 41 -- test.sh | 10 +- 25 files changed, 680 insertions(+), 638 deletions(-) delete mode 100755 scripts/cov.sh delete mode 100755 scripts/docker-build.sh delete mode 100755 scripts/gitlab-build.sh delete mode 100755 scripts/gitlab-push-release.sh delete mode 100755 scripts/gitlab-test.sh create mode 100755 scripts/gitlab/build-unix.sh create mode 100755 scripts/gitlab/build-windows.sh create mode 100755 scripts/gitlab/clippy.sh create mode 100755 scripts/gitlab/coverage.sh create mode 100755 scripts/gitlab/install-readme.sh create mode 100755 scripts/gitlab/package-snap.sh create mode 100755 scripts/gitlab/publish-docker.sh create mode 100755 scripts/gitlab/publish-snap.sh create mode 100755 scripts/gitlab/push.sh create mode 100755 scripts/gitlab/rpc-docs.sh create mode 100755 scripts/gitlab/rustfmt.sh rename scripts/{ => gitlab}/safe_curl.sh (100%) create mode 100755 scripts/gitlab/sign.cmd create mode 100644 scripts/gitlab/templates/release-table.md create mode 100644 scripts/gitlab/templates/snapcraft.template.yaml create mode 100755 scripts/gitlab/test.sh delete mode 100644 snap/snapcraft.yaml diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index ac3e44892fe..856cdafc926 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1,242 +1,296 @@ stages: - test - - push-release - build + - package + - publish - docs + +image: parity/rust:gitlab-ci + variables: - RUST_BACKTRACE: "1" - RUSTFLAGS: "" - CARGOFLAGS: "" - CI_SERVER_NAME: "GitLab CI" + CI_SERVER_NAME: "GitLab CI" + CARGO_HOME: "${CI_PROJECT_DIR}/cargo" + + BUILD_TARGET: ubuntu + BUILD_ARCH: amd64 + CARGO_TARGET: x86_64-unknown-linux-gnu + cache: - key: "$CI_BUILD_STAGE-$CI_BUILD_REF_NAME" + key: "${CI_JOB_NAME}" paths: - - target/ - untracked: true -linux-amd64: - stage: build - image: parity/rust:gitlab-ci - only: - - beta - - tags + - ${CI_PROJECT_DIR}/target/ + - ${CI_PROJECT_DIR}/cargo/ + +.releaseable_branches: # list of git refs for building GitLab artifacts (think "pre-release binaries") + only: &releaseable_branches + - 
master - stable - - triggers - script: - - rustup default stable - # ARGUMENTS: 1. BUILD_PLATFORM (target for binaries) 2. PLATFORM (target for cargo) 3. ARC (architecture) 4. & 5. CC & CXX flags 6. binary identifier - - scripts/gitlab-build.sh x86_64-unknown-linux-gnu x86_64-unknown-linux-gnu amd64 gcc g++ linux - tags: - - rust-stable - artifacts: - paths: - - parity.zip - name: "stable-x86_64-unknown-linux-gnu_parity" -linux-i686: - stage: build - image: parity/rust-i686:gitlab-ci - only: - beta - tags - - stable - - triggers - script: - - scripts/gitlab-build.sh i686-unknown-linux-gnu i686-unknown-linux-gnu i386 gcc g++ linux - tags: - - rust-i686 + +.publishable_branches: # list of git refs for publishing builds to the "production" locations + only: &publishable_branches + - nightly # Our nightly builds from schedule, on `master` + - "v2*" # Our version tags + +.collect_artifacts: &collect_artifacts artifacts: + name: "${CI_JOB_NAME}_${CI_COMMIT_REF_NAME}" + when: on_success + expire_in: 1 mos paths: - - parity.zip - name: "i686-unknown-linux-gnu" - allow_failure: true -linux-armv7: - stage: build - image: parity/rust-armv7:gitlab-ci - only: - - beta - - tags - - stable - - triggers + - artifacts/ + +.determine_version: &determine_version | + export VERSION=$(grep -m 1 "version =" Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n") + echo "Version" $VERSION + + +#### stage: test + +test-rust-stable: &test + stage: test script: - - scripts/gitlab-build.sh armv7-unknown-linux-gnueabihf armv7-unknown-linux-gnueabihf armhf arm-linux-gnueabihf-gcc arm-linux-gnueabihf-g++ linux + - scripts/gitlab/test.sh stable tags: - - rust-arm - artifacts: - paths: - - parity.zip - name: "armv7_unknown_linux_gnueabihf_parity" - allow_failure: true -linux-armhf: - stage: build - image: parity/rust-arm:gitlab-ci + - rust-stable + +.optional_test: &optional_test + <<: *test + allow_failure: true only: - - beta - - tags - - stable - - triggers + - master + +test-rust-beta: + <<: *optional_test script: - - scripts/gitlab-build.sh arm-unknown-linux-gnueabihf arm-unknown-linux-gnueabihf armhf arm-linux-gnueabihf-gcc arm-linux-gnueabihf-g++ linux - tags: - - rust-arm - artifacts: - paths: - - parity.zip - name: "arm-unknown-linux-gnueabihf_parity" - allow_failure: true -linux-aarch64: - stage: build - image: parity/rust-arm64:gitlab-ci + - scripts/gitlab/test.sh beta + +test-rust-nightly: + <<: *optional_test + script: + - scripts/gitlab/test.sh nightly + +test-lint-rustfmt: + <<: *optional_test + script: + - scripts/gitlab/rustfmt.sh + +test-lint-clippy: + <<: *optional_test + script: + - scripts/gitlab/clippy.sh + +test-coverage-kcov: + stage: test only: - - beta - - tags - - stable - - triggers + - master script: - - scripts/gitlab-build.sh aarch64-unknown-linux-gnu aarch64-unknown-linux-gnu arm64 aarch64-linux-gnu-gcc aarch64-linux-gnu-g++ linux + - scripts/gitlab/coverage.sh tags: - - rust-arm - artifacts: - paths: - - parity.zip - name: "aarch64-unknown-linux-gnu_parity" -linux-snap: - stage: build - image: parity/snapcraft:gitlab-ci - only: - - stable - - beta - - tags - - triggers + - shell + allow_failure: true + + +#### stage: build + +build-linux-ubuntu-amd64: &build + stage: build + only: *releaseable_branches + variables: + CARGO_TARGET: x86_64-unknown-linux-gnu script: - - scripts/gitlab-build.sh x86_64-unknown-snap-gnu x86_64-unknown-linux-gnu amd64 gcc g++ snap + - scripts/gitlab/build-unix.sh + <<: *collect_artifacts tags: - rust-stable - artifacts: - paths: - - parity.zip - name: 
"stable-x86_64-unknown-snap-gnu_parity" -darwin: - stage: build - only: - - beta - - tags - - stable - - triggers - script: - - scripts/gitlab-build.sh x86_64-apple-darwin x86_64-apple-darwin macos gcc g++ macos + allow_failure: true + +build-linux-ubuntu-i386: + <<: *build + image: parity/rust-i686:gitlab-ci + variables: + CARGO_TARGET: i686-unknown-linux-gnu + +build-linux-ubuntu-arm64: + <<: *build + image: parity/rust-arm64:gitlab-ci + variables: + CARGO_TARGET: aarch64-unknown-linux-gnu + +build-linux-ubuntu-armhf: + <<: *build + image: parity/rust-armv7:gitlab-ci + variables: + CARGO_TARGET: armv7-unknown-linux-gnueabihf + +build-linux-android-armhf: + <<: *build + image: parity/rust-android:gitlab-ci + variables: + CARGO_TARGET: armv7-linux-androideabi + +build-darwin-macos-x86_64: + <<: *build + variables: + CARGO_TARGET: x86_64-apple-darwin + CC: gcc + CXX: g++ tags: - osx - artifacts: - paths: - - parity.zip - name: "x86_64-apple-darwin_parity" -windows: + <<: *collect_artifacts + +build-windows-msvc-x86_64: + stage: build + only: *releaseable_branches cache: - key: "%CI_BUILD_STAGE%-%CI_BUILD_REF_NAME%" - untracked: true - stage: build - only: - - beta - - tags - - stable - - triggers + key: "%CI_JOB_NAME%" + paths: + - "%CI_PROJECT_DIR%/target/" + - "%CI_PROJECT_DIR%/cargo/" + # No cargo caching, since fetch-locking on Windows gets stuck + variables: + CARGO_TARGET: x86_64-pc-windows-msvc script: - - sh scripts/gitlab-build.sh x86_64-pc-windows-msvc x86_64-pc-windows-msvc amd64 "" "" windows + - sh scripts/gitlab/build-windows.sh tags: - rust-windows - artifacts: - paths: - - parity.zip - name: "x86_64-pc-windows-msvc_parity" -android-armv7: - stage: build - image: parity/parity-android:latest - only: - - beta - - tags - - stable - - triggers + <<: *collect_artifacts + + +#### stage: package + +package-linux-snap-amd64: &package_snap + stage: package + only: *releaseable_branches + cache: {} + before_script: + - *determine_version + variables: + CARGO_TARGET: x86_64-unknown-linux-gnu + dependencies: + - build-linux-ubuntu-amd64 script: - - cargo build --target=armv7-linux-androideabi + - scripts/gitlab/package-snap.sh tags: - - rust-arm - allow_failure: true - artifacts: - paths: - - parity.zip - name: "armv7-linux-androideabi_parity" -docker-build: - stage: build - only: - - tags - - master - - beta - - stable - - triggers + - rust-stable + <<: *collect_artifacts + +package-linux-snap-i386: + <<: *package_snap + variables: + BUILD_ARCH: i386 + CARGO_TARGET: i686-unknown-linux-gnu + dependencies: + - build-linux-ubuntu-i386 + +package-linux-snap-arm64: + <<: *package_snap + variables: + BUILD_ARCH: arm64 + CARGO_TARGET: aarch64-unknown-linux-gnu + dependencies: + - build-linux-ubuntu-arm64 + +package-linux-snap-armhf: + <<: *package_snap + variables: + BUILD_ARCH: armhf + CARGO_TARGET: armv7-unknown-linux-gnueabihf + dependencies: + - build-linux-ubuntu-armhf + + +#### stage: publish + +publish-linux-snap-amd64: &publish_snap + stage: publish + only: *publishable_branches + image: snapcore/snapcraft:stable + cache: {} before_script: - - docker info + - *determine_version + variables: + BUILD_ARCH: amd64 + dependencies: + - package-linux-snap-amd64 script: - - if [ "$CI_BUILD_REF_NAME" == "master" ]; then DOCKER_TAG="latest"; else DOCKER_TAG=$CI_BUILD_REF_NAME; fi - - echo "Tag:" $DOCKER_TAG - - docker login -u $Docker_Hub_User_Parity -p $Docker_Hub_Pass_Parity - - scripts/docker-build.sh $DOCKER_TAG - - docker logout + - scripts/gitlab/publish-snap.sh tags: - - docker 
-test-coverage: - stage: test - only: - - master - script: - - scripts/gitlab-test.sh test-coverage + - rust-stable + +publish-linux-snap-i386: + <<: *publish_snap + before_script: + - *determine_version + variables: + BUILD_ARCH: i386 + dependencies: + - package-linux-snap-i386 + +publish-linux-snap-arm64: + <<: *publish_snap + before_script: + - *determine_version + variables: + BUILD_ARCH: arm64 + dependencies: + - package-linux-snap-arm64 + +publish-linux-snap-armhf: + <<: *publish_snap + before_script: + - *determine_version + variables: + BUILD_ARCH: armhf + dependencies: + - package-linux-snap-armhf + +publish-docker-parity-amd64: &publish_docker + stage: publish + only: *publishable_branches + cache: {} + dependencies: + - build-linux-ubuntu-amd64 tags: - - kcov - allow_failure: true -test-rust-stable: - stage: test - image: parity/rust:gitlab-ci + - shell + allow_failure: true script: - - scripts/gitlab-test.sh stable - tags: - - rust-stable -test-rust-beta: - stage: test - only: - - triggers - - master - image: parity/rust:gitlab-ci + - scripts/gitlab/publish-docker.sh parity + +publish-docker-parityevm-amd64: + <<: *publish_docker script: - - scripts/gitlab-test.sh beta - tags: - - rust-beta - allow_failure: true -test-rust-nightly: - stage: test - only: - - triggers - - master - image: parity/rust:gitlab-ci + - scripts/gitlab/publish-docker.sh parity-evm + +publish-github-and-s3: + stage: publish + only: *publishable_branches + cache: {} + dependencies: + - build-linux-ubuntu-amd64 + - build-linux-ubuntu-i386 + - build-linux-ubuntu-armhf + - build-linux-ubuntu-arm64 + - build-darwin-macos-x86_64 + - build-windows-msvc-x86_64 + before_script: + - *determine_version script: - - scripts/gitlab-test.sh nightly + - scripts/gitlab/push.sh tags: - - rust - - rust-nightly - allow_failure: true -json-rpc-docs: + - shell + allow_failure: true + + +####stage: docs + +docs-rpc-json: stage: docs only: - tags - image: parity/rust:gitlab-ci - script: - - scripts/gitlab-rpc-docs.sh - tags: - - docs -push-release: - stage: push-release - only: - - tags - - triggers - image: parity/rust:gitlab-ci + cache: {} script: - - scripts/gitlab-push-release.sh + - scripts/gitlab/rpc-docs.sh tags: - - curl + - shell diff --git a/docker/hub/Dockerfile b/docker/hub/Dockerfile index eb007dc10a8..4eec8cfc61c 100644 --- a/docker/hub/Dockerfile +++ b/docker/hub/Dockerfile @@ -1,64 +1,27 @@ FROM ubuntu:xenial MAINTAINER Parity Technologies -WORKDIR /build -#ENV for build TAG -ARG BUILD_TAG -ENV BUILD_TAG ${BUILD_TAG:-master} -RUN echo "Build tag:" $BUILD_TAG +#set ENVIROMENT +ARG TARGET +ENV TARGET ${TARGET} + # install tools and dependencies -RUN apt-get update && \ - apt-get install -y --force-yes --no-install-recommends \ - # make - build-essential \ - # add-apt-repository - software-properties-common \ - make \ - cmake \ - curl \ - wget \ - git \ - g++ \ - gcc \ - libc6 \ - libc6-dev \ - binutils \ - file \ - libudev-dev \ - pkg-config \ - dpkg-dev &&\ -# install rustup - curl https://sh.rustup.rs -sSf | sh -s -- -y && \ -# rustup directory - PATH=/root/.cargo/bin:$PATH && \ +RUN apt update && apt install -y --no-install-recommends openssl libudev-dev file + # show backtraces - RUST_BACKTRACE=1 && \ -# build parity -cd /build&&git clone https://github.com/paritytech/parity-ethereum && \ - cd parity-ethereum&& \ - git pull&& \ - git checkout $BUILD_TAG && \ - cargo build --verbose --release --features final && \ - strip /build/parity-ethereum/target/release/parity && \ - file 
/build/parity-ethereum/target/release/parity&&mkdir -p /parity&& cp /build/parity-ethereum/target/release/parity /parity&&\ +ENV RUST_BACKTRACE 1 + #cleanup Docker image - rm -rf /root/.cargo&&rm -rf /root/.multirust&&rm -rf /root/.rustup&&rm -rf /build&&\ - apt-get purge -y \ - # make - build-essential \ - # add-apt-repository - software-properties-common \ - make \ - cmake \ - curl \ - wget \ - git \ - g++ \ - gcc \ - binutils \ - file \ - pkg-config \ - dpkg-dev &&\ - rm -rf /var/lib/apt/lists/* +RUN apt autoremove -y +RUN apt clean -y +RUN rm -rf /tmp/* /var/tmp/* /var/lib/apt/lists/* + +#add TARGET to docker image +COPY artifacts/x86_64-unknown-linux-gnu/$TARGET /usr/bin/$TARGET + +# Build a shell script because the ENTRYPOINT command doesn't like using ENV +RUN echo "#!/bin/bash \n ${TARGET} \$@" > ./entrypoint.sh +RUN chmod +x ./entrypoint.sh + # setup ENTRYPOINT -EXPOSE 8080 8545 8180 -ENTRYPOINT ["/parity/parity"] +EXPOSE 5001 8080 8082 8083 8545 8546 8180 30303/tcp 30303/udp +ENTRYPOINT ["./entrypoint.sh"] diff --git a/scripts/cov.sh b/scripts/cov.sh deleted file mode 100755 index b6d25c6921c..00000000000 --- a/scripts/cov.sh +++ /dev/null @@ -1,31 +0,0 @@ -#!/usr/bin/env bash -# Installing KCOV under ubuntu -# https://users.rust-lang.org/t/tutorial-how-to-collect-test-coverages-for-rust-project/650# -### Install deps -# $ sudo apt-get install libcurl4-openssl-dev libelf-dev libdw-dev cmake gcc binutils-dev libiberty-dev -# -### Compile kcov -# $ wget https://github.com/SimonKagstrom/kcov/archive/master.tar.gz && tar xf master.tar.gz -# $ cd kcov-master && mkdir build && cd build -# $ cmake .. && make && sudo make install - -### Running coverage - -set -x -RUSTFLAGS="-C link-dead-code" cargo test --all --no-run || exit $? -KCOV_TARGET="target/cov" -KCOV_FLAGS="--verify" -mkdir -p $KCOV_TARGET -echo "Cover RUST" -for FILE in `find target/debug/deps ! -name "*.*"` -do - timeout --signal=SIGKILL 5m kcov --include-path=$(pwd) --exclude-path=$(pwd)/target $KCOV_FLAGS $KCOV_TARGET $FILE -done -timeout --signal=SIGKILL 5m kcov --include-path=$(pwd) --exclude-path=$(pwd)/target $KCOV_FLAGS $KCOV_TARGET target/debug/parity-* -echo "Cover JS" -cd js -npm install&&npm run test:coverage -cd .. -bash <(curl -s https://codecov.io/bash)&& - echo "Uploaded code coverage" -exit 0 diff --git a/scripts/docker-build.sh b/scripts/docker-build.sh deleted file mode 100755 index b880d33b71a..00000000000 --- a/scripts/docker-build.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env bash -cd docker/hub -DOCKER_BUILD_TAG=$1 -echo "Docker build tag: " $DOCKER_BUILD_TAG -if [[ "$DOCKER_BUILD_TAG" = "latest" ]]; then - docker build --build-arg BUILD_TAG="master" --no-cache=true --tag parity/parity:$DOCKER_BUILD_TAG . -else - docker build --build-arg BUILD_TAG=$DOCKER_BUILD_TAG --no-cache=true --tag parity/parity:$DOCKER_BUILD_TAG . -fi -docker run -it parity/parity:$DOCKER_BUILD_TAG -v -docker push parity/parity:$DOCKER_BUILD_TAG diff --git a/scripts/gitlab-build.sh b/scripts/gitlab-build.sh deleted file mode 100755 index 014cab8e622..00000000000 --- a/scripts/gitlab-build.sh +++ /dev/null @@ -1,234 +0,0 @@ -#!/usr/bin/env bash - -set -e # fail on any error -set -u # treat unset variables as error -#ARGUMENTS: 1. BUILD_PLATFORM (target for binaries) 2. PLATFORM (target for cargo) 3. ARC (architecture) 4. & 5. CC & CXX flags 6. 
binary identifier -BUILD_PLATFORM=$1 -PLATFORM=$2 -ARC=$3 -CC=$4 -CXX=$5 -IDENT=$6 -VER="$(grep -m 1 "version =" Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")" -S3WIN="" -echo "--------------------" -echo "Build for platform: " $BUILD_PLATFORM -echo "Build identifier: " $IDENT -echo "Cargo target: " $PLATFORM -echo "CC&CXX flags: " $CC ", " $CXX -echo "Architecture: " $ARC -echo "Parity version: " $VER -echo "Branch: " $CI_BUILD_REF_NAME -echo "--------------------" - -# NOTE for sha256 we want to display filename as well -# hence we use --* instead of -p * -SHA256_BIN="rhash --sha256" - -set_env () { - echo "Set ENVIROMENT" - export HOST_CC=gcc - export HOST_CXX=g++ - rm -rf .cargo - mkdir -p .cargo - echo "[target.$PLATFORM]" >> .cargo/config - echo "linker= \"$CC\"" >> .cargo/config - cat .cargo/config -} -set_env_win () { - set PLATFORM=x86_64-pc-windows-msvc - set INCLUDE="C:\Program Files (x86)\Microsoft SDKs\Windows\v7.1A\Include;C:\vs2015\VC\include;C:\Program Files (x86)\Windows Kits\10\Include\10.0.10240.0\ucrt" - set LIB="C:\vs2015\VC\lib;C:\Program Files (x86)\Windows Kits\10\Lib\10.0.10240.0\ucrt\x64" - set RUST_BACKTRACE=1 - #export RUSTFLAGS=$RUSTFLAGS - rustup default stable-x86_64-pc-windows-msvc - echo "@ signtool sign /f "\%"1 /p "\%"2 /tr http://timestamp.comodoca.com /du https://parity.io "\%"3" > sign.cmd -} -build () { - if [[ "windows" = $IDENT ]] - then - # This is a nasty hack till we figure out the proper cargo caching strategy - echo "Remove index" - rm -rf cargo/registry/index/*. - fi - echo "Build parity:" - cargo build --target $PLATFORM --features final --release - echo "Build evmbin:" - cargo build --target $PLATFORM --release -p evmbin - echo "Build ethstore-cli:" - cargo build --target $PLATFORM --release -p ethstore-cli - echo "Build ethkey-cli:" - cargo build --target $PLATFORM --release -p ethkey-cli - echo "Build whisper-cli:" - cargo build --target $PLATFORM --release -p whisper-cli -} -strip_binaries () { - echo "Strip binaries:" - $STRIP_BIN -v target/$PLATFORM/release/parity - $STRIP_BIN -v target/$PLATFORM/release/parity-evm - $STRIP_BIN -v target/$PLATFORM/release/ethstore - $STRIP_BIN -v target/$PLATFORM/release/ethkey - $STRIP_BIN -v target/$PLATFORM/release/whisper; -} -calculate_checksums () { - echo "Checksum calculation:" - rhash --version - - rm -rf *.sha256 - - BIN="target/$PLATFORM/release/parity$S3WIN" - export SHA3="$($BIN tools hash $BIN)" - - echo "Parity file SHA3: $SHA3" - $SHA256_BIN target/$PLATFORM/release/parity$S3WIN > parity$S3WIN.sha256 - $SHA256_BIN target/$PLATFORM/release/parity-evm$S3WIN > parity-evm$S3WIN.sha256 - $SHA256_BIN target/$PLATFORM/release/ethstore$S3WIN > ethstore$S3WIN.sha256 - $SHA256_BIN target/$PLATFORM/release/ethkey$S3WIN > ethkey$S3WIN.sha256 - $SHA256_BIN target/$PLATFORM/release/whisper$S3WIN > whisper$S3WIN.sha256 -} -sign_exe () { - ./sign.cmd $keyfile $certpass "target/$PLATFORM/release/parity.exe" - ./sign.cmd $keyfile $certpass "target/$PLATFORM/release/parity-evm.exe" - ./sign.cmd $keyfile $certpass "target/$PLATFORM/release/ethstore.exe" - ./sign.cmd $keyfile $certpass "target/$PLATFORM/release/ethkey.exe" - ./sign.cmd $keyfile $certpass "target/$PLATFORM/release/whisper.exe" -} -push_binaries () { - echo "Push binaries to AWS S3" - aws configure set aws_access_key_id $s3_key - aws configure set aws_secret_access_key $s3_secret - if [[ "$CI_BUILD_REF_NAME" = "beta" || "$CI_BUILD_REF_NAME" = "stable" || "$CI_BUILD_REF_NAME" = "nightly" ]]; - then - export 
S3_BUCKET=builds-parity-published; - else - export S3_BUCKET=builds-parity; - fi - aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/$BUILD_PLATFORM - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$BUILD_PLATFORM/parity$S3WIN --body target/$PLATFORM/release/parity$S3WIN - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$BUILD_PLATFORM/parity$S3WIN.sha256 --body parity$S3WIN.sha256 - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$BUILD_PLATFORM/parity-evm$S3WIN --body target/$PLATFORM/release/parity-evm$S3WIN - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$BUILD_PLATFORM/parity-evm$S3WIN.sha256 --body parity-evm$S3WIN.sha256 - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$BUILD_PLATFORM/ethstore$S3WIN --body target/$PLATFORM/release/ethstore$S3WIN - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$BUILD_PLATFORM/ethstore$S3WIN.sha256 --body ethstore$S3WIN.sha256 - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$BUILD_PLATFORM/ethkey$S3WIN --body target/$PLATFORM/release/ethkey$S3WIN - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$BUILD_PLATFORM/ethkey$S3WIN.sha256 --body ethkey$S3WIN.sha256 - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$BUILD_PLATFORM/whisper$S3WIN --body target/$PLATFORM/release/whisper$S3WIN - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$BUILD_PLATFORM/whisper$S3WIN.sha256 --body whisper$S3WIN.sha256 -} - -make_archive () { - echo "add artifacts to archive" - rm -rf parity.zip - zip -r parity.zip target/$PLATFORM/release/parity$S3WIN target/$PLATFORM/release/parity-evm$S3WIN target/$PLATFORM/release/ethstore$S3WIN target/$PLATFORM/release/ethkey$S3WIN target/$PLATFORM/release/whisper$S3WIN parity$S3WIN.sha256 parity-evm$S3WIN.sha256 ethstore$S3WIN.sha256 ethkey$S3WIN.sha256 whisper$S3WIN.sha256 -} - -updater_push_release () { - echo "push release" - - DATA="commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity$S3WIN&secret=$RELEASES_SECRET" - # Mainnet - source scripts/safe_curl.sh $DATA "http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$BUILD_PLATFORM" - # Kovan - source scripts/safe_curl.sh $DATA "http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/$BUILD_PLATFORM" -} - -case $BUILD_PLATFORM in - x86_64-unknown-linux-gnu) - #set strip bin - STRIP_BIN="strip" - #package extention - build - strip_binaries - calculate_checksums - make_archive - push_binaries - updater_push_release - ;; - i686-unknown-linux-gnu) - STRIP_BIN="strip" - set_env - build - strip_binaries - calculate_checksums - make_archive - push_binaries - ;; - armv7-unknown-linux-gnueabihf) - STRIP_BIN="arm-linux-gnueabihf-strip" - set_env - build - strip_binaries - calculate_checksums - make_archive - push_binaries - ;; - arm-unknown-linux-gnueabihf) - STRIP_BIN="arm-linux-gnueabihf-strip" - set_env - build - strip_binaries - calculate_checksums - make_archive - push_binaries - ;; - aarch64-unknown-linux-gnu) - STRIP_BIN="aarch64-linux-gnu-strip" - set_env - build - strip_binaries - calculate_checksums - make_archive - push_binaries - ;; - x86_64-apple-darwin) - STRIP_BIN="strip" - PLATFORM="x86_64-apple-darwin" - build - strip_binaries - calculate_checksums - make_archive - push_binaries - updater_push_release - ;; - x86_64-unknown-snap-gnu) - ARC="amd64" - EXT="snap" - apt update - apt install -y expect zip rhash - snapcraft clean - echo "Prepare snapcraft.yaml for build on Gitlab CI in 
Docker image" - sed -i 's/git/'"$VER"'/g' snap/snapcraft.yaml - if [[ "$CI_BUILD_REF_NAME" = "stable" || "$CI_BUILD_REF_NAME" = "beta" || "$VER" == *1.11* || "$VER" == *2.0* ]]; - then - sed -i -e 's/grade: devel/grade: stable/' snap/snapcraft.yaml; - fi - mv -f snap/snapcraft.yaml snapcraft.yaml - snapcraft -d - snapcraft_login=$(expect -c " - spawn snapcraft login - expect \"Email:\" - send \"$SNAP_EMAIL\n\" - expect \"Password:\" - send \"$SNAP_PASS\n\" - expect \"\$\" - ") - echo "$snapcraft_login" - snapcraft push "parity_"$VER"_amd64.snap" - snapcraft status parity - snapcraft logout - $SHA256_BIN "parity_"$VER"_amd64.snap" > "parity_"$VER"_amd64.snap.sha256" - echo "add artifacts to archive" - rm -rf parity.zip - zip -r parity.zip "parity_"$VER"_amd64.snap" "parity_"$VER"_amd64.snap.sha256" - ;; - x86_64-pc-windows-msvc) - set_env_win - EXT="exe" - S3WIN=".exe" - build - sign_exe - calculate_checksums - make_archive - push_binaries - updater_push_release -esac diff --git a/scripts/gitlab-push-release.sh b/scripts/gitlab-push-release.sh deleted file mode 100755 index 207d66c11cb..00000000000 --- a/scripts/gitlab-push-release.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/usr/bin/env bash - -set -e # fail on any error -set -u # treat unset variables as error - -DATA="secret=$RELEASES_SECRET" - -echo "Pushing release to Mainnet" -./scripts/safe_curl.sh $DATA "http://update.parity.io:1337/push-release/$CI_BUILD_REF_NAME/$CI_BUILD_REF" - -echo "Pushing release to Kovan" -./scripts/safe_curl.sh $DATA "http://update.parity.io:1338/push-release/$CI_BUILD_REF_NAME/$CI_BUILD_REF" diff --git a/scripts/gitlab-test.sh b/scripts/gitlab-test.sh deleted file mode 100755 index fbd93167a33..00000000000 --- a/scripts/gitlab-test.sh +++ /dev/null @@ -1,47 +0,0 @@ -#!/usr/bin/env bash -#ARGUMENT test for RUST and COVERAGE -set -e # fail on any error -set -u # treat unset variables as error -if [[ "$CI_COMMIT_REF_NAME" = "master" || "$CI_COMMIT_REF_NAME" = "beta" || "$CI_COMMIT_REF_NAME" = "stable" ]]; then - export GIT_COMPARE=$CI_COMMIT_REF_NAME~; -else - export GIT_COMPARE=master; -fi -git fetch -a -export RUST_FILES_MODIFIED="$(git --no-pager diff --name-only $GIT_COMPARE...$CI_COMMIT_SHA | grep -v -e ^\\. 
-e ^LICENSE -e ^README.md -e ^test.sh -e ^windows/ -e ^scripts/ -e ^mac/ -e ^nsis/ -e ^docs/ | wc -l)" -echo "RUST_FILES_MODIFIED: $RUST_FILES_MODIFIED" -TEST_SWITCH=$1 -rust_test () { - git submodule update --init --recursive - rustup show - if [[ "${RUST_FILES_MODIFIED}" == "0" ]]; - then echo "Skipping Rust tests since no Rust files modified."; - else ./test.sh || exit $?; - fi - # if [[ "$CI_COMMIT_REF_NAME" == "nightly" ]]; - # ### @TODO re-enable fail after https://github.com/paritytech/parity-import-tests/issues/3 - # then sh scripts/aura-test.sh; # || exit $?; - # fi -} -coverage_test () { - git submodule update --init --recursive - rm -rf target/* - scripts/cov.sh -} -case $TEST_SWITCH in - stable ) - rustup default stable - rust_test - ;; - beta) - rustup default beta - rust_test - ;; - nightly) - rustup default nightly - rust_test - ;; - test-coverage) - coverage_test - ;; -esac diff --git a/scripts/gitlab/build-unix.sh b/scripts/gitlab/build-unix.sh new file mode 100755 index 00000000000..50ae7314c3d --- /dev/null +++ b/scripts/gitlab/build-unix.sh @@ -0,0 +1,35 @@ +#!/bin/bash + +set -e # fail on any error +set -u # treat unset variables as error +echo "__________Show ENVIROMENT__________" +echo "CC: " $CC +echo "CXX: " $CXX + +echo "__________CARGO CONFIG__________" +rm -rf .cargo +mkdir -p .cargo +echo "[target.$CARGO_TARGET]" >> .cargo/config +echo "linker= \"$CC\"" >> .cargo/config +cat .cargo/config + +echo "_____ Building target: "$CARGO_TARGET" _____" +time cargo build --target $CARGO_TARGET --release --features final +time cargo build --target $CARGO_TARGET --release -p evmbin +time cargo build --target $CARGO_TARGET --release -p ethstore-cli +time cargo build --target $CARGO_TARGET --release -p ethkey-cli +time cargo build --target $CARGO_TARGET --release -p whisper-cli + +echo "_____ Post-processing binaries _____" +rm -rf artifacts +mkdir -p artifacts +cd artifacts +mkdir -p $CARGO_TARGET +cd $CARGO_TARGET +cp ../../target/$CARGO_TARGET/release/{parity,parity-evm,ethstore,ethkey,whisper} . 
+strip -v ./* +echo "_____ Calculating checksums _____" +for binary in $(ls) +do + rhash --sha256 $binary -o $binary.sha256 +done diff --git a/scripts/gitlab/build-windows.sh b/scripts/gitlab/build-windows.sh new file mode 100755 index 00000000000..a5510875c87 --- /dev/null +++ b/scripts/gitlab/build-windows.sh @@ -0,0 +1,35 @@ +#!/bin/bash +set -e # fail on any error +set -u # treat unset variables as error + +set INCLUDE="C:\Program Files (x86)\Microsoft SDKs\Windows\v7.1A\Include;C:\vs2015\VC\include;C:\Program Files (x86)\Windows Kits\10\Include\10.0.10240.0\ucrt" +set LIB="C:\vs2015\VC\lib;C:\Program Files (x86)\Windows Kits\10\Lib\10.0.10240.0\ucrt\x64" + +rustup default stable-x86_64-pc-windows-msvc +echo "_____ Building _____" +time cargo build --target $CARGO_TARGET --release --features final +time cargo build --target $CARGO_TARGET --release -p evmbin +time cargo build --target $CARGO_TARGET --release -p ethstore-cli +time cargo build --target $CARGO_TARGET --release -p ethkey-cli +time cargo build --target $CARGO_TARGET --release -p whisper-cli +echo "__________Sign binaries__________" +scripts/gitlab/sign.cmd $keyfile $certpass target/$CARGO_TARGET/release/parity.exe +scripts/gitlab/sign.cmd $keyfile $certpass target/$CARGO_TARGET/release/parity-evm.exe +scripts/gitlab/sign.cmd $keyfile $certpass target/$CARGO_TARGET/release/ethstore.exe +scripts/gitlab/sign.cmd $keyfile $certpass target/$CARGO_TARGET/release/ethkey.exe +scripts/gitlab/sign.cmd $keyfile $certpass target/$CARGO_TARGET/release/whisper.exe + +echo "_____ Post-processing binaries _____" +rm -rf artifacts +mkdir -p artifacts +cd artifacts +mkdir -p $CARGO_TARGET +cd $CARGO_TARGET +cp --verbose ../../target/$CARGO_TARGET/release/{parity.exe,parity-evm.exe,ethstore.exe,ethkey.exe,whisper.exe} . + +echo "_____ Calculating checksums _____" +for binary in $(ls) +do + rhash --sha256 $binary -o $binary.sha256 +done +cp parity.exe.sha256 parity.sha256 diff --git a/scripts/gitlab/clippy.sh b/scripts/gitlab/clippy.sh new file mode 100755 index 00000000000..aef39c29fbc --- /dev/null +++ b/scripts/gitlab/clippy.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +set -e # fail on any error +set -u # treat unset variables as error + +cargo install clippy +cargo clippy -- -D warnings diff --git a/scripts/gitlab/coverage.sh b/scripts/gitlab/coverage.sh new file mode 100755 index 00000000000..1ddf340a180 --- /dev/null +++ b/scripts/gitlab/coverage.sh @@ -0,0 +1,20 @@ +#!/bin/bash + +set -x +git submodule update --init --recursive +rm -rf target/* +cargo test --all --exclude evmjit --no-run -- --test-threads 8|| exit $? +KCOV_TARGET="target/cov" +KCOV_FLAGS="--verify" +EXCLUDE="/usr/lib,/usr/include,$HOME/.cargo,$HOME/.multirust,rocksdb,secp256k1" +mkdir -p $KCOV_TARGET +echo "__________Cover RUST___________" +for FILE in `find target/debug/deps ! -name "*.*"` + do + timeout --signal=SIGKILL 5m kcov --exclude-pattern $EXCLUDE $KCOV_FLAGS $KCOV_TARGET $FILE + done +timeout --signal=SIGKILL 5m kcov --exclude-pattern $EXCLUDE $KCOV_FLAGS $KCOV_TARGET target/debug/parity-* +echo "Cover JS" +bash <(curl -s https://codecov.io/bash)&& +echo "Uploaded code coverage" +exit 0 diff --git a/scripts/gitlab/install-readme.sh b/scripts/gitlab/install-readme.sh new file mode 100755 index 00000000000..531bae9b77a --- /dev/null +++ b/scripts/gitlab/install-readme.sh @@ -0,0 +1,8 @@ +echo "Parity Wallet +============= + +Welcome to Parity Wallet, your all-in-one Ethereum node and wallet. + +If you continue, Parity will be installed as a user service. 
You will be able to use the Parity Wallet through your browser by using the menu bar icon, following the shortcut in the Launchpad or navigating to http://localhost:8180/ in your browser. + +Parity is distributed under the terms of the GPL." diff --git a/scripts/gitlab/package-snap.sh b/scripts/gitlab/package-snap.sh new file mode 100755 index 00000000000..168ab475155 --- /dev/null +++ b/scripts/gitlab/package-snap.sh @@ -0,0 +1,25 @@ +#!/bin/bash + +set -e # fail on any error +set -u # treat unset variables as error +case ${CI_COMMIT_REF_NAME} in + nightly|*v2.1*) export GRADE="devel";; + beta|*v2.0*) export GRADE="stable";; + stable|*v1.11*) export GRADE="stable";; + *) echo "No release" exit 0;; +esac +SNAP_PACKAGE="parity_"$VERSION"_"$BUILD_ARCH".snap" +echo "__________Create snap package__________" +echo "Release channel :" $GRADE " Branch/tag: " $CI_COMMIT_REF_NAME +snapcraft clean +echo $VERSION:$GRADE:$BUILD_ARCH +cat scripts/gitlab/templates/snapcraft.template.yaml | envsubst '$VERSION:$GRADE:$BUILD_ARCH:$CARGO_TARGET' > snapcraft.yaml +cat snapcraft.yaml +snapcraft --target-arch=$BUILD_ARCH +ls *.snap +echo "__________Post-processing snap package__________" +mkdir -p artifacts +mv -v $SNAP_PACKAGE "artifacts/"$SNAP_PACKAGE +echo "_____ Calculating checksums _____" +cd artifacts +rhash --sha256 $SNAP_PACKAGE -o $SNAP_PACKAGE".sha256" diff --git a/scripts/gitlab/publish-docker.sh b/scripts/gitlab/publish-docker.sh new file mode 100755 index 00000000000..41c2e3db48a --- /dev/null +++ b/scripts/gitlab/publish-docker.sh @@ -0,0 +1,22 @@ +#!/bin/bash +##ARGUMENTS: 1. Docker target +set -e # fail on any error +set -u # treat unset variables as error + +if [ "$CI_COMMIT_REF_NAME" == "beta" ]; +then export DOCKER_BUILD_TAG="latest"; +else export DOCKER_BUILD_TAG=$CI_COMMIT_REF_NAME; +fi +docker login -u $Docker_Hub_User_Parity -p $Docker_Hub_Pass_Parity + +echo "__________Docker TAG__________" +echo $DOCKER_BUILD_TAG + +echo "__________Docker target__________" +export DOCKER_TARGET=$1 +echo $DOCKER_TARGET + +echo "__________Docker build and push__________" +docker build --build-arg TARGET=$DOCKER_TARGET --no-cache=true --tag parity/$DOCKER_TARGET:$DOCKER_BUILD_TAG -f docker/hub/Dockerfile . 
+docker push parity/$DOCKER_TARGET:$DOCKER_BUILD_TAG +docker logout diff --git a/scripts/gitlab/publish-snap.sh b/scripts/gitlab/publish-snap.sh new file mode 100755 index 00000000000..35644c8d754 --- /dev/null +++ b/scripts/gitlab/publish-snap.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +set -e # fail on any error +set -u # treat unset variables as error + +case ${CI_COMMIT_REF_NAME} in + nightly|*v2.1*) export CHANNEL="edge";; + beta|*v2.0*) export CHANNEL="beta";; + stable|*v1.11*) export CHANNEL="stable";; + *) echo "No release" exit 0;; +esac +echo "Release channel :" $CHANNEL " Branch/tag: " $CI_COMMIT_REF_NAME + +echo $SNAPCRAFT_LOGIN_PARITY_BASE64 | base64 --decode > snapcraft.login +snapcraft login --with snapcraft.login +snapcraft push --release $CHANNEL "artifacts/parity_"$VERSION"_"$BUILD_ARCH".snap" +snapcraft status parity +snapcraft logout diff --git a/scripts/gitlab/push.sh b/scripts/gitlab/push.sh new file mode 100755 index 00000000000..718f291fe1f --- /dev/null +++ b/scripts/gitlab/push.sh @@ -0,0 +1,67 @@ +#!/bin/bash + +set -e # fail on any error +set -u # treat unset variables as error +updater_push_release () { + echo "push release" + # Mainnet + +} +echo "__________Set ENVIROMENT__________" +DESCRIPTION="$(cat CHANGELOG.md)" +RELEASE_TABLE="$(cat scripts/gitlab/templates/release-table.md)" +RELEASE_TABLE="$(echo "${RELEASE_TABLE//\$VERSION/${VERSION}}")" +#The text in the file CANGELOG.md before which the table with links is inserted. Must be present in this file necessarily +REPLACE_TEXT="The full list of included changes:" +case ${CI_COMMIT_REF_NAME} in + nightly|*v2.1*) NAME="Parity "$VERSION" nightly";; + beta|*v2.0*) NAME="Parity "$VERSION" beta";; + stable|*v1.11*) NAME="Parity "$VERSION" stable";; + *) echo "No release" exit 0;; +esac +cd artifacts +ls -l | sort -k9 +filetest=( * ) +echo ${filetest[*]} +for DIR in "${filetest[@]}"; +do + cd $DIR + if [[ $DIR == "*windows*" ]]; + then + WIN=".exe"; + else + WIN=""; + fi + for binary in $(ls parity.sha256) + do + sha256=$(cat $binary | awk '{ print $1}' ) + case $DIR in + x86_64* ) + DATA="commit=$CI_BUILD_REF&sha3=$sha256&filename=parity$WIN&secret=$RELEASES_SECRET" + ../../scripts/gitlab/safe_curl.sh $DATA "http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$DIR" + # Kovan + ../../scripts/gitlab/safe_curl.sh $DATA "http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/$DIR" + ;; + esac + RELEASE_TABLE="$(echo "${RELEASE_TABLE/sha$DIR/${sha256}}")" + done + cd .. +done +#do not touch the following 3 lines. 
Features of output in Markdown +DESCRIPTION="$(echo "${DESCRIPTION/${REPLACE_TEXT}/${RELEASE_TABLE} + +${REPLACE_TEXT}}")" +echo "$DESCRIPTION" +if [["$CI_COMMIT_REF_NAME" == "nightly" ]]; then DESCRIPTION=""; fi #TODO in the future, we need to prepare a script that will do changelog +echo "__________Create release to Github____________" +github-release release --user devops-parity --repo parity-ethereum --tag "$CI_COMMIT_REF_NAME" --draft --name "$NAME" --description "$DESCRIPTION" +echo "__________Push binaries to AWS S3____________" +aws configure set aws_access_key_id $s3_key +aws configure set aws_secret_access_key $s3_secret +if [[ "$CI_BUILD_REF_NAME" = "beta" || "$CI_BUILD_REF_NAME" = "stable" || "$CI_BUILD_REF_NAME" = "nightly" ]]; + then + export S3_BUCKET=builds-parity-published; + else + export S3_BUCKET=builds-parity; +fi +aws s3 sync ./ s3://$S3_BUCKET/$CI_BUILD_REF_NAME/ diff --git a/scripts/gitlab/rpc-docs.sh b/scripts/gitlab/rpc-docs.sh new file mode 100755 index 00000000000..a01ab1280be --- /dev/null +++ b/scripts/gitlab/rpc-docs.sh @@ -0,0 +1,54 @@ +#!/bin/bash + +set -e # fail on any error +set -u # treat unset variables as error + +clone_repos() { + echo "__________Clone repos__________" + git clone https://github.com/parity-js/jsonrpc.git jsonrpc + git clone https://github.com/paritytech/wiki.git wiki +} + +build_docs() { + echo "__________Build docs__________" + npm install + npm run build:markdown +} + +update_wiki_docs() { + echo "__________Update WIKI docs__________" + for file in $(ls jsonrpc/docs); do + module_name=${file:0:-3} + mv jsonrpc/docs/$file wiki/JSONRPC-$module_name-module.md + done +} + +setup_git() { + echo "__________Set github__________" + git config user.email "devops@parity.com" + git config user.name "Devops Parity" +} + +commit_files() { + echo "__________Commit files__________" + git checkout -b rpcdoc-update-${CI_COMMIT_REF_NAME} + git add . + git commit -m "Update docs to ${CI_COMMIT_REF_NAME}" + git tag -a "${CI_COMMIT_REF_NAME}" +} + +upload_files() { + echo "__________Upload files__________" + git push --tags +} + +setup_git +clone_repos +cp -r parity/ jsonrpc/.parity/ +cd jsonrpc +build_docs +cd .. 
+update_wiki_docs +cd wiki +commit_files +upload_files diff --git a/scripts/gitlab/rustfmt.sh b/scripts/gitlab/rustfmt.sh new file mode 100755 index 00000000000..447d66f24ec --- /dev/null +++ b/scripts/gitlab/rustfmt.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +set -e # fail on any error +set -u # treat unset variables as error + +cargo install rustfmt-nightly +cargo fmt -- --write-mode=diff diff --git a/scripts/safe_curl.sh b/scripts/gitlab/safe_curl.sh similarity index 100% rename from scripts/safe_curl.sh rename to scripts/gitlab/safe_curl.sh diff --git a/scripts/gitlab/sign.cmd b/scripts/gitlab/sign.cmd new file mode 100755 index 00000000000..2b014a0236c --- /dev/null +++ b/scripts/gitlab/sign.cmd @@ -0,0 +1 @@ +@signtool sign /f %1 /p %2 /tr http://timestamp.comodoca.com /du https://parity.io %3 diff --git a/scripts/gitlab/templates/release-table.md b/scripts/gitlab/templates/release-table.md new file mode 100644 index 00000000000..6145521524e --- /dev/null +++ b/scripts/gitlab/templates/release-table.md @@ -0,0 +1,16 @@ +| OS | Arch | Download | SHA256 Checksum | +|:---:|:---:|:---|:---| +| linux | arm64 | [parity](https://releases.parity.io/$VERSION/aarch64-unknown-linux-gnu/parity) | `shaaarch64-unknown-linux-gnu` | +| android | armv7 | [parity](https://releases.parity.io/$VERSION/armv7-linux-androideabi/parity) | `shaarmv7-linux-androideabi` | +| linux | armv7 | [parity](https://releases.parity.io/$VERSION/armv7-unknown-linux-gnueabihf/parity) | `shaarmv7-unknown-linux-gnueabihf` | +| linux | i686 | [parity](https://releases.parity.io/$VERSION/i686-unknown-linux-gnu/parity) | `shai686-unknown-linux-gnu` | +| osx | x64 | [parity](https://releases.parity.io/$VERSION/x86_64-apple-darwin/parity) | `shax86_64-apple-darwin` | +| windows | x64 | [parity.exe](https://releases.parity.io/$VERSION/x86_64-pc-windows-msvc/parity.exe) | `shax86_64-pc-windows-msvc` | +| linux | x64 | [parity](https://releases.parity.io/$VERSION/x86_64-unknown-linux-gnu/parity) | `shax86_64-unknown-linux-gnu` | + +| OS | Alternative | Link | +|:---:|:---:|:---| +| Apple Icon by Pixel Perfect from https://www.flaticon.com/authors/pixel-perfect | Homebrew |[github.com/paritytech/homebrew-paritytech/blob/master/README.md](https://github.com/paritytech/homebrew-paritytech/blob/master/README.md) | +| Linux Icon by Pixel Perfect from https://www.flaticon.com/authors/pixel-perfect | Snapcraft | [snapcraft.io/parity](https://snapcraft.io/parity/) | +| Settings Icon by Pixel Perfect from https://www.flaticon.com/authors/pixel-perfect | Docker | [hub.docker.com/r/parity/parity](https://hub.docker.com/r/parity/parity) | +| Settings Icon by Pixel Perfect from https://www.flaticon.com/authors/pixel-perfect | Other binaries | [vanity-service.parity.io/parity-binaries?format=markdown&version=$VERSION](https://vanity-service.parity.io/parity-binaries?format=markdown&version=$VERSION) | diff --git a/scripts/gitlab/templates/snapcraft.template.yaml b/scripts/gitlab/templates/snapcraft.template.yaml new file mode 100644 index 00000000000..97bb39aa14a --- /dev/null +++ b/scripts/gitlab/templates/snapcraft.template.yaml @@ -0,0 +1,58 @@ +name: parity +version: $VERSION +architectures: [$BUILD_ARCH] +grade: $GRADE +confinement: strict + +summary: Fast, light, robust Ethereum implementation +description: | + Parity's goal is to be the fastest, lightest, and most secure Ethereum + client. We are developing Parity using the sophisticated and cutting-edge + Rust programming language. 
Parity is licensed under the GPLv3, and can be + used for all your Ethereum needs. + + +apps: + parity: + command: parity + plugs: [home, network, network-bind, mount-observe, x11, unity7, desktop, desktop-legacy, wayland] + desktop: usr/share/applications/parity.desktop + parity-evm: + command: parity-evm + plugs: [home, network, network-bind] + ethkey: + command: ethkey + plugs: [home] + ethstore: + command: ethstore + plugs: [home] + whisper: + command: whisper + plugs: [home] + +icon: snap/gui/icon.png + +parts: + desktop-icon: + source: ./snap + plugin: nil + override-build: | + mkdir -p $SNAPCRAFT_PART_INSTALL/usr/share/applications + mkdir -p $SNAPCRAFT_PART_INSTALL/usr/share/pixmaps + cp -v gui/parity.desktop $SNAPCRAFT_PART_INSTALL/usr/share/applications/ + cp -v gui/icon.png $SNAPCRAFT_PART_INSTALL/usr/share/pixmaps/ + parity: + source: ./artifacts/$CARGO_TARGET + plugin: nil + override-build: | + mkdir -p $SNAPCRAFT_PART_INSTALL/usr/bin + cp -v parity $SNAPCRAFT_PART_INSTALL/usr/bin/parity + cp -v parity-evm $SNAPCRAFT_PART_INSTALL/usr/bin/parity-evm + cp -v ethkey $SNAPCRAFT_PART_INSTALL/usr/bin/ethkey + cp -v ethstore $SNAPCRAFT_PART_INSTALL/usr/bin/ethstore + cp -v whisper $SNAPCRAFT_PART_INSTALL/usr/bin/whisper + stage-packages: [libc6, libssl1.0.0, libudev1, libstdc++6, cmake] + df: + plugin: nil + stage-packages: [coreutils] + stage: [bin/df] diff --git a/scripts/gitlab/test.sh b/scripts/gitlab/test.sh new file mode 100755 index 00000000000..be48c52cca8 --- /dev/null +++ b/scripts/gitlab/test.sh @@ -0,0 +1,28 @@ +#!/bin/bash +# ARGUMENT $1 Rust flavor to test with (stable/beta/nightly) + +set -e # fail on any error +set -u # treat unset variables as error + +rustup default $1 + +if [[ "$CI_COMMIT_REF_NAME" = "beta" || "$CI_COMMIT_REF_NAME" = "stable" ]]; then + export GIT_COMPARE=$CI_COMMIT_REF_NAME~; +else + export GIT_COMPARE=master; +fi + +export RUST_FILES_MODIFIED="$(git --no-pager diff --name-only $GIT_COMPARE...$CI_COMMIT_SHA | grep -v -e ^\\. -e ^LICENSE -e ^README.md -e ^test.sh -e ^windows/ -e ^scripts/ -e ^mac/ -e ^nsis/ | wc -l)" +echo "RUST_FILES_MODIFIED: $RUST_FILES_MODIFIED" + +git submodule update --init --recursive +rustup show +if [[ "${RUST_FILES_MODIFIED}" == "0" ]]; +then echo "__________Skipping Rust tests since no Rust files modified__________"; +else ./test.sh || exit $?; +fi + +# if [[ "$CI_COMMIT_REF_NAME" == "nightly" ]]; +# ### @TODO re-enable fail after https://github.com/paritytech/parity-import-tests/issues/3 +# then sh scripts/aura-test.sh; # || exit $?; +# fi diff --git a/snap/snapcraft.yaml b/snap/snapcraft.yaml deleted file mode 100644 index c7da2396002..00000000000 --- a/snap/snapcraft.yaml +++ /dev/null @@ -1,41 +0,0 @@ -name: parity -version: git -summary: Fast, light, robust Ethereum implementation -description: | - Parity's goal is to be the fastest, lightest, and most secure Ethereum - client. We are developing Parity using the sophisticated and cutting-edge - Rust programming language. Parity is licensed under the GPLv3, and can be - used for all your Ethereum needs. 
- -grade: devel -confinement: strict - -apps: - parity: - command: parity - plugs: [home, network, network-bind, mount-observe, x11, unity7, desktop, desktop-legacy, wayland] - desktop: usr/share/applications/parity.desktop - -icon: snap/gui/icon.png - -parts: - desktop-icon: - source: ./snap - plugin: nil - prepare: | - mkdir -p $SNAPCRAFT_PART_INSTALL/usr/share/applications - mkdir -p $SNAPCRAFT_PART_INSTALL/usr/share/pixmaps - cp -v gui/parity.desktop $SNAPCRAFT_PART_INSTALL/usr/share/applications/ - cp -v gui/icon.png $SNAPCRAFT_PART_INSTALL/usr/share/pixmaps/ - parity: - source: . - plugin: rust - # rust-channel: stable # @TODO enable after https://bugs.launchpad.net/snapcraft/+bug/1778530 - rust-revision: 1.26.2 # @TODO remove after https://bugs.launchpad.net/snapcraft/+bug/1778530 - build-attributes: [no-system-libraries] - build-packages: [g++, libudev-dev, make, pkg-config, cmake] - stage-packages: [libc6, libudev1, libstdc++6] - df: - plugin: nil - stage-packages: [coreutils] - stage: [bin/df] diff --git a/test.sh b/test.sh index 82e05d954c5..baa959f3cb4 100755 --- a/test.sh +++ b/test.sh @@ -32,13 +32,13 @@ set -e if [ "$VALIDATE" -eq "1" ]; then # Validate --no-default-features build echo "________Validate build________" -cargo check --no-default-features -cargo check --manifest-path util/io/Cargo.toml --no-default-features -cargo check --manifest-path util/io/Cargo.toml --features "mio" +time cargo check --no-default-features +time cargo check --manifest-path util/io/Cargo.toml --no-default-features +time cargo check --manifest-path util/io/Cargo.toml --features "mio" # Validate chainspecs echo "________Validate chainspecs________" -./scripts/validate_chainspecs.sh +time ./scripts/validate_chainspecs.sh fi @@ -57,4 +57,4 @@ cd parity-clib-examples/cpp && \ # Running tests echo "________Running Parity Full Test Suite________" git submodule update --init --recursive -cargo test -j 8 $OPTIONS --features "$FEATURES" --all $1 +time cargo test $OPTIONS --features "$FEATURES" --all $1 -- --test-threads 8 From ac1900a0fc3875478b11325b725d0eb0ff787421 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Silva?= Date: Mon, 27 Aug 2018 05:52:49 +0100 Subject: [PATCH 09/15] build: update rocksdb crate (#9414) --- Cargo.lock | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d02cff5ca48..cf2abdd3eb5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -166,9 +166,6 @@ dependencies = [ name = "cc" version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rayon 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", -] [[package]] name = "cfg-if" @@ -2714,7 +2711,7 @@ dependencies = [ [[package]] name = "rocksdb" version = "0.4.5" -source = "git+https://github.com/paritytech/rust-rocksdb#ecf06adf3148ab10f6f7686b724498382ff4f36e" +source = "git+https://github.com/paritytech/rust-rocksdb#86460c5e42d63c861b66172657531531de7f00b5" dependencies = [ "libc 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)", "local-encoding 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2724,9 +2721,9 @@ dependencies = [ [[package]] name = "rocksdb-sys" version = "0.3.0" -source = "git+https://github.com/paritytech/rust-rocksdb#ecf06adf3148ab10f6f7686b724498382ff4f36e" +source = "git+https://github.com/paritytech/rust-rocksdb#86460c5e42d63c861b66172657531531de7f00b5" dependencies = [ - "cc 1.0.17 (registry+https://github.com/rust-lang/crates.io-index)", + "cmake 0.1.31 
(registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)", "local-encoding 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "snappy-sys 0.1.0 (git+https://github.com/paritytech/rust-snappy)", From 6c96b60a63e8894bf944ff09bb54c3dba056b9de Mon Sep 17 00:00:00 2001 From: Afri Schoedon <5chdn@users.noreply.github.com> Date: Mon, 27 Aug 2018 17:01:56 +0200 Subject: [PATCH 10/15] docs: add parity ethereum logo to readme (#9415) * docs: add parity ethereum logo * docs: add logo to readme * docs: align logo center * docs: remove separators from readme * docs: restructure readme * docs: check spelling and grammar in readme * docs: clarify readme * docs: improve readme significantly --- README.md | 115 +++++++++++++++++----------------- docs/logo-parity-ethereum.svg | 1 + 2 files changed, 59 insertions(+), 57 deletions(-) create mode 100644 docs/logo-parity-ethereum.svg diff --git a/README.md b/README.md index 40c04dff727..88138b78cb5 100644 --- a/README.md +++ b/README.md @@ -1,52 +1,44 @@ -## Parity-Ethereum - a fast, light, and robust EVM and WASM blockchain client +![Parity Ethereum](docs/logo-parity-ethereum.svg) -### [» Download the latest release «](https://github.com/paritytech/parity-ethereum/releases/latest) +## The fastest and most advanced Ethereum client. -[![build status](https://gitlab.parity.io/parity/parity/badges/master/build.svg)](https://gitlab.parity.io/parity/parity/commits/master) -[![codecov](https://codecov.io/gh/paritytech/parity-ethereum/branch/master/graph/badge.svg)](https://codecov.io/gh/paritytech/parity-ethereum) -[![Snap Status](https://build.snapcraft.io/badge/paritytech/parity.svg)](https://build.snapcraft.io/user/paritytech/parity) -[![GPLv3](https://img.shields.io/badge/license-GPL%20v3-green.svg)](https://www.gnu.org/licenses/gpl-3.0.en.html) +

+[» Download the latest release «](https://github.com/paritytech/parity-ethereum/releases/latest)
+
+[badge row: build status, code coverage, Snap status, GPLv3 license]
-### Join the chat! +**Built for mission-critical use**: Miners, service providers, and exchanges need fast synchronisation and maximum uptime. Parity Ethereum provides the core infrastructure essential for speedy and reliable services. -Get in touch with us on Gitter: -[![Gitter: Parity](https://img.shields.io/badge/gitter-parity-4AB495.svg)](https://gitter.im/paritytech/parity) -[![Gitter: Parity.js](https://img.shields.io/badge/gitter-parity.js-4AB495.svg)](https://gitter.im/paritytech/parity.js) -[![Gitter: Parity/Miners](https://img.shields.io/badge/gitter-parity/miners-4AB495.svg)](https://gitter.im/paritytech/parity/miners) -[![Gitter: Parity-PoA](https://img.shields.io/badge/gitter-parity--poa-4AB495.svg)](https://gitter.im/paritytech/parity-poa) - -Or join our community on Matrix: -[![Riot: +Parity](https://img.shields.io/badge/riot-%2Bparity%3Amatrix.parity.io-orange.svg)](https://riot.im/app/#/group/+parity:matrix.parity.io) +- Clean, modular codebase for easy customisation +- Advanced CLI-based client +- Minimal memory and storage footprint +- Synchronise in hours, not days with Warp Sync +- Modular for light integration into your service or product -Official website: https://parity.io | Be sure to check out [our wiki](https://wiki.parity.io) for more information. +## Technical Overview ----- +Parity Ethereum's goal is to be the fastest, lightest, and most secure Ethereum client. We are developing Parity Ethereum using the sophisticated and cutting-edge **Rust programming language**. Parity Ethereum is licensed under the GPLv3 and can be used for all your Ethereum needs. -## About Parity-Ethereum +By default, Parity Ethereum runs a JSON-RPC HTTP server on port `:8545` and a Web-Sockets server on port `:8546`. This is fully configurable and supports a number of APIs. -Parity-Ethereum's goal is to be the fastest, lightest, and most secure Ethereum client. We are developing Parity-Ethereum using the sophisticated and cutting-edge Rust programming language. Parity-Ethereum is licensed under the GPLv3, and can be used for all your Ethereum needs. +If you run into problems while using Parity Ethereum, check out the [wiki for documentation](https://wiki.parity.io/), feel free to [file an issue in this repository](https://github.com/paritytech/parity-ethereum/issues/new), or hop on our [Gitter](https://gitter.im/paritytech/parity) or [Riot](https://riot.im/app/#/group/+parity:matrix.parity.io) chat room to ask a question. We are glad to help! **For security-critical issues**, please refer to the security policy outlined in [SECURITY.md](SECURITY.md). -By default, Parity-Ethereum will run a JSON-RPC HTTP server on `127.0.0.1:8545` and a Web-Sockets server on `127.0.0.1:8546`. This is fully configurable and supports a number of APIs. +Parity Ethereum's current beta-release is 2.0. You can download it at [the releases page](https://github.com/paritytech/parity-ethereum/releases) or follow the instructions below to build from source. Please, mind the [CHANGELOG.md](CHANGELOG.md) for a list of all changes between different versions. -If you run into problems while using Parity-Ethereum, feel free to file an issue in this repository or hop on our [Gitter](https://gitter.im/paritytech/parity) or [Riot](https://riot.im/app/#/group/+parity:matrix.parity.io) chat room to ask a question. We are glad to help! **For security-critical issues**, please refer to the security policy outlined in [SECURITY.md](SECURITY.md). +## Build Dependencies -Parity-Ethereum's current beta-release is 2.0. 
You can download it at [the releases page](https://github.com/paritytech/parity-ethereum/releases) or follow the instructions below to build from source. Please, mind the [CHANGELOG.md](CHANGELOG.md) for a list of all changes between different versions. +Parity Ethereum requires **Rust version 1.28.x** to build. ----- - -## Build dependencies - -**Parity-Ethereum requires Rust version 1.27.0 to build** - -We recommend installing Rust through [rustup](https://www.rustup.rs/). If you don't already have rustup, you can install it like this: +We recommend installing Rust through [rustup](https://www.rustup.rs/). If you don't already have `rustup`, you can install it like this: - Linux: ```bash $ curl https://sh.rustup.rs -sSf | sh ``` - Parity-Ethereum also requires `gcc`, `g++`, `libudev-dev`, `pkg-config`, `file`, `make`, and `cmake` packages to be installed. + Parity Ethereum also requires `gcc`, `g++`, `libudev-dev`, `pkg-config`, `file`, `make`, and `cmake` packages to be installed. - OSX: ```bash @@ -56,21 +48,19 @@ We recommend installing Rust through [rustup](https://www.rustup.rs/). If you do `clang` is required. It comes with Xcode command line tools or can be installed with homebrew. - Windows - Make sure you have Visual Studio 2015 with C++ support installed. Next, download and run the rustup installer from - https://static.rust-lang.org/rustup/dist/x86_64-pc-windows-msvc/rustup-init.exe, start "VS2015 x64 Native Tools Command Prompt", and use the following command to install and set up the msvc toolchain: + Make sure you have Visual Studio 2015 with C++ support installed. Next, download and run the `rustup` installer from + https://static.rust-lang.org/rustup/dist/x86_64-pc-windows-msvc/rustup-init.exe, start "VS2015 x64 Native Tools Command Prompt", and use the following command to install and set up the `msvc` toolchain: ```bash $ rustup default stable-x86_64-pc-windows-msvc ``` -Once you have rustup installed, then you need to install: +Once you have `rustup` installed, then you need to install: * [Perl](https://www.perl.org) * [Yasm](https://yasm.tortall.net) -Make sure that these binaries are in your `PATH`. After that you should be able to build Parity-Ethereum from source. +Make sure that these binaries are in your `PATH`. After that, you should be able to build Parity Ethereum from source. ----- - -## Install from the snap store +## Install from the Snapcraft Store In any of the [supported Linux distros](https://snapcraft.io/docs/core/install): @@ -78,24 +68,22 @@ In any of the [supported Linux distros](https://snapcraft.io/docs/core/install): sudo snap install parity ``` -Or, if you want to contribute testing the upcoming release: +Alternatively, if you want to contribute testing the upcoming release: ```bash sudo snap install parity --beta ``` -And to test the latest code landed into the master branch: +Moreover, to test the latest code from the master branch: ```bash sudo snap install parity --edge ``` ----- - -## Build from source +## Build from Source Code ```bash -# download Parity-Ethereum code +# download Parity Ethereum code $ git clone https://github.com/paritytech/parity-ethereum $ cd parity-ethereum @@ -103,7 +91,7 @@ $ cd parity-ethereum $ cargo build --release --features final ``` -This will produce an executable in the `./target/release` subdirectory. +This produces an executable in the `./target/release` subdirectory. 
Note: if cargo fails to parse manifest try: @@ -117,7 +105,7 @@ Note, when compiling a crate and you receive errors, it's in most cases your out $ cargo clean ``` -This will always compile the latest nightly builds. If you want to build stable or beta, do a +This always compiles the latest nightly builds. If you want to build stable or beta, do a ```bash $ git checkout stable @@ -129,11 +117,7 @@ or $ git checkout beta ``` -first. - ----- - -## Simple one-line installer for Mac and Ubuntu +## Simple One-Line Installer for Mac and Linux ```bash bash <(curl https://get.parity.io -L) @@ -145,22 +129,39 @@ The one-line installer always defaults to the latest beta release. To install a bash <(curl https://get.parity.io -L) -r stable ``` -## Start Parity-Ethereum +## Start Parity Ethereum ### Manually -To start Parity-Ethereum manually, just run +To start Parity Ethereum manually, just run ```bash $ ./target/release/parity ``` -and Parity-Ethereum will begin syncing the Ethereum blockchain. +so Parity Ethereum begins syncing the Ethereum blockchain. -### Using systemd service file +### Using `systemd` service file -To start Parity-Ethereum as a regular user using systemd init: +To start Parity Ethereum as a regular user using `systemd` init: 1. Copy `./scripts/parity.service` to your -systemd user directory (usually `~/.config/systemd/user`). -2. To configure Parity-Ethereum, write a `/etc/parity/config.toml` config file, see [Configuring Parity-Ethereum](https://paritytech.github.io/wiki/Configuring-Parity) for details. +`systemd` user directory (usually `~/.config/systemd/user`). +2. To configure Parity Ethereum, write a `/etc/parity/config.toml` config file, see [Configuring Parity Ethereum](https://paritytech.github.io/wiki/Configuring-Parity) for details. + +## Join the chat! + +Questions? Get in touch with us on Gitter: +[![Gitter: Parity](https://img.shields.io/badge/gitter-parity-4AB495.svg)](https://gitter.im/paritytech/parity) +[![Gitter: Parity.js](https://img.shields.io/badge/gitter-parity.js-4AB495.svg)](https://gitter.im/paritytech/parity.js) +[![Gitter: Parity/Miners](https://img.shields.io/badge/gitter-parity/miners-4AB495.svg)](https://gitter.im/paritytech/parity/miners) +[![Gitter: Parity-PoA](https://img.shields.io/badge/gitter-parity--poa-4AB495.svg)](https://gitter.im/paritytech/parity-poa) + +Alternatively, join our community on Matrix: +[![Riot: +Parity](https://img.shields.io/badge/riot-%2Bparity%3Amatrix.parity.io-orange.svg)](https://riot.im/app/#/group/+parity:matrix.parity.io) + +## Documentation + +Official website: https://parity.io + +Be sure to [check out our wiki](https://wiki.parity.io) for more information. 
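The Technical Overview above notes that Parity Ethereum serves JSON-RPC over HTTP on port `:8545` by default. As an illustration only (not part of this patch), a minimal std-only Rust sketch that exercises that endpoint with the standard `eth_blockNumber` method could look like the following; the port and method come from the README and the JSON-RPC spec, while the raw-`TcpStream` approach and all other details are assumptions for the sake of a self-contained example:

```rust
use std::io::{Read, Write};
use std::net::TcpStream;

fn main() -> std::io::Result<()> {
    // Standard JSON-RPC 2.0 request asking for the latest block number.
    let body = r#"{"jsonrpc":"2.0","method":"eth_blockNumber","params":[],"id":1}"#;
    // 127.0.0.1:8545 is the default HTTP RPC endpoint mentioned in the README.
    let mut stream = TcpStream::connect("127.0.0.1:8545")?;
    let request = format!(
        "POST / HTTP/1.1\r\nHost: 127.0.0.1\r\nContent-Type: application/json\r\nContent-Length: {}\r\nConnection: close\r\n\r\n{}",
        body.len(),
        body
    );
    stream.write_all(request.as_bytes())?;
    // Read the raw HTTP response; the JSON body carries the block number as hex.
    let mut response = String::new();
    stream.read_to_string(&mut response)?;
    println!("{}", response);
    Ok(())
}
```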
diff --git a/docs/logo-parity-ethereum.svg b/docs/logo-parity-ethereum.svg new file mode 100644 index 00000000000..03d1903324c --- /dev/null +++ b/docs/logo-parity-ethereum.svg @@ -0,0 +1 @@ +Asset 37 \ No newline at end of file From 7aa4484a031532b00a88d39a10337013bc2ee0ff Mon Sep 17 00:00:00 2001 From: ewaldhesse Date: Tue, 28 Aug 2018 14:06:36 +0200 Subject: [PATCH 11/15] Update tobalaba.json (#9419) --- ethcore/res/ethereum/tobalaba.json | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/ethcore/res/ethereum/tobalaba.json b/ethcore/res/ethereum/tobalaba.json index 0f5778601a9..52abba113fd 100644 --- a/ethcore/res/ethereum/tobalaba.json +++ b/ethcore/res/ethereum/tobalaba.json @@ -16,14 +16,14 @@ "gasLimitBoundDivisor": "0x400", "minGasLimit": "0x1388", "networkID": "0x62121", - "wasmActivationTransition": 6666666, - "eip140Transition": 6666666, - "eip211Transition": 6666666, - "eip214Transition": 6666666, - "eip658Transition": 6666666, + "wasmActivationTransition": 7250000, + "eip140Transition": 7250000, + "eip211Transition": 7250000, + "eip214Transition": 7250000, + "eip658Transition": 7250000, "maxCodeSize": 24576, - "maxCodeSizeTransition": 6666666, + "maxCodeSizeTransition": 7250000, "registrar": "0xb8624dc8cb3ca3147c178ac4c21734eb49e04071" }, @@ -54,14 +54,14 @@ "balance": "0x7E37BE2022B2B09472D89C0000" }, - "0x0000000000000000000000000000000000000001": { "builtin": { "name": "ecrecover", "activate_at": 6666666, "pricing": { "linear": { "base": 3000, "word": 0 } } } }, - "0x0000000000000000000000000000000000000002": { "builtin": { "name": "sha256", "activate_at": 6666666, "pricing": { "linear": { "base": 60, "word": 12 } } } }, - "0x0000000000000000000000000000000000000003": { "builtin": { "name": "ripemd160", "activate_at": 6666666, "pricing": { "linear": { "base": 600, "word": 120 } } } }, - "0x0000000000000000000000000000000000000004": { "builtin": { "name": "identity", "activate_at": 6666666, "pricing": { "linear": { "base": 15, "word": 3 } } } }, - "0x0000000000000000000000000000000000000005": { "builtin": { "name": "modexp", "activate_at": 6666666, "pricing": { "modexp": { "divisor": 20 } } } }, - "0x0000000000000000000000000000000000000006": { "builtin": { "name": "alt_bn128_add", "activate_at": 6666666, "pricing": { "linear": { "base": 500, "word": 0 } } } }, - "0x0000000000000000000000000000000000000007": { "builtin": { "name": "alt_bn128_mul", "activate_at": 6666666, "pricing": { "linear": { "base": 40000, "word": 0 } } } }, - "0x0000000000000000000000000000000000000008": { "builtin": { "name": "alt_bn128_pairing", "activate_at": 6666666, "pricing": { "alt_bn128_pairing": { "base": 100000, "pair": 80000 } } } } + "0x0000000000000000000000000000000000000001": { "builtin": { "name": "ecrecover", "activate_at": 7250000, "pricing": { "linear": { "base": 3000, "word": 0 } } } }, + "0x0000000000000000000000000000000000000002": { "builtin": { "name": "sha256", "activate_at": 7250000, "pricing": { "linear": { "base": 60, "word": 12 } } } }, + "0x0000000000000000000000000000000000000003": { "builtin": { "name": "ripemd160", "activate_at": 7250000, "pricing": { "linear": { "base": 600, "word": 120 } } } }, + "0x0000000000000000000000000000000000000004": { "builtin": { "name": "identity", "activate_at": 7250000, "pricing": { "linear": { "base": 15, "word": 3 } } } }, + "0x0000000000000000000000000000000000000005": { "builtin": { "name": "modexp", "activate_at": 7250000, "pricing": { "modexp": { "divisor": 20 } } } }, + 
"0x0000000000000000000000000000000000000006": { "builtin": { "name": "alt_bn128_add", "activate_at": 7250000, "pricing": { "linear": { "base": 500, "word": 0 } } } }, + "0x0000000000000000000000000000000000000007": { "builtin": { "name": "alt_bn128_mul", "activate_at": 7250000, "pricing": { "linear": { "base": 40000, "word": 0 } } } }, + "0x0000000000000000000000000000000000000008": { "builtin": { "name": "alt_bn128_pairing", "activate_at": 7250000, "pricing": { "alt_bn128_pairing": { "base": 100000, "pair": 80000 } } } } }, "nodes": [ From 1073d56245b2087f8b6a0cd605c7572eaea22a07 Mon Sep 17 00:00:00 2001 From: Anton Gavrilov Date: Wed, 29 Aug 2018 14:31:04 +0200 Subject: [PATCH 12/15] Private packets verification and queue refactoring (#8715) * Verify private transaction before propagating * Private transactions queue reworked with tx pool queue direct usage * Styling fixed * Prevent resending private packets to the sender * Process signed private transaction packets via io queue * Test fixed * Build and test fixed after merge * Comments after review fixed * Signed transaction taken from verified * Fix after merge * Pool scoring generalized in order to use externally * Lib refactored according to the review comments * Ready state refactored * Redundant bound and copying removed * Fixed build after the merge * Forgotten case reworked * Review comments fixed * Logging reworked, target added * Fix after merge --- Cargo.lock | 3 + ethcore/private-tx/Cargo.toml | 2 + ethcore/private-tx/src/encryptor.rs | 2 +- ethcore/private-tx/src/error.rs | 2 + ethcore/private-tx/src/lib.rs | 338 +++++++++--------- ethcore/private-tx/src/messages.rs | 49 ++- .../private-tx/src/private_transactions.rs | 224 +++++++----- ethcore/service/Cargo.toml | 1 + ethcore/service/src/lib.rs | 1 + ethcore/service/src/service.rs | 21 +- ethcore/src/client/chain_notify.rs | 4 +- ethcore/src/test_helpers.rs | 4 +- ethcore/sync/src/api.rs | 9 +- ethcore/sync/src/chain/handler.rs | 33 +- ethcore/sync/src/chain/mod.rs | 37 +- ethcore/sync/src/chain/propagator.rs | 21 +- ethcore/sync/src/private_tx.rs | 23 +- ethcore/sync/src/tests/helpers.rs | 8 +- ethcore/sync/src/tests/private.rs | 5 +- miner/src/pool/local_transactions.rs | 2 +- miner/src/pool/mod.rs | 37 +- miner/src/pool/scoring.rs | 64 ++-- 22 files changed, 525 insertions(+), 365 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index cf2abdd3eb5..88cebe3a866 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -741,6 +741,7 @@ dependencies = [ "ethkey 0.3.0", "fetch 0.1.0", "futures 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)", + "heapsize 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "keccak-hash 0.1.2 (git+https://github.com/paritytech/parity-common)", "log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", "parity-bytes 0.1.0 (git+https://github.com/paritytech/parity-common)", @@ -756,6 +757,7 @@ dependencies = [ "serde_derive 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.9 (registry+https://github.com/rust-lang/crates.io-index)", "tiny-keccak 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "transaction-pool 1.13.1", "url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -808,6 +810,7 @@ dependencies = [ "ethcore-io 1.12.0", "ethcore-private-tx 1.0.0", "ethcore-sync 1.12.0", + "ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "kvdb 0.1.0 (git+https://github.com/paritytech/parity-common)", "kvdb-rocksdb 0.1.0 
(git+https://github.com/paritytech/parity-common)", "log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", diff --git a/ethcore/private-tx/Cargo.toml b/ethcore/private-tx/Cargo.toml index 2383443e734..cebf388f156 100644 --- a/ethcore/private-tx/Cargo.toml +++ b/ethcore/private-tx/Cargo.toml @@ -22,6 +22,7 @@ ethjson = { path = "../../json" } ethkey = { path = "../../ethkey" } fetch = { path = "../../util/fetch" } futures = "0.1" +heapsize = "0.4" keccak-hash = { git = "https://github.com/paritytech/parity-common" } log = "0.4" parking_lot = "0.6" @@ -35,6 +36,7 @@ serde = "1.0" serde_derive = "1.0" serde_json = "1.0" tiny-keccak = "1.4" +transaction-pool = { path = "../../transaction-pool" } url = "1" [dev-dependencies] diff --git a/ethcore/private-tx/src/encryptor.rs b/ethcore/private-tx/src/encryptor.rs index e64917add0a..c1d3d3fb8a6 100644 --- a/ethcore/private-tx/src/encryptor.rs +++ b/ethcore/private-tx/src/encryptor.rs @@ -208,7 +208,7 @@ impl Encryptor for SecretStoreEncryptor { let key = match self.retrieve_key("", false, contract_address, &*accounts) { Ok(key) => Ok(key), Err(Error(ErrorKind::EncryptionKeyNotFound(_), _)) => { - trace!("Key for account wasnt found in sstore. Creating. Address: {:?}", contract_address); + trace!(target: "privatetx", "Key for account wasnt found in sstore. Creating. Address: {:?}", contract_address); self.retrieve_key(&format!("/{}", self.config.threshold), true, contract_address, &*accounts) } Err(err) => Err(err), diff --git a/ethcore/private-tx/src/error.rs b/ethcore/private-tx/src/error.rs index 55a75d6d9ed..99da149e4aa 100644 --- a/ethcore/private-tx/src/error.rs +++ b/ethcore/private-tx/src/error.rs @@ -21,12 +21,14 @@ use ethcore::account_provider::SignError; use ethcore::error::{Error as EthcoreError, ExecutionError}; use transaction::Error as TransactionError; use ethkey::Error as KeyError; +use txpool::Error as TxPoolError; error_chain! 
{ foreign_links { Io(::std::io::Error) #[doc = "Error concerning the Rust standard library's IO subsystem."]; Decoder(DecoderError) #[doc = "RLP decoding error."]; Trie(TrieError) #[doc = "Error concerning TrieDBs."]; + Txpool(TxPoolError) #[doc = "Tx pool error."]; } errors { diff --git a/ethcore/private-tx/src/lib.rs b/ethcore/private-tx/src/lib.rs index a661197dadb..24727fe0f5b 100644 --- a/ethcore/private-tx/src/lib.rs +++ b/ethcore/private-tx/src/lib.rs @@ -37,9 +37,11 @@ extern crate ethkey; extern crate ethjson; extern crate fetch; extern crate futures; +extern crate heapsize; extern crate keccak_hash as hash; extern crate parking_lot; extern crate patricia_trie as trie; +extern crate transaction_pool as txpool; extern crate patricia_trie_ethereum as ethtrie; extern crate rlp; extern crate url; @@ -61,7 +63,7 @@ extern crate rand; extern crate ethcore_logger; pub use encryptor::{Encryptor, SecretStoreEncryptor, EncryptorConfig, NoopEncryptor}; -pub use private_transactions::{PrivateTransactionDesc, VerificationStore, PrivateTransactionSigningDesc, SigningStore}; +pub use private_transactions::{VerifiedPrivateTransaction, VerificationStore, PrivateTransactionSigningDesc, SigningStore}; pub use messages::{PrivateTransaction, SignedPrivateTransaction}; pub use error::{Error, ErrorKind}; @@ -71,7 +73,7 @@ use std::time::Duration; use ethereum_types::{H128, H256, U256, Address}; use hash::keccak; use rlp::*; -use parking_lot::{Mutex, RwLock}; +use parking_lot::RwLock; use bytes::Bytes; use ethkey::{Signature, recover, public_to_address}; use io::IoChannel; @@ -128,9 +130,8 @@ pub struct Provider { signer_account: Option
, passwords: Vec, notify: RwLock>>, - transactions_for_signing: Mutex, - // TODO [ToDr] Move the Mutex/RwLock inside `VerificationStore` after refactored to `drain`. - transactions_for_verification: Mutex, + transactions_for_signing: RwLock, + transactions_for_verification: VerificationStore, client: Arc, miner: Arc, accounts: Arc, @@ -161,8 +162,8 @@ impl Provider where { signer_account: config.signer_account, passwords: config.passwords, notify: RwLock::default(), - transactions_for_signing: Mutex::default(), - transactions_for_verification: Mutex::default(), + transactions_for_signing: RwLock::default(), + transactions_for_verification: VerificationStore::default(), client, miner, accounts, @@ -190,9 +191,9 @@ impl Provider where { /// 3. Save it with state returned on prev step to the queue for signing /// 4. Broadcast corresponding message to the chain pub fn create_private_transaction(&self, signed_transaction: SignedTransaction) -> Result { - trace!("Creating private transaction from regular transaction: {:?}", signed_transaction); + trace!(target: "privatetx", "Creating private transaction from regular transaction: {:?}", signed_transaction); if self.signer_account.is_none() { - trace!("Signing account not set"); + warn!(target: "privatetx", "Signing account not set"); bail!(ErrorKind::SignerAccountNotSet); } let tx_hash = signed_transaction.hash(); @@ -203,10 +204,7 @@ impl Provider where { Action::Call(contract) => { let data = signed_transaction.rlp_bytes(); let encrypted_transaction = self.encrypt(&contract, &Self::iv_from_transaction(&signed_transaction), &data)?; - let private = PrivateTransaction { - encrypted: encrypted_transaction, - contract, - }; + let private = PrivateTransaction::new(encrypted_transaction, contract); // TODO [ToDr] Using BlockId::Latest is bad here, // the block may change in the middle of execution // causing really weird stuff to happen. @@ -215,16 +213,16 @@ impl Provider where { // in private-tx to avoid such mistakes. 
let contract_nonce = self.get_contract_nonce(&contract, BlockId::Latest)?; let private_state = self.execute_private_transaction(BlockId::Latest, &signed_transaction)?; - trace!("Private transaction created, encrypted transaction: {:?}, private state: {:?}", private, private_state); + trace!(target: "privatetx", "Private transaction created, encrypted transaction: {:?}, private state: {:?}", private, private_state); let contract_validators = self.get_validators(BlockId::Latest, &contract)?; - trace!("Required validators: {:?}", contract_validators); + trace!(target: "privatetx", "Required validators: {:?}", contract_validators); let private_state_hash = self.calculate_state_hash(&private_state, contract_nonce); - trace!("Hashed effective private state for sender: {:?}", private_state_hash); - self.transactions_for_signing.lock().add_transaction(private.hash(), signed_transaction, contract_validators, private_state, contract_nonce)?; - self.broadcast_private_transaction(private.rlp_bytes().into_vec()); + trace!(target: "privatetx", "Hashed effective private state for sender: {:?}", private_state_hash); + self.transactions_for_signing.write().add_transaction(private.hash(), signed_transaction, contract_validators, private_state, contract_nonce)?; + self.broadcast_private_transaction(private.hash(), private.rlp_bytes().into_vec()); Ok(Receipt { hash: tx_hash, - contract_address: None, + contract_address: Some(contract), status_code: 0, }) } @@ -240,14 +238,6 @@ impl Provider where { keccak(&state_buf.as_ref()) } - /// Extract signed transaction from private transaction - fn extract_original_transaction(&self, private: PrivateTransaction, contract: &Address) -> Result { - let encrypted_transaction = private.encrypted; - let transaction_bytes = self.decrypt(contract, &encrypted_transaction)?; - let original_transaction: UnverifiedTransaction = Rlp::new(&transaction_bytes).as_val()?; - Ok(original_transaction) - } - fn pool_client<'a>(&'a self, nonce_cache: &'a NonceCache) -> miner::pool_client::PoolClient<'a, Client> { let engine = self.client.engine(); let refuse_service_transactions = true; @@ -261,48 +251,122 @@ impl Provider where { } /// Retrieve and verify the first available private transaction for every sender - /// - /// TODO [ToDr] It seems that: - /// The 3 methods `ready_transaction,get_descriptor,remove` are always used in conjuction so most likely - /// can be replaced with a single `drain()` method instead. - /// Thanks to this we also don't really need to lock the entire verification for the time of execution. 
- fn process_queue(&self) -> Result<(), Error> { + fn process_verification_queue(&self) -> Result<(), Error> { let nonce_cache = NonceCache::new(NONCE_CACHE_SIZE); - let mut verification_queue = self.transactions_for_verification.lock(); - let ready_transactions = verification_queue.ready_transactions(self.pool_client(&nonce_cache)); - for transaction in ready_transactions { - let transaction_hash = transaction.signed().hash(); - match verification_queue.private_transaction_descriptor(&transaction_hash) { - Ok(desc) => { - if !self.validator_accounts.contains(&desc.validator_account) { - trace!("Cannot find validator account in config"); - bail!(ErrorKind::ValidatorAccountNotSet); + let process_transaction = |transaction: &VerifiedPrivateTransaction| -> Result<_, String> { + let private_hash = transaction.private_transaction.hash(); + match transaction.validator_account { + None => { + trace!(target: "privatetx", "Propagating transaction further"); + self.broadcast_private_transaction(private_hash, transaction.private_transaction.rlp_bytes().into_vec()); + return Ok(()); + } + Some(validator_account) => { + if !self.validator_accounts.contains(&validator_account) { + trace!(target: "privatetx", "Propagating transaction further"); + self.broadcast_private_transaction(private_hash, transaction.private_transaction.rlp_bytes().into_vec()); + return Ok(()); } - let account = desc.validator_account; - if let Action::Call(contract) = transaction.signed().action { + let tx_action = transaction.transaction.action.clone(); + if let Action::Call(contract) = tx_action { // TODO [ToDr] Usage of BlockId::Latest - let contract_nonce = self.get_contract_nonce(&contract, BlockId::Latest)?; - let private_state = self.execute_private_transaction(BlockId::Latest, transaction.signed())?; + let contract_nonce = self.get_contract_nonce(&contract, BlockId::Latest); + if let Err(e) = contract_nonce { + bail!("Cannot retrieve contract nonce: {:?}", e); + } + let contract_nonce = contract_nonce.expect("Error was checked before"); + let private_state = self.execute_private_transaction(BlockId::Latest, &transaction.transaction); + if let Err(e) = private_state { + bail!("Cannot retrieve private state: {:?}", e); + } + let private_state = private_state.expect("Error was checked before"); let private_state_hash = self.calculate_state_hash(&private_state, contract_nonce); - trace!("Hashed effective private state for validator: {:?}", private_state_hash); - let password = find_account_password(&self.passwords, &*self.accounts, &account); - let signed_state = self.accounts.sign(account, password, private_state_hash)?; - let signed_private_transaction = SignedPrivateTransaction::new(desc.private_hash, signed_state, None); - trace!("Sending signature for private transaction: {:?}", signed_private_transaction); - self.broadcast_signed_private_transaction(signed_private_transaction.rlp_bytes().into_vec()); + trace!(target: "privatetx", "Hashed effective private state for validator: {:?}", private_state_hash); + let password = find_account_password(&self.passwords, &*self.accounts, &validator_account); + let signed_state = self.accounts.sign(validator_account, password, private_state_hash); + if let Err(e) = signed_state { + bail!("Cannot sign the state: {:?}", e); + } + let signed_state = signed_state.expect("Error was checked before"); + let signed_private_transaction = SignedPrivateTransaction::new(private_hash, signed_state, None); + trace!(target: "privatetx", "Sending signature for private transaction: {:?}", 
signed_private_transaction); + self.broadcast_signed_private_transaction(signed_private_transaction.hash(), signed_private_transaction.rlp_bytes().into_vec()); } else { - warn!("Incorrect type of action for the transaction"); + bail!("Incorrect type of action for the transaction"); } - }, - Err(e) => { - warn!("Cannot retrieve descriptor for transaction with error {:?}", e); } } - verification_queue.remove_private_transaction(&transaction_hash); + Ok(()) + }; + let ready_transactions = self.transactions_for_verification.drain(self.pool_client(&nonce_cache)); + for transaction in ready_transactions { + if let Err(e) = process_transaction(&transaction) { + warn!(target: "privatetx", "Error: {:?}", e); + } } Ok(()) } + /// Add signed private transaction into the store + /// Creates corresponding public transaction if last required signature collected and sends it to the chain + pub fn process_signature(&self, signed_tx: &SignedPrivateTransaction) -> Result<(), Error> { + trace!(target: "privatetx", "Processing signed private transaction"); + let private_hash = signed_tx.private_transaction_hash(); + let desc = match self.transactions_for_signing.read().get(&private_hash) { + None => { + // Not our transaction, broadcast further to peers + self.broadcast_signed_private_transaction(signed_tx.hash(), signed_tx.rlp_bytes().into_vec()); + return Ok(()); + }, + Some(desc) => desc, + }; + let last = self.last_required_signature(&desc, signed_tx.signature())?; + + if last { + let mut signatures = desc.received_signatures.clone(); + signatures.push(signed_tx.signature()); + let rsv: Vec = signatures.into_iter().map(|sign| sign.into_electrum().into()).collect(); + //Create public transaction + let public_tx = self.public_transaction( + desc.state.clone(), + &desc.original_transaction, + &rsv, + desc.original_transaction.nonce, + desc.original_transaction.gas_price + )?; + trace!(target: "privatetx", "Last required signature received, public transaction created: {:?}", public_tx); + //Sign and add it to the queue + let chain_id = desc.original_transaction.chain_id(); + let hash = public_tx.hash(chain_id); + let signer_account = self.signer_account.ok_or_else(|| ErrorKind::SignerAccountNotSet)?; + let password = find_account_password(&self.passwords, &*self.accounts, &signer_account); + let signature = self.accounts.sign(signer_account, password, hash)?; + let signed = SignedTransaction::new(public_tx.with_signature(signature, chain_id))?; + match self.miner.import_own_transaction(&*self.client, signed.into()) { + Ok(_) => trace!(target: "privatetx", "Public transaction added to queue"), + Err(err) => { + warn!(target: "privatetx", "Failed to add transaction to queue, error: {:?}", err); + bail!(err); + } + } + //Remove from store for signing + if let Err(err) = self.transactions_for_signing.write().remove(&private_hash) { + warn!(target: "privatetx", "Failed to remove transaction from signing store, error: {:?}", err); + bail!(err); + } + } else { + //Add signature to the store + match self.transactions_for_signing.write().add_signature(&private_hash, signed_tx.signature()) { + Ok(_) => trace!(target: "privatetx", "Signature stored for private transaction"), + Err(err) => { + warn!(target: "privatetx", "Failed to add signature to signing store, error: {:?}", err); + bail!(err); + } + } + } + Ok(()) + } + fn last_required_signature(&self, desc: &PrivateTransactionSigningDesc, sign: Signature) -> Result { if desc.received_signatures.contains(&sign) { return Ok(false); @@ -316,26 +380,26 @@ impl Provider 
where { Ok(desc.received_signatures.len() + 1 == desc.validators.len()) } false => { - trace!("Sender's state doesn't correspond to validator's"); + warn!(target: "privatetx", "Sender's state doesn't correspond to validator's"); bail!(ErrorKind::StateIncorrect); } } } Err(err) => { - trace!("Sender's state doesn't correspond to validator's, error {:?}", err); + warn!(target: "privatetx", "Sender's state doesn't correspond to validator's, error {:?}", err); bail!(err); } } } /// Broadcast the private transaction message to the chain - fn broadcast_private_transaction(&self, message: Bytes) { - self.notify(|notify| notify.broadcast(ChainMessageType::PrivateTransaction(message.clone()))); + fn broadcast_private_transaction(&self, transaction_hash: H256, message: Bytes) { + self.notify(|notify| notify.broadcast(ChainMessageType::PrivateTransaction(transaction_hash, message.clone()))); } /// Broadcast signed private transaction message to the chain - fn broadcast_signed_private_transaction(&self, message: Bytes) { - self.notify(|notify| notify.broadcast(ChainMessageType::SignedPrivateTransaction(message.clone()))); + fn broadcast_signed_private_transaction(&self, transaction_hash: H256, message: Bytes) { + self.notify(|notify| notify.broadcast(ChainMessageType::SignedPrivateTransaction(transaction_hash, message.clone()))); } fn iv_from_transaction(transaction: &SignedTransaction) -> H128 { @@ -351,12 +415,12 @@ impl Provider where { } fn encrypt(&self, contract_address: &Address, initialisation_vector: &H128, data: &[u8]) -> Result { - trace!("Encrypt data using key(address): {:?}", contract_address); + trace!(target: "privatetx", "Encrypt data using key(address): {:?}", contract_address); Ok(self.encryptor.encrypt(contract_address, &*self.accounts, initialisation_vector, data)?) } fn decrypt(&self, contract_address: &Address, data: &[u8]) -> Result { - trace!("Decrypt data using key(address): {:?}", contract_address); + trace!(target: "privatetx", "Decrypt data using key(address): {:?}", contract_address); Ok(self.encryptor.decrypt(contract_address, &*self.accounts, data)?) } @@ -421,7 +485,7 @@ impl Provider where { Action::Call(ref contract_address) => { let contract_code = Arc::new(self.get_decrypted_code(contract_address, block)?); let contract_state = self.get_decrypted_state(contract_address, block)?; - trace!("Patching contract at {:?}, code: {:?}, state: {:?}", contract_address, contract_code, contract_state); + trace!(target: "privatetx", "Patching contract at {:?}, code: {:?}, state: {:?}", contract_address, contract_code, contract_state); state.patch_account(contract_address, contract_code, Self::snapshot_to_storage(contract_state))?; Some(*contract_address) }, @@ -449,7 +513,7 @@ impl Provider where { (enc_code, self.encrypt(&address, &Self::iv_from_transaction(transaction), &Self::snapshot_from_storage(&storage))?) }, }; - trace!("Private contract executed. code: {:?}, state: {:?}, result: {:?}", encrypted_code, encrypted_storage, result.output); + trace!(target: "privatetx", "Private contract executed. 
code: {:?}, state: {:?}, result: {:?}", encrypted_code, encrypted_storage, result.output); Ok(PrivateExecutionResult { code: encrypted_code, state: encrypted_storage, @@ -550,12 +614,12 @@ impl Provider where { pub trait Importer { /// Process received private transaction - fn import_private_transaction(&self, _rlp: &[u8]) -> Result<(), Error>; + fn import_private_transaction(&self, _rlp: &[u8]) -> Result; /// Add signed private transaction into the store /// /// Creates corresponding public transaction if last required signature collected and sends it to the chain - fn import_signed_private_transaction(&self, _rlp: &[u8]) -> Result<(), Error>; + fn import_signed_private_transaction(&self, _rlp: &[u8]) -> Result; } // TODO [ToDr] Offload more heavy stuff to the IoService thread. @@ -564,115 +628,59 @@ pub trait Importer { // for both verification and execution. impl Importer for Arc { - fn import_private_transaction(&self, rlp: &[u8]) -> Result<(), Error> { - trace!("Private transaction received"); + fn import_private_transaction(&self, rlp: &[u8]) -> Result { + trace!(target: "privatetx", "Private transaction received"); let private_tx: PrivateTransaction = Rlp::new(rlp).as_val()?; - let contract = private_tx.contract; + let private_tx_hash = private_tx.hash(); + let contract = private_tx.contract(); let contract_validators = self.get_validators(BlockId::Latest, &contract)?; let validation_account = contract_validators .iter() .find(|address| self.validator_accounts.contains(address)); - match validation_account { - None => { - // TODO [ToDr] This still seems a bit invalid, imho we should still import the transaction to the pool. - // Importing to pool verifies correctness and nonce; here we are just blindly forwarding. - // - // Not for verification, broadcast further to peers - self.broadcast_private_transaction(rlp.into()); - return Ok(()); - }, - Some(&validation_account) => { - let hash = private_tx.hash(); - trace!("Private transaction taken for verification"); - let original_tx = self.extract_original_transaction(private_tx, &contract)?; - trace!("Validating transaction: {:?}", original_tx); - // Verify with the first account available - trace!("The following account will be used for verification: {:?}", validation_account); - let nonce_cache = NonceCache::new(NONCE_CACHE_SIZE); - self.transactions_for_verification.lock().add_transaction( - original_tx, - contract, - validation_account, - hash, - self.pool_client(&nonce_cache), - )?; - let provider = Arc::downgrade(self); - self.channel.send(ClientIoMessage::execute(move |_| { - if let Some(provider) = provider.upgrade() { - if let Err(e) = provider.process_queue() { - debug!("Unable to process the queue: {}", e); - } - } - })).map_err(|_| ErrorKind::ClientIsMalformed.into()) + //extract the original transaction + let encrypted_data = private_tx.encrypted(); + let transaction_bytes = self.decrypt(&contract, &encrypted_data)?; + let original_tx: UnverifiedTransaction = Rlp::new(&transaction_bytes).as_val()?; + let nonce_cache = NonceCache::new(NONCE_CACHE_SIZE); + //add to the queue for further verification + self.transactions_for_verification.add_transaction( + original_tx, + validation_account.map(|&account| account), + private_tx, + self.pool_client(&nonce_cache), + )?; + let provider = Arc::downgrade(self); + let result = self.channel.send(ClientIoMessage::execute(move |_| { + if let Some(provider) = provider.upgrade() { + if let Err(e) = provider.process_verification_queue() { + warn!(target: "privatetx", "Unable to process the 
queue: {}", e); + } } + })); + if let Err(e) = result { + warn!(target: "privatetx", "Error sending NewPrivateTransaction message: {:?}", e); } + Ok(private_tx_hash) } - fn import_signed_private_transaction(&self, rlp: &[u8]) -> Result<(), Error> { + fn import_signed_private_transaction(&self, rlp: &[u8]) -> Result { let tx: SignedPrivateTransaction = Rlp::new(rlp).as_val()?; - trace!("Signature for private transaction received: {:?}", tx); + trace!(target: "privatetx", "Signature for private transaction received: {:?}", tx); let private_hash = tx.private_transaction_hash(); - let desc = match self.transactions_for_signing.lock().get(&private_hash) { - None => { - // TODO [ToDr] Verification (we can't just blindly forward every transaction) - - // Not our transaction, broadcast further to peers - self.broadcast_signed_private_transaction(rlp.into()); - return Ok(()); - }, - Some(desc) => desc, - }; - - let last = self.last_required_signature(&desc, tx.signature())?; - - if last { - let mut signatures = desc.received_signatures.clone(); - signatures.push(tx.signature()); - let rsv: Vec = signatures.into_iter().map(|sign| sign.into_electrum().into()).collect(); - //Create public transaction - let public_tx = self.public_transaction( - desc.state.clone(), - &desc.original_transaction, - &rsv, - desc.original_transaction.nonce, - desc.original_transaction.gas_price - )?; - trace!("Last required signature received, public transaction created: {:?}", public_tx); - //Sign and add it to the queue - let chain_id = desc.original_transaction.chain_id(); - let hash = public_tx.hash(chain_id); - let signer_account = self.signer_account.ok_or_else(|| ErrorKind::SignerAccountNotSet)?; - let password = find_account_password(&self.passwords, &*self.accounts, &signer_account); - let signature = self.accounts.sign(signer_account, password, hash)?; - let signed = SignedTransaction::new(public_tx.with_signature(signature, chain_id))?; - match self.miner.import_own_transaction(&*self.client, signed.into()) { - Ok(_) => trace!("Public transaction added to queue"), - Err(err) => { - trace!("Failed to add transaction to queue, error: {:?}", err); - bail!(err); - } - } - //Remove from store for signing - match self.transactions_for_signing.lock().remove(&private_hash) { - Ok(_) => {} - Err(err) => { - trace!("Failed to remove transaction from signing store, error: {:?}", err); - bail!(err); - } - } - } else { - //Add signature to the store - match self.transactions_for_signing.lock().add_signature(&private_hash, tx.signature()) { - Ok(_) => trace!("Signature stored for private transaction"), - Err(err) => { - trace!("Failed to add signature to signing store, error: {:?}", err); - bail!(err); + let provider = Arc::downgrade(self); + let result = self.channel.send(ClientIoMessage::execute(move |_| { + if let Some(provider) = provider.upgrade() { + if let Err(e) = provider.process_signature(&tx) { + warn!(target: "privatetx", "Unable to process the signature: {}", e); } } + })); + if let Err(e) = result { + warn!(target: "privatetx", "Error sending NewSignedPrivateTransaction message: {:?}", e); } - Ok(()) + Ok(private_hash) } } @@ -689,9 +697,9 @@ fn find_account_password(passwords: &Vec, account_provider: &AccountPr impl ChainNotify for Provider { fn new_blocks(&self, imported: Vec, _invalid: Vec, _route: ChainRoute, _sealed: Vec, _proposed: Vec, _duration: Duration) { if !imported.is_empty() { - trace!("New blocks imported, try to prune the queue"); - if let Err(err) = self.process_queue() { - trace!("Cannot prune 
private transactions queue. error: {:?}", err); + trace!(target: "privatetx", "New blocks imported, try to prune the queue"); + if let Err(err) = self.process_verification_queue() { + warn!(target: "privatetx", "Cannot prune private transactions queue. error: {:?}", err); } } } diff --git a/ethcore/private-tx/src/messages.rs b/ethcore/private-tx/src/messages.rs index 57362e7ce62..c0825fb59b6 100644 --- a/ethcore/private-tx/src/messages.rs +++ b/ethcore/private-tx/src/messages.rs @@ -25,15 +25,41 @@ use transaction::signature::{add_chain_replay_protection, check_replay_protectio #[derive(Default, Debug, Clone, PartialEq, RlpEncodable, RlpDecodable, Eq)] pub struct PrivateTransaction { /// Encrypted data - pub encrypted: Bytes, + encrypted: Bytes, /// Address of the contract - pub contract: Address, + contract: Address, + /// Hash + hash: H256, } impl PrivateTransaction { - /// Compute hash on private transaction + /// Constructor + pub fn new(encrypted: Bytes, contract: Address) -> Self { + PrivateTransaction { + encrypted, + contract, + hash: 0.into(), + }.compute_hash() + } + + fn compute_hash(mut self) -> PrivateTransaction { + self.hash = keccak(&*self.rlp_bytes()); + self + } + + /// Hash of the private transaction pub fn hash(&self) -> H256 { - keccak(&*self.rlp_bytes()) + self.hash + } + + /// Address of the contract + pub fn contract(&self) -> Address { + self.contract + } + + /// Encrypted data + pub fn encrypted(&self) -> Bytes { + self.encrypted.clone() } } @@ -49,6 +75,8 @@ pub struct SignedPrivateTransaction { r: U256, /// The S field of the signature s: U256, + /// Hash + hash: H256, } impl SignedPrivateTransaction { @@ -59,7 +87,13 @@ impl SignedPrivateTransaction { r: sig.r().into(), s: sig.s().into(), v: add_chain_replay_protection(sig.v() as u64, chain_id), - } + hash: 0.into(), + }.compute_hash() + } + + fn compute_hash(mut self) -> SignedPrivateTransaction { + self.hash = keccak(&*self.rlp_bytes()); + self } pub fn standard_v(&self) -> u8 { check_replay_protection(self.v) } @@ -73,4 +107,9 @@ impl SignedPrivateTransaction { pub fn private_transaction_hash(&self) -> H256 { self.private_transaction_hash } + + /// Own hash + pub fn hash(&self) -> H256 { + self.hash + } } diff --git a/ethcore/private-tx/src/private_transactions.rs b/ethcore/private-tx/src/private_transactions.rs index e16d6ab911b..a0f58f9cabc 100644 --- a/ethcore/private-tx/src/private_transactions.rs +++ b/ethcore/private-tx/src/private_transactions.rs @@ -15,62 +15,139 @@ // along with Parity. If not, see . use std::sync::Arc; +use std::cmp; use std::collections::{HashMap, HashSet}; use bytes::Bytes; use ethcore_miner::pool; use ethereum_types::{H256, U256, Address}; +use heapsize::HeapSizeOf; use ethkey::Signature; +use messages::PrivateTransaction; +use parking_lot::RwLock; use transaction::{UnverifiedTransaction, SignedTransaction}; - +use txpool; +use txpool::{VerifiedTransaction, Verifier}; use error::{Error, ErrorKind}; +type Pool = txpool::Pool; + /// Maximum length for private transactions queues. 
const MAX_QUEUE_LEN: usize = 8312; -/// Desriptor for private transaction stored in queue for verification -#[derive(Default, Debug, Clone, PartialEq, Eq)] -pub struct PrivateTransactionDesc { - /// Hash of the private transaction - pub private_hash: H256, - /// Contract's address used in private transaction - pub contract: Address, +/// Private transaction stored in queue for verification +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct VerifiedPrivateTransaction { + /// Original private transaction + pub private_transaction: PrivateTransaction, /// Address that should be used for verification - pub validator_account: Address, + pub validator_account: Option
, + /// Resulting verified transaction + pub transaction: SignedTransaction, + /// Original transaction hash + pub transaction_hash: H256, + /// Original transaction sender + pub transaction_sender: Address, +} + +impl txpool::VerifiedTransaction for VerifiedPrivateTransaction { + type Hash = H256; + type Sender = Address; + + fn hash(&self) -> &H256 { + &self.transaction_hash + } + + fn mem_usage(&self) -> usize { + self.transaction.heap_size_of_children() + } + + fn sender(&self) -> &Address { + &self.transaction_sender + } +} + +impl pool::ScoredTransaction for VerifiedPrivateTransaction { + fn priority(&self) -> pool::Priority { + pool::Priority::Regular + } + + /// Gets transaction gas price. + fn gas_price(&self) -> &U256 { + &self.transaction.gas_price + } + + /// Gets transaction nonce. + fn nonce(&self) -> U256 { + self.transaction.nonce + } +} + +/// Checks readiness of transactions by looking if the transaction from sender already exists. +/// Guarantees only one transaction per sender +#[derive(Debug)] +pub struct PrivateReadyState { + senders: HashSet
, + state: C, +} + +impl PrivateReadyState { + /// Create new State checker, given client interface. + pub fn new( + state: C, + ) -> Self { + PrivateReadyState { + senders: Default::default(), + state, + } + } +} + +impl txpool::Ready for PrivateReadyState { + fn is_ready(&mut self, tx: &VerifiedPrivateTransaction) -> txpool::Readiness { + let sender = tx.sender(); + let state = &self.state; + let state_nonce = state.account_nonce(sender); + if self.senders.contains(sender) { + txpool::Readiness::Future + } else { + self.senders.insert(*sender); + match tx.transaction.nonce.cmp(&state_nonce) { + cmp::Ordering::Greater => txpool::Readiness::Future, + cmp::Ordering::Less => txpool::Readiness::Stale, + cmp::Ordering::Equal => txpool::Readiness::Ready, + } + } + } } /// Storage for private transactions for verification pub struct VerificationStore { - /// Descriptors for private transactions in queue for verification with key - hash of the original transaction - descriptors: HashMap, - /// Queue with transactions for verification - /// - /// TODO [ToDr] Might actually be better to use `txpool` directly and: - /// 1. Store descriptors inside `VerifiedTransaction` - /// 2. Use custom `ready` implementation to only fetch one transaction per sender. - /// 3. Get rid of passing dummy `block_number` and `timestamp` - transactions: pool::TransactionQueue, + verification_pool: RwLock, + verification_options: pool::verifier::Options, } impl Default for VerificationStore { fn default() -> Self { VerificationStore { - descriptors: Default::default(), - transactions: pool::TransactionQueue::new( - pool::Options { - max_count: MAX_QUEUE_LEN, - max_per_sender: MAX_QUEUE_LEN / 10, - max_mem_usage: 8 * 1024 * 1024, - }, - pool::verifier::Options { - // TODO [ToDr] This should probably be based on some real values? - minimal_gas_price: 0.into(), - block_gas_limit: 8_000_000.into(), - tx_gas_limit: U256::max_value(), - no_early_reject: false - }, - pool::PrioritizationStrategy::GasPriceOnly, - ) + verification_pool: RwLock::new( + txpool::Pool::new( + txpool::NoopListener, + pool::scoring::NonceAndGasPrice(pool::PrioritizationStrategy::GasPriceOnly), + pool::Options { + max_count: MAX_QUEUE_LEN, + max_per_sender: MAX_QUEUE_LEN / 10, + max_mem_usage: 8 * 1024 * 1024, + }, + ) + ), + verification_options: pool::verifier::Options { + // TODO [ToDr] This should probably be based on some real values? + minimal_gas_price: 0.into(), + block_gas_limit: 8_000_000.into(), + tx_gas_limit: U256::max_value(), + no_early_reject: false, + }, } } } @@ -78,66 +155,43 @@ impl Default for VerificationStore { impl VerificationStore { /// Adds private transaction for verification into the store pub fn add_transaction( - &mut self, + &self, transaction: UnverifiedTransaction, - contract: Address, - validator_account: Address, - private_hash: H256, + validator_account: Option
, + private_transaction: PrivateTransaction, client: C, ) -> Result<(), Error> { - if self.descriptors.len() > MAX_QUEUE_LEN { - bail!(ErrorKind::QueueIsFull); - } - let transaction_hash = transaction.hash(); - if self.descriptors.get(&transaction_hash).is_some() { - bail!(ErrorKind::PrivateTransactionAlreadyImported); - } - - let results = self.transactions.import( - client, - vec![pool::verifier::Transaction::Unverified(transaction)], - ); - - // Verify that transaction was imported - results.into_iter() - .next() - .expect("One transaction inserted; one result returned; qed")?; - - self.descriptors.insert(transaction_hash, PrivateTransactionDesc { - private_hash, - contract, + let options = self.verification_options.clone(); + // Use pool's verifying pipeline for original transaction's verification + let verifier = pool::verifier::Verifier::new(client, options, Default::default(), None); + let unverified = pool::verifier::Transaction::Unverified(transaction); + let verified_tx = verifier.verify_transaction(unverified)?; + let signed_tx: SignedTransaction = verified_tx.signed().clone(); + let signed_hash = signed_tx.hash(); + let signed_sender = signed_tx.sender(); + let verified = VerifiedPrivateTransaction { + private_transaction, validator_account, - }); - + transaction: signed_tx, + transaction_hash: signed_hash, + transaction_sender: signed_sender, + }; + let mut pool = self.verification_pool.write(); + pool.import(verified)?; Ok(()) } - /// Returns transactions ready for verification + /// Drains transactions ready for verification from the pool /// Returns only one transaction per sender because several cannot be verified in a row without verification from other peers - pub fn ready_transactions(&self, client: C) -> Vec> { - // We never store PendingTransactions and we don't use internal cache, - // so we don't need to provide real block number of timestamp here - let block_number = 0; - let timestamp = 0; - let nonce_cap = None; - - self.transactions.collect_pending(client, block_number, timestamp, nonce_cap, |transactions| { - // take only one transaction per sender - let mut senders = HashSet::with_capacity(self.descriptors.len()); - transactions.filter(move |tx| senders.insert(tx.signed().sender())).collect() - }) - } - - /// Returns descriptor of the corresponding private transaction - pub fn private_transaction_descriptor(&self, transaction_hash: &H256) -> Result<&PrivateTransactionDesc, Error> { - self.descriptors.get(transaction_hash).ok_or(ErrorKind::PrivateTransactionNotFound.into()) - } - - /// Remove transaction from the queue for verification - pub fn remove_private_transaction(&mut self, transaction_hash: &H256) { - self.descriptors.remove(transaction_hash); - self.transactions.remove(&[*transaction_hash], true); + pub fn drain(&self, client: C) -> Vec> { + let ready = PrivateReadyState::new(client); + let transactions: Vec<_> = self.verification_pool.read().pending(ready).collect(); + let mut pool = self.verification_pool.write(); + for tx in &transactions { + pool.remove(tx.hash(), true); + } + transactions } } diff --git a/ethcore/service/Cargo.toml b/ethcore/service/Cargo.toml index e1ba1c81f62..ce445f6d9f8 100644 --- a/ethcore/service/Cargo.toml +++ b/ethcore/service/Cargo.toml @@ -10,6 +10,7 @@ ethcore = { path = ".." 
} ethcore-io = { path = "../../util/io" } ethcore-private-tx = { path = "../private-tx" } ethcore-sync = { path = "../sync" } +ethereum-types = "0.3" kvdb = { git = "https://github.com/paritytech/parity-common" } log = "0.4" stop-guard = { path = "../../util/stop-guard" } diff --git a/ethcore/service/src/lib.rs b/ethcore/service/src/lib.rs index d85a377cde2..7ded2af79ef 100644 --- a/ethcore/service/src/lib.rs +++ b/ethcore/service/src/lib.rs @@ -19,6 +19,7 @@ extern crate ethcore; extern crate ethcore_io as io; extern crate ethcore_private_tx; extern crate ethcore_sync as sync; +extern crate ethereum_types; extern crate kvdb; extern crate stop_guard; diff --git a/ethcore/service/src/service.rs b/ethcore/service/src/service.rs index 1ffb0d621b4..a6bbc4e1073 100644 --- a/ethcore/service/src/service.rs +++ b/ethcore/service/src/service.rs @@ -21,6 +21,7 @@ use std::path::Path; use std::time::Duration; use ansi_term::Colour; +use ethereum_types::H256; use io::{IoContext, TimerToken, IoHandler, IoService, IoError}; use stop_guard::StopGuard; @@ -54,12 +55,24 @@ impl PrivateTxService { } impl PrivateTxHandler for PrivateTxService { - fn import_private_transaction(&self, rlp: &[u8]) -> Result<(), String> { - self.provider.import_private_transaction(rlp).map_err(|e| e.to_string()) + fn import_private_transaction(&self, rlp: &[u8]) -> Result { + match self.provider.import_private_transaction(rlp) { + Ok(import_result) => Ok(import_result), + Err(err) => { + warn!(target: "privatetx", "Unable to import private transaction packet: {}", err); + bail!(err.to_string()) + } + } } - fn import_signed_private_transaction(&self, rlp: &[u8]) -> Result<(), String> { - self.provider.import_signed_private_transaction(rlp).map_err(|e| e.to_string()) + fn import_signed_private_transaction(&self, rlp: &[u8]) -> Result { + match self.provider.import_signed_private_transaction(rlp) { + Ok(import_result) => Ok(import_result), + Err(err) => { + warn!(target: "privatetx", "Unable to import signed private transaction packet: {}", err); + bail!(err.to_string()) + } + } } } diff --git a/ethcore/src/client/chain_notify.rs b/ethcore/src/client/chain_notify.rs index 62de03591df..ebfe7bdef5c 100644 --- a/ethcore/src/client/chain_notify.rs +++ b/ethcore/src/client/chain_notify.rs @@ -26,9 +26,9 @@ pub enum ChainMessageType { /// Consensus message Consensus(Vec), /// Message with private transaction - PrivateTransaction(Vec), + PrivateTransaction(H256, Vec), /// Message with signed private transaction - SignedPrivateTransaction(Vec), + SignedPrivateTransaction(H256, Vec), } /// Route type to indicate whether it is enacted or retracted. 
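The `ChainMessageType` variants above now carry the packet hash (`H256`) alongside the encoded payload. In the sync hunks below (`api.rs`, `handler.rs`), that hash is what lets each peer record which private-transaction packets it has already been sent, so a packet is not re-propagated to the peer it came from. The following is a minimal standalone sketch of that per-peer bookkeeping; `Peer` and `mark_for_send` are hypothetical stand-ins (only the field name `last_sent_private_transactions` mirrors the patch), not the real sync structures:

```rust
use std::collections::HashSet;

/// Hypothetical stand-in for a sync peer; the real peer state lives in
/// ethcore/sync and tracks far more than this.
struct Peer {
    last_sent_private_transactions: HashSet<[u8; 32]>,
}

/// Returns true if the packet identified by `hash` should be sent to this
/// peer; recording the hash prevents re-sending the same private
/// transaction packet, including echoing it back to its sender.
fn mark_for_send(peer: &mut Peer, hash: [u8; 32]) -> bool {
    // HashSet::insert returns false when the hash is already present.
    peer.last_sent_private_transactions.insert(hash)
}

fn main() {
    let mut peer = Peer { last_sent_private_transactions: HashSet::new() };
    let packet_hash = [0xab; 32];
    assert!(mark_for_send(&mut peer, packet_hash));  // first sight: send it
    assert!(!mark_for_send(&mut peer, packet_hash)); // already sent: skip
}
```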
diff --git a/ethcore/src/test_helpers.rs b/ethcore/src/test_helpers.rs index c873db5d14f..06cfe703050 100644 --- a/ethcore/src/test_helpers.rs +++ b/ethcore/src/test_helpers.rs @@ -490,8 +490,8 @@ impl ChainNotify for TestNotify { fn broadcast(&self, message: ChainMessageType) { let data = match message { ChainMessageType::Consensus(data) => data, - ChainMessageType::SignedPrivateTransaction(data) => data, - ChainMessageType::PrivateTransaction(data) => data, + ChainMessageType::SignedPrivateTransaction(_, data) => data, + ChainMessageType::PrivateTransaction(_, data) => data, }; self.messages.write().push(data); } diff --git a/ethcore/sync/src/api.rs b/ethcore/sync/src/api.rs index 606aa39b31f..5128df3b26f 100644 --- a/ethcore/sync/src/api.rs +++ b/ethcore/sync/src/api.rs @@ -38,7 +38,8 @@ use std::net::{SocketAddr, AddrParseError}; use std::str::FromStr; use parking_lot::RwLock; use chain::{ETH_PROTOCOL_VERSION_63, ETH_PROTOCOL_VERSION_62, - PAR_PROTOCOL_VERSION_1, PAR_PROTOCOL_VERSION_2, PAR_PROTOCOL_VERSION_3}; + PAR_PROTOCOL_VERSION_1, PAR_PROTOCOL_VERSION_2, PAR_PROTOCOL_VERSION_3, + PRIVATE_TRANSACTION_PACKET, SIGNED_PRIVATE_TRANSACTION_PACKET}; use light::client::AsLightClient; use light::Provider; use light::net::{ @@ -522,8 +523,10 @@ impl ChainNotify for EthSync { let mut sync_io = NetSyncIo::new(context, &*self.eth_handler.chain, &*self.eth_handler.snapshot_service, &self.eth_handler.overlay); match message_type { ChainMessageType::Consensus(message) => self.eth_handler.sync.write().propagate_consensus_packet(&mut sync_io, message), - ChainMessageType::PrivateTransaction(message) => self.eth_handler.sync.write().propagate_private_transaction(&mut sync_io, message), - ChainMessageType::SignedPrivateTransaction(message) => self.eth_handler.sync.write().propagate_signed_private_transaction(&mut sync_io, message), + ChainMessageType::PrivateTransaction(transaction_hash, message) => + self.eth_handler.sync.write().propagate_private_transaction(&mut sync_io, transaction_hash, PRIVATE_TRANSACTION_PACKET, message), + ChainMessageType::SignedPrivateTransaction(transaction_hash, message) => + self.eth_handler.sync.write().propagate_private_transaction(&mut sync_io, transaction_hash, SIGNED_PRIVATE_TRANSACTION_PACKET, message), } }); } diff --git a/ethcore/sync/src/chain/handler.rs b/ethcore/sync/src/chain/handler.rs index 8547be7b3ff..c30c60a7cee 100644 --- a/ethcore/sync/src/chain/handler.rs +++ b/ethcore/sync/src/chain/handler.rs @@ -552,6 +552,7 @@ impl SyncHandler { asking_hash: None, ask_time: Instant::now(), last_sent_transactions: HashSet::new(), + last_sent_private_transactions: HashSet::new(), expired: false, confirmation: if sync.fork_block.is_none() { ForkConfirmation::Confirmed } else { ForkConfirmation::Unconfirmed }, asking_snapshot_data: None, @@ -631,21 +632,29 @@ impl SyncHandler { } /// Called when peer sends us signed private transaction packet - fn on_signed_private_transaction(sync: &ChainSync, _io: &mut SyncIo, peer_id: PeerId, r: &Rlp) -> Result<(), DownloaderImportError> { + fn on_signed_private_transaction(sync: &mut ChainSync, _io: &mut SyncIo, peer_id: PeerId, r: &Rlp) -> Result<(), DownloaderImportError> { if !sync.peers.get(&peer_id).map_or(false, |p| p.can_sync()) { trace!(target: "sync", "{} Ignoring packet from unconfirmed/unknown peer", peer_id); return Ok(()); } trace!(target: "sync", "Received signed private transaction packet from {:?}", peer_id); - if let Err(e) = sync.private_tx_handler.import_signed_private_transaction(r.as_raw()) { - trace!(target: 
"sync", "Ignoring the message, error queueing: {}", e); - } + match sync.private_tx_handler.import_signed_private_transaction(r.as_raw()) { + Ok(transaction_hash) => { + //don't send the packet back + if let Some(ref mut peer) = sync.peers.get_mut(&peer_id) { + peer.last_sent_private_transactions.insert(transaction_hash); + } + }, + Err(e) => { + trace!(target: "sync", "Ignoring the message, error queueing: {}", e); + } + } Ok(()) } /// Called when peer sends us new private transaction packet - fn on_private_transaction(sync: &ChainSync, _io: &mut SyncIo, peer_id: PeerId, r: &Rlp) -> Result<(), DownloaderImportError> { + fn on_private_transaction(sync: &mut ChainSync, _io: &mut SyncIo, peer_id: PeerId, r: &Rlp) -> Result<(), DownloaderImportError> { if !sync.peers.get(&peer_id).map_or(false, |p| p.can_sync()) { trace!(target: "sync", "{} Ignoring packet from unconfirmed/unknown peer", peer_id); return Ok(()); @@ -653,9 +662,17 @@ impl SyncHandler { trace!(target: "sync", "Received private transaction packet from {:?}", peer_id); - if let Err(e) = sync.private_tx_handler.import_private_transaction(r.as_raw()) { - trace!(target: "sync", "Ignoring the message, error queueing: {}", e); - } + match sync.private_tx_handler.import_private_transaction(r.as_raw()) { + Ok(transaction_hash) => { + //don't send the packet back + if let Some(ref mut peer) = sync.peers.get_mut(&peer_id) { + peer.last_sent_private_transactions.insert(transaction_hash); + } + }, + Err(e) => { + trace!(target: "sync", "Ignoring the message, error queueing: {}", e); + } + } Ok(()) } } diff --git a/ethcore/sync/src/chain/mod.rs b/ethcore/sync/src/chain/mod.rs index b53c38b43af..06bd3febab6 100644 --- a/ethcore/sync/src/chain/mod.rs +++ b/ethcore/sync/src/chain/mod.rs @@ -173,8 +173,8 @@ pub const SNAPSHOT_MANIFEST_PACKET: u8 = 0x12; pub const GET_SNAPSHOT_DATA_PACKET: u8 = 0x13; pub const SNAPSHOT_DATA_PACKET: u8 = 0x14; pub const CONSENSUS_DATA_PACKET: u8 = 0x15; -const PRIVATE_TRANSACTION_PACKET: u8 = 0x16; -const SIGNED_PRIVATE_TRANSACTION_PACKET: u8 = 0x17; +pub const PRIVATE_TRANSACTION_PACKET: u8 = 0x16; +pub const SIGNED_PRIVATE_TRANSACTION_PACKET: u8 = 0x17; const MAX_SNAPSHOT_CHUNKS_DOWNLOAD_AHEAD: usize = 3; @@ -324,6 +324,8 @@ pub struct PeerInfo { ask_time: Instant, /// Holds a set of transactions recently sent to this peer to avoid spamming. last_sent_transactions: HashSet, + /// Holds a set of private transactions and their signatures recently sent to this peer to avoid spamming. + last_sent_private_transactions: HashSet, /// Pending request is expired and result should be ignored expired: bool, /// Peer fork confirmation status @@ -353,6 +355,10 @@ impl PeerInfo { self.expired = true; } } + + fn reset_private_stats(&mut self) { + self.last_sent_private_transactions.clear(); + } } #[cfg(not(test))] @@ -1056,8 +1062,15 @@ impl ChainSync { self.peers.iter().filter_map(|(id, p)| if p.protocol_version >= PAR_PROTOCOL_VERSION_2.0 { Some(*id) } else { None }).collect() } - fn get_private_transaction_peers(&self) -> Vec { - self.peers.iter().filter_map(|(id, p)| if p.protocol_version >= PAR_PROTOCOL_VERSION_3.0 { Some(*id) } else { None }).collect() + fn get_private_transaction_peers(&self, transaction_hash: &H256) -> Vec { + self.peers.iter().filter_map( + |(id, p)| if p.protocol_version >= PAR_PROTOCOL_VERSION_3.0 + && !p.last_sent_private_transactions.contains(transaction_hash) { + Some(*id) + } else { + None + } + ).collect() } /// Maintain other peers. 
Send out any new blocks and transactions @@ -1085,8 +1098,10 @@ impl ChainSync { // Select random peer to re-broadcast transactions to. let peer = random::new().gen_range(0, self.peers.len()); trace!(target: "sync", "Re-broadcasting transactions to a random peer."); - self.peers.values_mut().nth(peer).map(|peer_info| - peer_info.last_sent_transactions.clear() + self.peers.values_mut().nth(peer).map(|peer_info| { + peer_info.last_sent_transactions.clear(); + peer_info.reset_private_stats() + } ); } } @@ -1127,13 +1142,8 @@ impl ChainSync { } /// Broadcast private transaction message to peers. - pub fn propagate_private_transaction(&mut self, io: &mut SyncIo, packet: Bytes) { - SyncPropagator::propagate_private_transaction(self, io, packet); - } - - /// Broadcast signed private transaction message to peers. - pub fn propagate_signed_private_transaction(&mut self, io: &mut SyncIo, packet: Bytes) { - SyncPropagator::propagate_signed_private_transaction(self, io, packet); + pub fn propagate_private_transaction(&mut self, io: &mut SyncIo, transaction_hash: H256, packet_id: PacketId, packet: Bytes) { + SyncPropagator::propagate_private_transaction(self, io, transaction_hash, packet_id, packet); } } @@ -1256,6 +1266,7 @@ pub mod tests { asking_hash: None, ask_time: Instant::now(), last_sent_transactions: HashSet::new(), + last_sent_private_transactions: HashSet::new(), expired: false, confirmation: super::ForkConfirmation::Confirmed, snapshot_number: None, diff --git a/ethcore/sync/src/chain/propagator.rs b/ethcore/sync/src/chain/propagator.rs index aabe90c9376..102a3171246 100644 --- a/ethcore/sync/src/chain/propagator.rs +++ b/ethcore/sync/src/chain/propagator.rs @@ -36,8 +36,6 @@ use super::{ CONSENSUS_DATA_PACKET, NEW_BLOCK_HASHES_PACKET, NEW_BLOCK_PACKET, - PRIVATE_TRANSACTION_PACKET, - SIGNED_PRIVATE_TRANSACTION_PACKET, TRANSACTIONS_PACKET, }; @@ -293,20 +291,14 @@ impl SyncPropagator { } /// Broadcast private transaction message to peers. - pub fn propagate_private_transaction(sync: &mut ChainSync, io: &mut SyncIo, packet: Bytes) { - let lucky_peers = ChainSync::select_random_peers(&sync.get_private_transaction_peers()); + pub fn propagate_private_transaction(sync: &mut ChainSync, io: &mut SyncIo, transaction_hash: H256, packet_id: PacketId, packet: Bytes) { + let lucky_peers = ChainSync::select_random_peers(&sync.get_private_transaction_peers(&transaction_hash)); trace!(target: "sync", "Sending private transaction packet to {:?}", lucky_peers); for peer_id in lucky_peers { - SyncPropagator::send_packet(io, peer_id, PRIVATE_TRANSACTION_PACKET, packet.clone()); - } - } - - /// Broadcast signed private transaction message to peers. 
- pub fn propagate_signed_private_transaction(sync: &mut ChainSync, io: &mut SyncIo, packet: Bytes) { - let lucky_peers = ChainSync::select_random_peers(&sync.get_private_transaction_peers()); - trace!(target: "sync", "Sending signed private transaction packet to {:?}", lucky_peers); - for peer_id in lucky_peers { - SyncPropagator::send_packet(io, peer_id, SIGNED_PRIVATE_TRANSACTION_PACKET, packet.clone()); + if let Some(ref mut peer) = sync.peers.get_mut(&peer_id) { + peer.last_sent_private_transactions.insert(transaction_hash); + } + SyncPropagator::send_packet(io, peer_id, packet_id, packet.clone()); } } @@ -428,6 +420,7 @@ mod tests { asking_hash: None, ask_time: Instant::now(), last_sent_transactions: HashSet::new(), + last_sent_private_transactions: HashSet::new(), expired: false, confirmation: ForkConfirmation::Confirmed, snapshot_number: None, diff --git a/ethcore/sync/src/private_tx.rs b/ethcore/sync/src/private_tx.rs index d7434c8bd5b..03928c22d6f 100644 --- a/ethcore/sync/src/private_tx.rs +++ b/ethcore/sync/src/private_tx.rs @@ -15,26 +15,29 @@ // along with Parity. If not, see . use parking_lot::Mutex; +use ethereum_types::H256; /// Trait which should be implemented by a private transaction handler. pub trait PrivateTxHandler: Send + Sync + 'static { /// Function called on new private transaction received. - fn import_private_transaction(&self, rlp: &[u8]) -> Result<(), String>; + /// Returns the hash of the imported transaction + fn import_private_transaction(&self, rlp: &[u8]) -> Result; /// Function called on new signed private transaction received. - fn import_signed_private_transaction(&self, rlp: &[u8]) -> Result<(), String>; + /// Returns the hash of the imported transaction + fn import_signed_private_transaction(&self, rlp: &[u8]) -> Result; } /// Nonoperative private transaction handler. 
pub struct NoopPrivateTxHandler; impl PrivateTxHandler for NoopPrivateTxHandler { - fn import_private_transaction(&self, _rlp: &[u8]) -> Result<(), String> { - Ok(()) + fn import_private_transaction(&self, _rlp: &[u8]) -> Result { + Ok(H256::default()) } - fn import_signed_private_transaction(&self, _rlp: &[u8]) -> Result<(), String> { - Ok(()) + fn import_signed_private_transaction(&self, _rlp: &[u8]) -> Result { + Ok(H256::default()) } } @@ -48,13 +51,13 @@ pub struct SimplePrivateTxHandler { } impl PrivateTxHandler for SimplePrivateTxHandler { - fn import_private_transaction(&self, rlp: &[u8]) -> Result<(), String> { + fn import_private_transaction(&self, rlp: &[u8]) -> Result { self.txs.lock().push(rlp.to_vec()); - Ok(()) + Ok(H256::default()) } - fn import_signed_private_transaction(&self, rlp: &[u8]) -> Result<(), String> { + fn import_signed_private_transaction(&self, rlp: &[u8]) -> Result { self.signed_txs.lock().push(rlp.to_vec()); - Ok(()) + Ok(H256::default()) } } diff --git a/ethcore/sync/src/tests/helpers.rs b/ethcore/sync/src/tests/helpers.rs index 59db57dc5ea..d75d71ea90a 100644 --- a/ethcore/sync/src/tests/helpers.rs +++ b/ethcore/sync/src/tests/helpers.rs @@ -33,7 +33,7 @@ use ethcore::test_helpers; use sync_io::SyncIo; use io::{IoChannel, IoContext, IoHandler}; use api::WARP_SYNC_PROTOCOL_ID; -use chain::{ChainSync, ETH_PROTOCOL_VERSION_63, PAR_PROTOCOL_VERSION_3}; +use chain::{ChainSync, ETH_PROTOCOL_VERSION_63, PAR_PROTOCOL_VERSION_3, PRIVATE_TRANSACTION_PACKET, SIGNED_PRIVATE_TRANSACTION_PACKET}; use SyncConfig; use private_tx::SimplePrivateTxHandler; @@ -230,8 +230,10 @@ impl EthPeer where C: FlushingBlockChainClient { let mut io = TestIo::new(&*self.chain, &self.snapshot_service, &self.queue, None); match message { ChainMessageType::Consensus(data) => self.sync.write().propagate_consensus_packet(&mut io, data), - ChainMessageType::PrivateTransaction(data) => self.sync.write().propagate_private_transaction(&mut io, data), - ChainMessageType::SignedPrivateTransaction(data) => self.sync.write().propagate_signed_private_transaction(&mut io, data), + ChainMessageType::PrivateTransaction(transaction_hash, data) => + self.sync.write().propagate_private_transaction(&mut io, transaction_hash, PRIVATE_TRANSACTION_PACKET, data), + ChainMessageType::SignedPrivateTransaction(transaction_hash, data) => + self.sync.write().propagate_private_transaction(&mut io, transaction_hash, SIGNED_PRIVATE_TRANSACTION_PACKET, data), } } diff --git a/ethcore/sync/src/tests/private.rs b/ethcore/sync/src/tests/private.rs index 04b414b94c3..9b39aed76d7 100644 --- a/ethcore/sync/src/tests/private.rs +++ b/ethcore/sync/src/tests/private.rs @@ -24,11 +24,12 @@ use ethcore::CreateContractAddress; use transaction::{Transaction, Action}; use ethcore::executive::{contract_address}; use ethcore::test_helpers::{push_block_with_transactions}; -use ethcore_private_tx::{Provider, ProviderConfig, NoopEncryptor, Importer}; +use ethcore_private_tx::{Provider, ProviderConfig, NoopEncryptor, Importer, SignedPrivateTransaction}; use ethcore::account_provider::AccountProvider; use ethkey::{KeyPair}; use tests::helpers::{TestNet, TestIoHandler}; use rustc_hex::FromHex; +use rlp::Rlp; use SyncConfig; fn seal_spec() -> Spec { @@ -144,6 +145,8 @@ fn send_private_transaction() { //process signed response let signed_private_transaction = received_signed_private_transactions[0].clone(); assert!(pm0.import_signed_private_transaction(&signed_private_transaction).is_ok()); + let signature: SignedPrivateTransaction = 
Rlp::new(&signed_private_transaction).as_val().unwrap(); + assert!(pm0.process_signature(&signature).is_ok()); let local_transactions = net.peer(0).miner.local_transactions(); assert_eq!(local_transactions.len(), 1); } diff --git a/miner/src/pool/local_transactions.rs b/miner/src/pool/local_transactions.rs index a1c69ef2253..a71d9244cd6 100644 --- a/miner/src/pool/local_transactions.rs +++ b/miner/src/pool/local_transactions.rs @@ -20,7 +20,7 @@ use std::{fmt, sync::Arc}; use ethereum_types::H256; use linked_hash_map::LinkedHashMap; -use pool::VerifiedTransaction as Transaction; +use pool::{VerifiedTransaction as Transaction, ScoredTransaction}; use txpool::{self, VerifiedTransaction}; /// Status of local transaction. diff --git a/miner/src/pool/mod.rs b/miner/src/pool/mod.rs index 4a1223226d4..ccfbba7f800 100644 --- a/miner/src/pool/mod.rs +++ b/miner/src/pool/mod.rs @@ -24,10 +24,10 @@ use txpool; mod listener; mod queue; mod ready; -mod scoring; pub mod client; pub mod local_transactions; +pub mod scoring; pub mod verifier; #[cfg(test)] @@ -84,7 +84,7 @@ impl PendingSettings { /// Transaction priority. #[derive(Debug, PartialEq, Eq, PartialOrd, Clone, Copy)] -pub(crate) enum Priority { +pub enum Priority { /// Regular transactions received over the network. (no priority boost) Regular, /// Transactions from retracted blocks (medium priority) @@ -108,6 +108,18 @@ impl Priority { } } +/// Scoring properties for verified transaction. +pub trait ScoredTransaction { + /// Gets transaction priority. + fn priority(&self) -> Priority; + + /// Gets transaction gas price. + fn gas_price(&self) -> &U256; + + /// Gets transaction nonce. + fn nonce(&self) -> U256; +} + /// Verified transaction stored in the pool. #[derive(Debug, PartialEq, Eq)] pub struct VerifiedTransaction { @@ -137,11 +149,6 @@ impl VerifiedTransaction { } } - /// Gets transaction priority. - pub(crate) fn priority(&self) -> Priority { - self.priority - } - /// Gets transaction insertion id. pub(crate) fn insertion_id(&self) -> usize { self.insertion_id @@ -175,3 +182,19 @@ impl txpool::VerifiedTransaction for VerifiedTransaction { &self.sender } } + +impl ScoredTransaction for VerifiedTransaction { + fn priority(&self) -> Priority { + self.priority + } + + /// Gets transaction gas price. + fn gas_price(&self) -> &U256 { + &self.transaction.gas_price + } + + /// Gets transaction nonce. + fn nonce(&self) -> U256 { + self.transaction.nonce + } +} diff --git a/miner/src/pool/scoring.rs b/miner/src/pool/scoring.rs index dbe3c08f453..61fcf4e418a 100644 --- a/miner/src/pool/scoring.rs +++ b/miner/src/pool/scoring.rs @@ -31,7 +31,7 @@ use std::cmp; use ethereum_types::U256; use txpool::{self, scoring}; -use super::{verifier, PrioritizationStrategy, VerifiedTransaction}; +use super::{verifier, PrioritizationStrategy, VerifiedTransaction, ScoredTransaction}; /// Transaction with the same (sender, nonce) can be replaced only if /// `new_gas_price > old_gas_price + old_gas_price >> SHIFT` @@ -67,23 +67,23 @@ impl NonceAndGasPrice { } } -impl txpool::Scoring for NonceAndGasPrice { +impl
<P> txpool::Scoring<P>
for NonceAndGasPrice where P: ScoredTransaction + txpool::VerifiedTransaction { type Score = U256; type Event = (); - fn compare(&self, old: &VerifiedTransaction, other: &VerifiedTransaction) -> cmp::Ordering { - old.transaction.nonce.cmp(&other.transaction.nonce) + fn compare(&self, old: &P, other: &P) -> cmp::Ordering { + old.nonce().cmp(&other.nonce()) } - fn choose(&self, old: &VerifiedTransaction, new: &VerifiedTransaction) -> scoring::Choice { - if old.transaction.nonce != new.transaction.nonce { + fn choose(&self, old: &P, new: &P) -> scoring::Choice { + if old.nonce() != new.nonce() { return scoring::Choice::InsertNew } - let old_gp = old.transaction.gas_price; - let new_gp = new.transaction.gas_price; + let old_gp = old.gas_price(); + let new_gp = new.gas_price(); - let min_required_gp = bump_gas_price(old_gp); + let min_required_gp = bump_gas_price(*old_gp); match min_required_gp.cmp(&new_gp) { cmp::Ordering::Greater => scoring::Choice::RejectNew, @@ -91,7 +91,7 @@ impl txpool::Scoring for NonceAndGasPrice { } } - fn update_scores(&self, txs: &[txpool::Transaction], scores: &mut [U256], change: scoring::Change) { + fn update_scores(&self, txs: &[txpool::Transaction
<P>
], scores: &mut [U256], change: scoring::Change) { use self::scoring::Change; match change { @@ -101,7 +101,7 @@ impl txpool::Scoring for NonceAndGasPrice { assert!(i < txs.len()); assert!(i < scores.len()); - scores[i] = txs[i].transaction.transaction.gas_price; + scores[i] = *txs[i].transaction.gas_price(); let boost = match txs[i].priority() { super::Priority::Local => 15, super::Priority::Retracted => 10, @@ -122,10 +122,10 @@ impl txpool::Scoring for NonceAndGasPrice { } } - fn should_replace(&self, old: &VerifiedTransaction, new: &VerifiedTransaction) -> scoring::Choice { - if old.sender == new.sender { + fn should_replace(&self, old: &P, new: &P) -> scoring::Choice { + if old.sender() == new.sender() { // prefer earliest transaction - match new.transaction.nonce.cmp(&old.transaction.nonce) { + match new.nonce().cmp(&old.nonce()) { cmp::Ordering::Less => scoring::Choice::ReplaceOld, cmp::Ordering::Greater => scoring::Choice::RejectNew, cmp::Ordering::Equal => self.choose(old, new), @@ -134,8 +134,8 @@ impl txpool::Scoring for NonceAndGasPrice { // accept local transactions over the limit scoring::Choice::InsertNew } else { - let old_score = (old.priority(), old.transaction.gas_price); - let new_score = (new.priority(), new.transaction.gas_price); + let old_score = (old.priority(), old.gas_price()); + let new_score = (new.priority(), new.gas_price()); if new_score > old_score { scoring::Choice::ReplaceOld } else { @@ -144,7 +144,7 @@ impl txpool::Scoring for NonceAndGasPrice { } } - fn should_ignore_sender_limit(&self, new: &VerifiedTransaction) -> bool { + fn should_ignore_sender_limit(&self, new: &P) -> bool { new.priority().is_local() } } @@ -185,12 +185,8 @@ mod tests { }; let keypair = Random.generate().unwrap(); - let txs = vec![tx1, tx2, tx3, tx4].into_iter().enumerate().map(|(i, tx)| { - let verified = tx.unsigned().sign(keypair.secret(), None).verified(); - txpool::Transaction { - insertion_id: i as u64, - transaction: Arc::new(verified), - } + let txs = vec![tx1, tx2, tx3, tx4].into_iter().map(|tx| { + tx.unsigned().sign(keypair.secret(), None).verified() }).collect::>(); assert_eq!(scoring.should_replace(&txs[0], &txs[1]), RejectNew); @@ -213,11 +209,7 @@ mod tests { gas_price: 1, ..Default::default() }; - let verified_tx = tx.signed().verified(); - txpool::Transaction { - insertion_id: 0, - transaction: Arc::new(verified_tx), - } + tx.signed().verified() }; let tx_regular_high_gas = { let tx = Tx { @@ -225,11 +217,7 @@ mod tests { gas_price: 10, ..Default::default() }; - let verified_tx = tx.signed().verified(); - txpool::Transaction { - insertion_id: 1, - transaction: Arc::new(verified_tx), - } + tx.signed().verified() }; let tx_local_low_gas = { let tx = Tx { @@ -239,10 +227,7 @@ mod tests { }; let mut verified_tx = tx.signed().verified(); verified_tx.priority = ::pool::Priority::Local; - txpool::Transaction { - insertion_id: 2, - transaction: Arc::new(verified_tx), - } + verified_tx }; let tx_local_high_gas = { let tx = Tx { @@ -252,10 +237,7 @@ mod tests { }; let mut verified_tx = tx.signed().verified(); verified_tx.priority = ::pool::Priority::Local; - txpool::Transaction { - insertion_id: 3, - transaction: Arc::new(verified_tx), - } + verified_tx }; assert_eq!(scoring.should_replace(&tx_regular_low_gas, &tx_regular_high_gas), ReplaceOld); From 74ce0f738e0cd09b8422ba5d7847105654143676 Mon Sep 17 00:00:00 2001 From: Wei Tang Date: Wed, 29 Aug 2018 23:17:18 +0800 Subject: [PATCH 13/15] Add block reward contract config to ethash and allow off-chain contracts (#9312) This 
adds block reward contract config to ethash. A new config `blockRewardContractCode` is also added to both Aura and ethash. When specified, it will execute the code directly and overrides any `blockRewardContractAddress` config. Having this `blockRewardContractCode` config allows chains to deploy hard fork by simply replacing the current config value, without the need from us to support any `multi` block reward scheme. --- ethcore/src/engines/authority_round/mod.rs | 28 +++--- ethcore/src/engines/block_reward.rs | 101 +++++++++++++------- ethcore/src/engines/mod.rs | 40 ++++++++ ethcore/src/engines/null_engine.rs | 2 +- ethcore/src/ethereum/ethash.rs | 104 ++++++++++++++------- ethcore/src/machine.rs | 61 ++++++++---- json/src/spec/authority_round.rs | 6 +- json/src/spec/ethash.rs | 19 +++- 8 files changed, 253 insertions(+), 108 deletions(-) diff --git a/ethcore/src/engines/authority_round/mod.rs b/ethcore/src/engines/authority_round/mod.rs index 0c4906be565..d810f5a9c84 100644 --- a/ethcore/src/engines/authority_round/mod.rs +++ b/ethcore/src/engines/authority_round/mod.rs @@ -100,7 +100,11 @@ impl From for AuthorityRoundParams { immediate_transitions: p.immediate_transitions.unwrap_or(false), block_reward: p.block_reward.map_or_else(Default::default, Into::into), block_reward_contract_transition: p.block_reward_contract_transition.map_or(0, Into::into), - block_reward_contract: p.block_reward_contract_address.map(BlockRewardContract::new), + block_reward_contract: match (p.block_reward_contract_code, p.block_reward_contract_address) { + (Some(code), _) => Some(BlockRewardContract::new_from_code(Arc::new(code.into()))), + (_, Some(address)) => Some(BlockRewardContract::new_from_address(address.into())), + (None, None) => None, + }, maximum_uncle_count_transition: p.maximum_uncle_count_transition.map_or(0, Into::into), maximum_uncle_count: p.maximum_uncle_count.map_or(0, Into::into), empty_steps_transition: p.empty_steps_transition.map_or(u64::max_value(), |n| ::std::cmp::max(n.into(), 1)), @@ -1043,7 +1047,7 @@ impl Engine for AuthorityRound { /// Apply the block reward on finalisation of the block. fn on_close_block(&self, block: &mut ExecutedBlock) -> Result<(), Error> { - let mut benefactors = Vec::new(); + let mut beneficiaries = Vec::new(); if block.header().number() >= self.empty_steps_transition { let empty_steps = if block.header().seal().is_empty() { // this is a new block, calculate rewards based on the empty steps messages we have accumulated @@ -1069,32 +1073,22 @@ impl Engine for AuthorityRound { for empty_step in empty_steps { let author = empty_step.author()?; - benefactors.push((author, RewardKind::EmptyStep)); + beneficiaries.push((author, RewardKind::EmptyStep)); } } let author = *block.header().author(); - benefactors.push((author, RewardKind::Author)); + beneficiaries.push((author, RewardKind::Author)); let rewards: Vec<_> = match self.block_reward_contract { Some(ref c) if block.header().number() >= self.block_reward_contract_transition => { - // NOTE: this logic should be moved to a function when another - // engine needs support for block reward contract. - let mut call = |to, data| { - let result = self.machine.execute_as_system( - block, - to, - U256::max_value(), // unbounded gas? maybe make configurable. 
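For clarity, a minimal chain-spec fragment using the new fields might look as follows. This is an illustrative sketch only: the field names (`blockRewardContractTransition`, `blockRewardContractAddress`, `blockRewardContractCode`) come from the serde renames introduced in this patch, while the exact nesting, the transition block, the address and the truncated bytecode value are hypothetical placeholders.

```
"engine": {
	"Ethash": {
		"params": {
			"blockReward": "0x4563918244f40000",
			"blockRewardContractTransition": "0x42",
			"blockRewardContractAddress": "0x0000000000000000000000000000000000000042",
			"blockRewardContractCode": "0x6060604052..."
		}
	}
}
```

Per the deserialization hunk above, when both `blockRewardContractCode` and `blockRewardContractAddress` are given the code takes precedence and the address is ignored; when `blockRewardContractTransition` is omitted it defaults to 0.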
- Some(data), - ); - result.map_err(|e| format!("{}", e)) - }; + let mut call = super::default_system_or_code_call(&self.machine, block); - let rewards = c.reward(&benefactors, &mut call)?; + let rewards = c.reward(&beneficiaries, &mut call)?; rewards.into_iter().map(|(author, amount)| (author, RewardKind::External, amount)).collect() }, _ => { - benefactors.into_iter().map(|(author, reward_kind)| (author, reward_kind, self.block_reward)).collect() + beneficiaries.into_iter().map(|(author, reward_kind)| (author, reward_kind, self.block_reward)).collect() }, }; diff --git a/ethcore/src/engines/block_reward.rs b/ethcore/src/engines/block_reward.rs index 7144ed78d64..9488465e548 100644 --- a/ethcore/src/engines/block_reward.rs +++ b/ethcore/src/engines/block_reward.rs @@ -21,33 +21,48 @@ use ethabi; use ethabi::ParamType; use ethereum_types::{H160, Address, U256}; +use std::sync::Arc; +use hash::keccak; use error::Error; use machine::WithRewards; use parity_machine::{Machine, WithBalances}; use trace; -use super::SystemCall; +use types::BlockNumber; +use super::{SystemOrCodeCall, SystemOrCodeCallKind}; use_contract!(block_reward_contract, "BlockReward", "res/contracts/block_reward.json"); /// The kind of block reward. /// Depending on the consensus engine the allocated block reward might have /// different semantics which could lead e.g. to different reward values. -#[repr(u8)] #[derive(PartialEq, Eq, Clone, Copy, Debug)] pub enum RewardKind { /// Reward attributed to the block author. - Author = 0, - /// Reward attributed to the block uncle(s). - Uncle = 1, + Author, /// Reward attributed to the author(s) of empty step(s) included in the block (AuthorityRound engine). - EmptyStep = 2, + EmptyStep, /// Reward attributed by an external protocol (e.g. block reward contract). - External = 3, + External, + /// Reward attributed to the block uncle(s) with given difference. + Uncle(u8), +} + +impl RewardKind { + /// Create `RewardKind::Uncle` from given current block number and uncle block number. + pub fn uncle(number: BlockNumber, uncle: BlockNumber) -> Self { + RewardKind::Uncle(if number > uncle && number - uncle <= u8::max_value().into() { (number - uncle) as u8 } else { 0 }) + } } impl From for u16 { fn from(reward_kind: RewardKind) -> Self { - reward_kind as u16 + match reward_kind { + RewardKind::Author => 0, + RewardKind::EmptyStep => 2, + RewardKind::External => 3, + + RewardKind::Uncle(depth) => 100 + depth as u16, + } } } @@ -55,7 +70,7 @@ impl Into for RewardKind { fn into(self) -> trace::RewardType { match self { RewardKind::Author => trace::RewardType::Block, - RewardKind::Uncle => trace::RewardType::Uncle, + RewardKind::Uncle(_) => trace::RewardType::Uncle, RewardKind::EmptyStep => trace::RewardType::EmptyStep, RewardKind::External => trace::RewardType::External, } @@ -63,38 +78,50 @@ impl Into for RewardKind { } /// A client for the block reward contract. +#[derive(PartialEq, Debug)] pub struct BlockRewardContract { - /// Address of the contract. - address: Address, + kind: SystemOrCodeCallKind, block_reward_contract: block_reward_contract::BlockReward, } impl BlockRewardContract { - /// Create a new block reward contract client targeting the given address. - pub fn new(address: Address) -> BlockRewardContract { + /// Create a new block reward contract client targeting the system call kind. 
+ pub fn new(kind: SystemOrCodeCallKind) -> BlockRewardContract { BlockRewardContract { - address, + kind, block_reward_contract: block_reward_contract::BlockReward::default(), } } - /// Calls the block reward contract with the given benefactors list (and associated reward kind) + /// Create a new block reward contract client targeting the contract address. + pub fn new_from_address(address: Address) -> BlockRewardContract { + Self::new(SystemOrCodeCallKind::Address(address)) + } + + /// Create a new block reward contract client targeting the given code. + pub fn new_from_code(code: Arc>) -> BlockRewardContract { + let code_hash = keccak(&code[..]); + + Self::new(SystemOrCodeCallKind::Code(code, code_hash)) + } + + /// Calls the block reward contract with the given beneficiaries list (and associated reward kind) /// and returns the reward allocation (address - value). The block reward contract *must* be /// called by the system address so the `caller` must ensure that (e.g. using /// `machine.execute_as_system`). pub fn reward( &self, - benefactors: &[(Address, RewardKind)], - caller: &mut SystemCall, + beneficiaries: &[(Address, RewardKind)], + caller: &mut SystemOrCodeCall, ) -> Result, Error> { let reward = self.block_reward_contract.functions().reward(); let input = reward.input( - benefactors.iter().map(|&(address, _)| H160::from(address)), - benefactors.iter().map(|&(_, ref reward_kind)| u16::from(*reward_kind)), + beneficiaries.iter().map(|&(address, _)| H160::from(address)), + beneficiaries.iter().map(|&(_, ref reward_kind)| u16::from(*reward_kind)), ); - let output = caller(self.address, input) + let output = caller(self.kind.clone(), input) .map_err(Into::into) .map_err(::engines::EngineError::FailedSystemCall)?; @@ -127,7 +154,7 @@ impl BlockRewardContract { } } -/// Applies the given block rewards, i.e. adds the given balance to each benefactors' address. +/// Applies the given block rewards, i.e. adds the given balance to each beneficiary' address. /// If tracing is enabled the operations are recorded. 
pub fn apply_block_rewards( rewards: &[(Address, RewardKind, U256)], @@ -139,7 +166,7 @@ pub fn apply_block_rewards( } let rewards: Vec<_> = rewards.into_iter().map(|&(a, k, r)| (a, k.into(), r)).collect(); - machine.note_rewards(block, &rewards) + machine.note_rewards(block, &rewards) } #[cfg(test)] @@ -149,6 +176,7 @@ mod test { use spec::Spec; use test_helpers::generate_dummy_client_with_spec_and_accounts; + use engines::SystemOrCodeCallKind; use super::{BlockRewardContract, RewardKind}; #[test] @@ -161,7 +189,7 @@ mod test { let machine = Spec::new_test_machine(); // the spec has a block reward contract defined at the given address - let block_reward_contract = BlockRewardContract::new( + let block_reward_contract = BlockRewardContract::new_from_address( "0000000000000000000000000000000000000042".into(), ); @@ -172,30 +200,35 @@ mod test { vec![], ).unwrap(); - let result = machine.execute_as_system( - block.block_mut(), - to, - U256::max_value(), - Some(data), - ); + let result = match to { + SystemOrCodeCallKind::Address(to) => { + machine.execute_as_system( + block.block_mut(), + to, + U256::max_value(), + Some(data), + ) + }, + _ => panic!("Test reward contract is created by an address, we never reach this branch."), + }; result.map_err(|e| format!("{}", e)) }; - // if no benefactors are given no rewards are attributed + // if no beneficiaries are given no rewards are attributed assert!(block_reward_contract.reward(&vec![], &mut call).unwrap().is_empty()); // the contract rewards (1000 + kind) for each benefactor - let benefactors = vec![ + let beneficiaries = vec![ ("0000000000000000000000000000000000000033".into(), RewardKind::Author), - ("0000000000000000000000000000000000000034".into(), RewardKind::Uncle), + ("0000000000000000000000000000000000000034".into(), RewardKind::Uncle(1)), ("0000000000000000000000000000000000000035".into(), RewardKind::EmptyStep), ]; - let rewards = block_reward_contract.reward(&benefactors, &mut call).unwrap(); + let rewards = block_reward_contract.reward(&beneficiaries, &mut call).unwrap(); let expected = vec![ ("0000000000000000000000000000000000000033".into(), U256::from(1000)), - ("0000000000000000000000000000000000000034".into(), U256::from(1000 + 1)), + ("0000000000000000000000000000000000000034".into(), U256::from(1000 + 101)), ("0000000000000000000000000000000000000035".into(), U256::from(1000 + 2)), ]; diff --git a/ethcore/src/engines/mod.rs b/ethcore/src/engines/mod.rs index 8f67a039f56..167df0fd2b8 100644 --- a/ethcore/src/engines/mod.rs +++ b/ethcore/src/engines/mod.rs @@ -133,6 +133,46 @@ pub enum Seal { /// A system-calling closure. Enacts calls on a block's state from the system address. pub type SystemCall<'a> = FnMut(Address, Vec) -> Result, String> + 'a; +/// A system-calling closure. Enacts calls on a block's state with code either from an on-chain contract, or hard-coded EVM or WASM (if enabled on-chain) codes. +pub type SystemOrCodeCall<'a> = FnMut(SystemOrCodeCallKind, Vec) -> Result, String> + 'a; + +/// Kind of SystemOrCodeCall, this is either an on-chain address, or code. +#[derive(PartialEq, Debug, Clone)] +pub enum SystemOrCodeCallKind { + /// On-chain address. + Address(Address), + /// Hard-coded code. + Code(Arc>, H256), +} + +/// Default SystemOrCodeCall implementation. 
+pub fn default_system_or_code_call<'a>(machine: &'a ::machine::EthereumMachine, block: &'a mut ::block::ExecutedBlock) -> impl FnMut(SystemOrCodeCallKind, Vec) -> Result, String> + 'a { + move |to, data| { + let result = match to { + SystemOrCodeCallKind::Address(address) => { + machine.execute_as_system( + block, + address, + U256::max_value(), + Some(data), + ) + }, + SystemOrCodeCallKind::Code(code, code_hash) => { + machine.execute_code_as_system( + block, + None, + Some(code), + Some(code_hash), + U256::max_value(), + Some(data), + ) + }, + }; + + result.map_err(|e| format!("{}", e)) + } +} + /// Type alias for a function we can get headers by hash through. pub type Headers<'a, H> = Fn(H256) -> Option + 'a; diff --git a/ethcore/src/engines/null_engine.rs b/ethcore/src/engines/null_engine.rs index f9e698307d5..af5aedaac37 100644 --- a/ethcore/src/engines/null_engine.rs +++ b/ethcore/src/engines/null_engine.rs @@ -89,7 +89,7 @@ impl Engine for NullEngine for u in LiveBlock::uncles(&*block) { let uncle_author = u.author(); let result_uncle_reward = (reward * U256::from(8 + u.number() - number)).shr(3); - rewards.push((*uncle_author, RewardKind::Uncle, result_uncle_reward)); + rewards.push((*uncle_author, RewardKind::uncle(number, u.number()), result_uncle_reward)); } block_reward::apply_block_rewards(&rewards, block, &self.machine) diff --git a/ethcore/src/ethereum/ethash.rs b/ethcore/src/ethereum/ethash.rs index 16069c32751..6afc65a1146 100644 --- a/ethcore/src/ethereum/ethash.rs +++ b/ethcore/src/ethereum/ethash.rs @@ -19,7 +19,7 @@ use std::cmp; use std::collections::BTreeMap; use std::sync::Arc; use hash::{KECCAK_EMPTY_LIST_RLP}; -use engines::block_reward::{self, RewardKind}; +use engines::block_reward::{self, BlockRewardContract, RewardKind}; use ethash::{self, quick_get_difficulty, slow_hash_block_number, EthashManager, OptimizeFor}; use ethereum_types::{H256, H64, U256, Address}; use unexpected::{OutOfBounds, Mismatch}; @@ -124,6 +124,10 @@ pub struct EthashParams { pub expip2_transition: u64, /// EXPIP-2 duration limit pub expip2_duration_limit: u64, + /// Block reward contract transition block. + pub block_reward_contract_transition: u64, + /// Block reward contract. + pub block_reward_contract: Option, } impl From for EthashParams { @@ -154,6 +158,12 @@ impl From for EthashParams { eip649_reward: p.eip649_reward.map(Into::into), expip2_transition: p.expip2_transition.map_or(u64::max_value(), Into::into), expip2_duration_limit: p.expip2_duration_limit.map_or(30, Into::into), + block_reward_contract_transition: p.block_reward_contract_transition.map_or(0, Into::into), + block_reward_contract: match (p.block_reward_contract_code, p.block_reward_contract_address) { + (Some(code), _) => Some(BlockRewardContract::new_from_code(Arc::new(code.into()))), + (_, Some(address)) => Some(BlockRewardContract::new_from_address(address.into())), + (None, None) => None, + }, } } } @@ -231,51 +241,71 @@ impl Engine for Arc { let author = *LiveBlock::header(&*block).author(); let number = LiveBlock::header(&*block).number(); - let mut rewards = Vec::new(); + let rewards = match self.ethash_params.block_reward_contract { + Some(ref c) if number >= self.ethash_params.block_reward_contract_transition => { + let mut beneficiaries = Vec::new(); - // Applies EIP-649 reward. 
- let reward = if number >= self.ethash_params.eip649_transition { - self.ethash_params.eip649_reward.unwrap_or(self.ethash_params.block_reward) - } else { - self.ethash_params.block_reward - }; + beneficiaries.push((author, RewardKind::Author)); + for u in LiveBlock::uncles(&*block) { + let uncle_author = u.author(); + beneficiaries.push((*uncle_author, RewardKind::uncle(number, u.number()))); + } - // Applies ECIP-1017 eras. - let eras_rounds = self.ethash_params.ecip1017_era_rounds; - let (eras, reward) = ecip1017_eras_block_reward(eras_rounds, reward, number); + let mut call = engines::default_system_or_code_call(&self.machine, block); - let n_uncles = LiveBlock::uncles(&*block).len(); + let rewards = c.reward(&beneficiaries, &mut call)?; + rewards.into_iter().map(|(author, amount)| (author, RewardKind::External, amount)).collect() + }, + _ => { + let mut rewards = Vec::new(); - // Bestow block rewards. - let mut result_block_reward = reward + reward.shr(5) * U256::from(n_uncles); + // Applies EIP-649 reward. + let reward = if number >= self.ethash_params.eip649_transition { + self.ethash_params.eip649_reward.unwrap_or(self.ethash_params.block_reward) + } else { + self.ethash_params.block_reward + }; - if number >= self.ethash_params.mcip3_transition { - result_block_reward = self.ethash_params.mcip3_miner_reward; + // Applies ECIP-1017 eras. + let eras_rounds = self.ethash_params.ecip1017_era_rounds; + let (eras, reward) = ecip1017_eras_block_reward(eras_rounds, reward, number); - let ubi_contract = self.ethash_params.mcip3_ubi_contract; - let ubi_reward = self.ethash_params.mcip3_ubi_reward; - let dev_contract = self.ethash_params.mcip3_dev_contract; - let dev_reward = self.ethash_params.mcip3_dev_reward; + let n_uncles = LiveBlock::uncles(&*block).len(); - rewards.push((author, RewardKind::Author, result_block_reward)); - rewards.push((ubi_contract, RewardKind::External, ubi_reward)); - rewards.push((dev_contract, RewardKind::External, dev_reward)); + // Bestow block rewards. + let mut result_block_reward = reward + reward.shr(5) * U256::from(n_uncles); - } else { - rewards.push((author, RewardKind::Author, result_block_reward)); - } + if number >= self.ethash_params.mcip3_transition { + result_block_reward = self.ethash_params.mcip3_miner_reward; - // Bestow uncle rewards. - for u in LiveBlock::uncles(&*block) { - let uncle_author = u.author(); - let result_uncle_reward = if eras == 0 { - (reward * U256::from(8 + u.number() - number)).shr(3) - } else { - reward.shr(5) - }; + let ubi_contract = self.ethash_params.mcip3_ubi_contract; + let ubi_reward = self.ethash_params.mcip3_ubi_reward; + let dev_contract = self.ethash_params.mcip3_dev_contract; + let dev_reward = self.ethash_params.mcip3_dev_reward; - rewards.push((*uncle_author, RewardKind::Uncle, result_uncle_reward)); - } + rewards.push((author, RewardKind::Author, result_block_reward)); + rewards.push((ubi_contract, RewardKind::External, ubi_reward)); + rewards.push((dev_contract, RewardKind::External, dev_reward)); + + } else { + rewards.push((author, RewardKind::Author, result_block_reward)); + } + + // Bestow uncle rewards. 
+ for u in LiveBlock::uncles(&*block) { + let uncle_author = u.author(); + let result_uncle_reward = if eras == 0 { + (reward * U256::from(8 + u.number() - number)).shr(3) + } else { + reward.shr(5) + }; + + rewards.push((*uncle_author, RewardKind::uncle(number, u.number()), result_uncle_reward)); + } + + rewards + }, + }; block_reward::apply_block_rewards(&rewards, block, &self.machine) } @@ -512,6 +542,8 @@ mod tests { eip649_reward: None, expip2_transition: u64::max_value(), expip2_duration_limit: 30, + block_reward_contract: None, + block_reward_contract_transition: 0, } } diff --git a/ethcore/src/machine.rs b/ethcore/src/machine.rs index fdeed4c8e10..89a8aa49258 100644 --- a/ethcore/src/machine.rs +++ b/ethcore/src/machine.rs @@ -29,10 +29,10 @@ use header::{BlockNumber, Header, ExtendedHeader}; use spec::CommonParams; use state::{CleanupMode, Substate}; use trace::{NoopTracer, NoopVMTracer, Tracer, ExecutiveTracer, RewardType, Tracing}; -use transaction::{self, SYSTEM_ADDRESS, UnverifiedTransaction, SignedTransaction}; +use transaction::{self, SYSTEM_ADDRESS, UNSIGNED_SENDER, UnverifiedTransaction, SignedTransaction}; use tx_filter::TransactionFilter; -use ethereum_types::{U256, Address}; +use ethereum_types::{U256, H256, Address}; use rlp::Rlp; use vm::{CallType, ActionParams, ActionValue, ParamsType}; use vm::{EnvInfo, Schedule, CreateContractAddress}; @@ -122,6 +122,35 @@ impl EthereumMachine { contract_address: Address, gas: U256, data: Option>, + ) -> Result, Error> { + let (code, code_hash) = { + let state = block.state(); + + (state.code(&contract_address)?, + state.code_hash(&contract_address)?) + }; + + self.execute_code_as_system( + block, + Some(contract_address), + code, + code_hash, + gas, + data + ) + } + + /// Same as execute_as_system, but execute code directly. If contract address is None, use the null sender + /// address. If code is None, then this function has no effect. The call is executed without finalization, and does + /// not form a transaction. + pub fn execute_code_as_system( + &self, + block: &mut ExecutedBlock, + contract_address: Option
<Address>
, + code: Option>>, + code_hash: Option, + gas: U256, + data: Option> ) -> Result, Error> { let env_info = { let mut env_info = block.env_info(); @@ -130,31 +159,27 @@ impl EthereumMachine { }; let mut state = block.state_mut(); + let params = ActionParams { - code_address: contract_address.clone(), - address: contract_address.clone(), - sender: SYSTEM_ADDRESS.clone(), - origin: SYSTEM_ADDRESS.clone(), - gas: gas, + code_address: contract_address.unwrap_or(UNSIGNED_SENDER), + address: contract_address.unwrap_or(UNSIGNED_SENDER), + sender: SYSTEM_ADDRESS, + origin: SYSTEM_ADDRESS, + gas, gas_price: 0.into(), value: ActionValue::Transfer(0.into()), - code: state.code(&contract_address)?, - code_hash: state.code_hash(&contract_address)?, - data: data, + code, + code_hash, + data, call_type: CallType::Call, params_type: ParamsType::Separate, }; let schedule = self.schedule(env_info.number); let mut ex = Executive::new(&mut state, &env_info, self, &schedule); let mut substate = Substate::new(); - let res = ex.call(params, &mut substate, &mut NoopTracer, &mut NoopVMTracer); - let output = match res { - Ok(res) => res.return_data.to_vec(), - Err(e) => { - warn!("Encountered error on making system call: {}", e); - Vec::new() - } - }; + + let res = ex.call(params, &mut substate, &mut NoopTracer, &mut NoopVMTracer).map_err(|e| ::engines::EngineError::FailedSystemCall(format!("{}", e)))?; + let output = res.return_data.to_vec(); Ok(output) } diff --git a/json/src/spec/authority_round.rs b/json/src/spec/authority_round.rs index e355c6fe951..b4fcf4d78cb 100644 --- a/json/src/spec/authority_round.rs +++ b/json/src/spec/authority_round.rs @@ -16,8 +16,9 @@ //! Authority params deserialization. -use ethereum_types::Address; +use hash::Address; use uint::Uint; +use bytes::Bytes; use super::ValidatorSet; /// Authority params deserialization. @@ -51,6 +52,9 @@ pub struct AuthorityRoundParams { /// overrides the static block reward definition). #[serde(rename="blockRewardContractAddress")] pub block_reward_contract_address: Option
, + /// Block reward code. This overrides the block reward contract address. + #[serde(rename="blockRewardContractCode")] + pub block_reward_contract_code: Option, /// Block at which maximum uncle count should be considered. #[serde(rename="maximumUncleCountTransition")] pub maximum_uncle_count_transition: Option, diff --git a/json/src/spec/ethash.rs b/json/src/spec/ethash.rs index fd6b9fca59f..95c5eeff3cd 100644 --- a/json/src/spec/ethash.rs +++ b/json/src/spec/ethash.rs @@ -17,6 +17,7 @@ //! Ethash params deserialization. use uint::{self, Uint}; +use bytes::Bytes; use hash::Address; /// Deserializable doppelganger of EthashParams. @@ -48,6 +49,16 @@ pub struct EthashParams { /// Reward per block in wei. #[serde(rename="blockReward")] pub block_reward: Option, + /// Block at which the block reward contract should start being used. + #[serde(rename="blockRewardContractTransition")] + pub block_reward_contract_transition: Option, + /// Block reward contract address (setting the block reward contract + /// overrides all other block reward parameters). + #[serde(rename="blockRewardContractAddress")] + pub block_reward_contract_address: Option
, + /// Block reward code. This overrides the block reward contract address. + #[serde(rename="blockRewardContractCode")] + pub block_reward_contract_code: Option, /// See main EthashParams docs. #[serde(rename="daoHardforkTransition")] @@ -183,7 +194,7 @@ mod tests { let deserialized: Ethash = serde_json::from_str(s).unwrap(); assert_eq!(deserialized, Ethash { - params: EthashParams{ + params: EthashParams { minimum_difficulty: Uint(U256::from(0x020000)), difficulty_bound_divisor: Uint(U256::from(0x0800)), difficulty_increment_divisor: None, @@ -191,6 +202,9 @@ mod tests { duration_limit: Some(Uint(U256::from(0x0d))), homestead_transition: Some(Uint(U256::from(0x42))), block_reward: Some(Uint(U256::from(0x100))), + block_reward_contract_address: None, + block_reward_contract_code: None, + block_reward_contract_transition: None, dao_hardfork_transition: Some(Uint(U256::from(0x08))), dao_hardfork_beneficiary: Some(Address(H160::from("0xabcabcabcabcabcabcabcabcabcabcabcabcabca"))), dao_hardfork_accounts: Some(vec![ @@ -256,6 +270,9 @@ mod tests { duration_limit: None, homestead_transition: None, block_reward: None, + block_reward_contract_address: None, + block_reward_contract_code: None, + block_reward_contract_transition: None, dao_hardfork_transition: None, dao_hardfork_beneficiary: None, dao_hardfork_accounts: None, From 3e4a5255200b5babfbb76017866b0da77d3762fd Mon Sep 17 00:00:00 2001 From: Thibaut Sardan <33178835+Tbaut@users.noreply.github.com> Date: Wed, 29 Aug 2018 17:56:25 +0200 Subject: [PATCH 14/15] Update hardcoded sync (#9421) - Update foundation hardcoded header to block 6219777 - Update ropsten hardcoded header to block 3917825 - Update kovan hardcoded header to block 8511489 --- ethcore/res/ethereum/foundation.json | 142 +++++++++++++- ethcore/res/ethereum/kovan.json | 280 ++++++++++++++++++++++++++- ethcore/res/ethereum/ropsten.json | 155 ++++++++++++++- 3 files changed, 568 insertions(+), 9 deletions(-) diff --git a/ethcore/res/ethereum/foundation.json b/ethcore/res/ethereum/foundation.json index 48e174eedd9..3332ad6f971 100644 --- a/ethcore/res/ethereum/foundation.json +++ b/ethcore/res/ethereum/foundation.json @@ -174,8 +174,8 @@ "stateRoot": "0xd7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544" }, "hardcodedSync": { - "header": "f9020ba0bb120488b73cb04a3c423dfa6760eb631165fa3d6d8e0b1be360d3e2a00add78a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d493479452e44f279f4203dcf680395379e5f9990a69f13ca02d2cbb3c43370257122898259f1e06da38fd23031f74b40d6bd022b037ecd3daa0107b3a01662ca77aa1c72cde45bd66c062d781310d7a364e5b6442bd791431cea011e451bfe7b89addb96020182e0e7eb448d0a66303924a2835a149247bea4188b90100000000200004820000130000020000322004002000140000801000081208000880800200100000000a080000000800400000000000080240800000020028a100000400410000001088008008400080000100000000200000000220804028000000302000000180200c004644000000000101800000040040020200100020100220200a00000000280002011040000000000080a00000002002048000100001000206000000c000002010000004800030000000000300884008121000208020080000020280000000010104002000004000002084000c08402820000004000001841109008410040410080080004121044080800800000000004858040000c000870c64944ccfd130835aa801837a212d8320dc6b845b452c758a7777772e62772e636f6da02078861f3b30aaea6fad290d86919dd7542433a56edc1af557426cbd2eacd60d88a68a26940894b23f", - "totalDifficulty": "5282739680501645457616", + "header": 
"f9020ba0c7139a7f4b14c2e12dbe34aeb92711b37747bf8698ecdd6f2c3b1f5f3840e288a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d493479452e44f279f4203dcf680395379e5f9990a69f13ca06e817f8a9b206f93a393da76a3ece2a74b98eaecc4dae0cfa8f409455e88ccb4a0d739197170d2bc6bbb24fac0ce695982090702082fe1541bb7634f018dfe87b3a038503b7299fb1c113a78b4a3a5dfd997ef32e7fbf722fc178dfcb21e1af1f7f5b9010000020000000000000008000000000010000000000000000000000000000200000002000000000008000000000000000000000000000000000000000000000000000000000000000001004008000000000000000000000000000080000000000008000000080000000000008000000000000000000000000000040210004000000010000000400000000001040000000200000000000000440008000000040000000000000000000000000010000000000000000000000000000000800000000000100002000000000000000000002000000000000000000000000000000000000080000000100000000000000000040400000000000000004000000000000000870c90e059b181c6835ee8018379fb9583065150845b833d198a7777772e62772e636f6da0171fc2d066507ea10c8c2d7bedd5ccc3f0dfb4d590a3998a614013326c0b213a88b4fd884826187393", + "totalDifficulty": "6255555800944520547241", "CHTs": [ "0x0eb474b7721727204978e92e27d31cddff56471911e424a4c8271c35f9c982cc", "0xe10e94515fb5ffb7ffa9bf50db4a959b3f50c2ff75e0b8bd5f5e038749e52a11", @@ -3077,7 +3077,143 @@ "0x8a5cb6854c19a865f51e3ee9eaf8e843a97b272f6467634ba40e547a435ef624", "0x9afe42a0dffca8ec063c83908fd6237d6130c9dfeab57078bdd02b6ac6d0ea07", "0xa05cc6108b475d3e68e280e98f514cfb6df4f004e1b7708fcfd4528d346bea6b", - "0x71f10879b875caefab46669e8525b9c0487bbe3247e43a6cdb1dedbfb4d4ba33" + "0x71f10879b875caefab46669e8525b9c0487bbe3247e43a6cdb1dedbfb4d4ba33", + "0x7debdafd2b1410469fd76e2a39dcd1b8f400292f5359ecd39186a5be0f9e44b8", + "0x076f8741b668f3f715a7142f5c1592ffb58ca13e8612edc340619a1ca6ce3f2a", + "0xe312bb85cd945eb45f72868e6651633964f22ce3ffcbd0f3701bf57ac0c13edd", + "0xe6eae7aad7fed9c690a14eb7aa107c83a5e54736940ac6d82e251181e1103eae", + "0x794c8ade9063600442f83d4be14d7bbb191c692314253a2c62dcb33f3e7d28bd", + "0x6fd9db41dc1074a4349a6d3881757ad931bdf9ec261fc4e0a5ba9cfa5fafbf41", + "0x37557ead7100564651992a919949af3f39f3eacc03aa0ea158dd59abc4e2a93d", + "0x7a3b01cf210dfc42e4e41c95af1af357d15409fecae888abf4fa35149f44ed2e", + "0x89e952ed9afda8aa0ff80ceb90e5857dc7fc3cb6259ac6089da5fed49223cf42", + "0xf1ae8d3769142a1f3c8bc0ecf8454f05b09354a2ad5c921d0a6845417bc2f244", + "0xa04e022b2afd83a1e14d0d67595070b29f196dcfc05d92b2ce7c582d67af4a00", + "0xa3a0c85dfc3d8055a29de34492b3e231691bb6e9ca06684a205837ab9870c72e", + "0xee2529ad0af60748cc0bd8d97189d878643a52710dd200f79f27d8a4a4c8c093", + "0xac88fe07805a058f1341ed89795f0e5e9ace050de0742f863c1c9c63b17aed12", + "0x8fddc716ab43397541f6b22644949c6d7214b01ca8223310a34840df37c47746", + "0x3cbb4d1a2ea6fce170de64b1f4bbb8d9239fe191018b737e62eda77aeb7d0306", + "0x3807421ad84d6f2f9d5d830364a0455d6846e114b87cd08a3cb9a33e40c7c084", + "0x283199cbf0e1c997025bd36522619417d9e6184073b830fb22c3de106a68b598", + "0xcbc85a7ee2122abad27b1f5a07bd15d864af382a62fd552092b7feec4d2e4856", + "0x84586eae7108f2e1217d53be81f26bff7cc35409c64c1af827bb5263d813302f", + "0x5d2d93535aa7db5e2204e419ecf4d7c0647ce40522806bd24ecf348eb49a1f17", + "0x3a6118c84312f120c0d3e4ae4ec0b8103448da31953fa27a8400ed7e4875897e", + "0x4a0d1639274d31dc5d9eb49338b204e9cc9610913c0320e7da74a02ed2486fb6", + "0xa7770b7cf4f83c387344cff6ddd3441dd131333df0f210a57a271fc1a61f08f8", + "0x35066ce607eafc9bdd3386097a98c844bda683ce29faf25708b1a2f3e25c9aca", + "0x7068b1f5c017ff5b995f2c65db7daed3cd9fd0e658322ee2debbba7de3ebdd72", + 
"0xfbcf8cec0ed675d12ab4fab45754fd00a329d19ad77afe4413a0581bb6dab8dd", + "0x78035e412813b0a3f7e3f46bafe436b57b83278499fe8b8a1bdb3066f538d0b1", + "0x28bfbcd7eb38793a1ea711b6dd73c55da0faee3a9707c743737f0692dc6b56a1", + "0x9fb1607599329b61570a0daeb7b1a93210ac286e02a64ae309e1950cdd76c5e5", + "0x8903d98196291218a50fe03a5cd51592e1b458315491d735187cb39d0e21f397", + "0x97aa20d84821d36a44ce18eff501df95549c8266220ad612d0ba9d293d65271a", + "0x3245e35a2c6d46002dbf5bfedee31fd28a88409edbc55ee1ef7972a249b1cce2", + "0xd6149428168dbf78ab2f706f041a61e3074e3d18ed47bd343d54c3e4f2e051fe", + "0x9f4a04d0e3a2b8773a258bfcdee5242e44761aa1a1d2f9a7162f04a2b85de6c8", + "0x0162a3449c2be2a0a45a771027e5fb67169c5a56c4ef6f386093fa79f9942fce", + "0x5c1196a72f2c6fb5771e0390ef25499acd52c6aeeac3615ff17dcf2c405069ca", + "0x319097551b7df30bc67f5ee6f068cf3b34e71371b91635e9009af7acfe97e12d", + "0x8b89e2e091824d0c3276128543991cf7c35a82bed4b92b4e67aeb4652033a823", + "0x75d6dd431d5568625946b3578e68d47523518cb2e141369aee5273fb7cdc4013", + "0xc473b7b23c164759487af8c96865f0f3af3f3e40e9607665f94f937c3334f604", + "0x1f61783925fc03f3f5465f37aad2276c466ddb91a47db036f042c0b2edbbf643", + "0x3f6370976528cc31e4d099711329d5c670a2538623621b164d470fd7ee2cfb17", + "0x3705bd7548fcdeb0dbb3162394edb2adcd5128524eb837b71a04ff0c9c21c22b", + "0xe15e99e427efae16230af47e59ff89b76df25db44fa2dfe95d2419b6de35ee7a", + "0xbb4a17b5e4879b52e6a4838df9525ae01b168197db2910caea805011588559a7", + "0x60e6cc67da461bafef5c3b7e4f9e964a24c3146c4fc22c2e046301e457d1cdfc", + "0x72ef37234da6fe157e0e87b11528c3e41d115f10f1dcfce60e54d83c1406989a", + "0x63c3a73812427f73be785d4690a6ff81912d3ac17c8d76970ded284383ec97a2", + "0xf88c0457e00ec47c27ddc6d99fe6c0b82bf9249018a96b1c7e6b90e906adfaad", + "0x9258fa102e450844e091b58dfff2fd05b8fa099d53667626e340fda7ff7f9bd1", + "0x68adeaa1063528386db31407596442f815769731da8b560272ba3e1b8817a11a", + "0x222a41b6e681d2acf8a9fbd9310d34145a3c49160c29681b95c85176abadddf2", + "0x00745c4bb8cf5fb63700444f9258eb94e4b527ba8739fed4bbd7b13083854929", + "0xf27199044245d103b296bb81188982baa87175843b06424e0b5fe9dc6e005705", + "0xe00c3bfee8d71930faae9ca720f25f342ad166f438c07b0664c0c2d6b9f82c58", + "0x05e2bd6caee8a926c0f61bd0e5553b304656cf4749f54bb45b72d2a84fcf2378", + "0xcff8627a22e37d1087c3b858079a4f90a1fac65cd397dce26be8c3efe2515e1e", + "0x4885a5f8fb2e367a9edb32b02ad4390a4ef8f317a0e7d3bcfd5eccb9d8d05b1d", + "0x194638cb01699bfdb67b09ce5e2e8d5dbcfd403ae325a7a0311923ba5207c5b6", + "0x20e7c0ec1db7ca6adeeeb0c7950b7911e1627bb173425c4eaa105cfcc9c43db6", + "0x1010eb2f28d04da879c67f4c9e51fd480753081eac6123c5161502f3180764f0", + "0x09a0d0aac41a4e2fadb48c1751011fb46774f10ee263567d823e275aa0a33ec3", + "0xde190e4dde46d967a4e32ecfda15323c7f4081f40d5f245916055af44b008e81", + "0x21870402899f8c05003a175f2c0d37fdfb52c9bda3eea3ac8616c832fd8ac173", + "0xbec735492b66890aeb0ccb5611a84a908477269c2336e076c42cbb620cee8861", + "0xb9daeedd739766f82a95505a9e71daa503a5f0c3ee1a6a97f842b2ffb6ddc6ad", + "0x9f30d706fc86c7322a10a3c572df2ef4f2f49bcb49c5afc2c6299cf051e2e38f", + "0x9dc1d641fab155885db408328e60c06d97a0e3327b64a18d46e7ced7cacff446", + "0x252594a4da3cc4e794d954e88c15a2c847dd84d895eca06b9365a85a71fc46eb", + "0x979ea7682319cb0674066b10eb7919a87ad1ec0618ddb24723f13c55899cbb6c", + "0x570b4dce3b4613a8266688291f06bafa9d3b7828e3e6a9bfd27e7d2f3ba59125", + "0x39e97e3c20af2dcfa347ec193cae2c00100effd4f3987bfe21973db8c5e11fc5", + "0xf66b0859b41bf7dc53dd23762ecc593f4572a8a9f82b4c51257af1eda6344bcf", + "0xfc32a5b84537ed5947874eca72d189f74676855be5ded4d32eac2ebc0c3a3306", + 
"0xa89c577f12bedcc225cdfbf86ebe8112aef97b9c34a504dda52cea4f40f72be1", + "0x67e4b71c08fd93f1c37cc60826c5d8c18e14b89a7f271e0828b114de39aff694", + "0x342279789cd6a721f5f4866e3a16d19f88b24ac277563ec7b0716ecdf8dace3d", + "0x220a5babb290dc16440f4b01bc117c41af5073110b1375cae0317f9fd364d44c", + "0xbc1019eec10ff493e095915f66d46f0855b55aa681352774dc300d54147efb00", + "0xa9cc80ffc08c2b4c38fa178e3ac56ef982d0487b7139ed241fdda3894f4b67e2", + "0xb35267a4b6020a2e53e112f4ba7807ec50ba78a3b2d64fc3e3bfe918bb25bd9e", + "0xd5021c0c199dbb1b61e5e0d4bcd59fd064ddb6d6611421e57161f9c37fba91f0", + "0xea0af5bbcc015b06951c9ff3df3c233f4bdd6ef8eab97f0b3a9e10b01efd7a5a", + "0xc060f20f45851e6de2fe1dcb6d8996add5335c34892f84a1db711d0c47c5da4a", + "0xbd30199b078305b459565598fea6740a4d187e4813d3887f72d7958ebcc8da48", + "0xeafa07e96737a0e53ee7a09bc1b394ae4c1ec6c937901a8066b0cb6b6908aff5", + "0x68abed404e0a66ded23d152fc5544b14b0a4fe918a51b667f2dc827575c12136", + "0x27da4b5111565097dba6e6dade4f0275ea8234b50f391aa270bb3cfb361e6331", + "0x85126b95e3532b5045710f60d0aa3f6b0e801130722bc7d85a67479e0b1fba25", + "0x658afe5a3d3b0916372785f952d92069045b747fc67be6c8bce3d695a434d058", + "0x9017ad7725867d37402da5ef1727289abcdf9982a0c7dbd7fa6f175ef8ff5f90", + "0x892a34192371a1de6d4c2ceabeff5aa9e26c3f41e74260d305872f366bdb73d1", + "0x786a848c13f84c87ca554654f0ab35f7fea5014669d919595e087e6463c47253", + "0xf1235641dd0ac33ed19e39ee58e282f2e0dd8edc0949e0510f2c65469d85e1ef", + "0xa55273939373f7a4052d8bc36501fe17d9b571fd170041ada7b4385d49ecb5e6", + "0x79ba81c39c2e7dad9768cdbf754242dc2b5b74fde750c32ee153ab121a633406", + "0x74ef128355bdc842c1b109d4b1d3dc6f85a21a110b32fe1b4ce71cd282a20ba5", + "0xf0a05137693ab9dae7624cbf4abce1554d4921014cda61584d649c66e9eb09f0", + "0x5566a8d7495aae3cb42e66bcc683bb194f53894e3281812484bf36da583b7d9a", + "0xbe65f10ed3eb5e862850eb4e30c73172f5417634b087b1dece7df97dda2857c9", + "0x5ad40161397a2d0f53d71284d878e711b6d73930d2a79a789e4bf08223e9eeb9", + "0x1a88c4e12ba89abf67c8f8de223964e4f3d0ee8951b9a1851c9172b278b956e8", + "0xc1bae22721de65936975b20b01380c7707cb603622106a166c1472bdcaaaf94b", + "0x68106522840c17b0e93b0cacfdbb1d2c5b219cb90aa61a5d0b04a0291caa4b82", + "0x0e8a214b18b92fc025bd374a8d9859244b3855a7d3a364d1a0e092546a17b2b0", + "0x584294fe2fc330adfcbbaf251480f2c727da2d38ce54ffcc1b363adb9c0fdb0a", + "0xb86b9042acd8d6dd4eaa967b3dce6e4e49cbad95f4a7cfea95299da0cb3aecab", + "0x99d4f215dabbe6bcd894d4ce7ee6d16a4543c38d9ab19a4f3b933f37eaa7fa5f", + "0xd078bfda23ecf966b656aabf0bb69d27902d33773b2471145268426846a5a1b8", + "0x87234af13d71e681c987c38fcc3661889b406e298e3a1ffbd5115fe6fe6e65c8", + "0xb9f78fdabc8cd4fbb08fd1e1d00ed552c4bc7cba0672ce91c52deb2834a7b223", + "0x76704df0766b51c9327acf08ffd35fb9c3f041b73808094b0d4ae1f7e7d7d0b8", + "0xb5e4ec16c993e2ade6e951099512deb2f8fd07956e4d97dfc639a7eeef96d336", + "0xc9c919862f5a82a5381e7def2883049fbbb99f24be9902152323811e32db40bc", + "0x8df21edd784b8b776ffd603a6da87a6602e6424cb3905d977b909a7b10160abe", + "0xd5c524bf24c3c8f2e9cccd5aff20b96fbb8f2045f37ef1014eeea1b82787361f", + "0xb64756c799db5e1eca16416cb833605916a9bbbcb3c82a54def34f16e6fcfdaa", + "0xfac273bab2facbfc28e2c53c03a44e69ddadfbf57b08499bb43855abae456f1c", + "0x43f8990cdc4735bc47fd89cd23c9a99d5453885b1903d4b3185b6b0413735ff6", + "0x7420a69d819df5f4106fc9e315e8c125c8876a3a1cae8ef7198a921c121792ec", + "0x696861af19fa56f07d5e2eacffa3508cc3638b7acfe4789c002929dfdc47fc3a", + "0x86c80d2150fca01803a41a900e66b764ce82af2e0ec19fde53971afbb6390ead", + "0x9cf14af73b44b555ceec13119567723ae8e8af927a6f24846635ff5000acfec6", + 
"0x95b50e682e4b6b2a08606a62bce427682ce484c2056d891dc206fae9e062d3f0", + "0x789755e975f432a059cb5cbeb067c8f9dfd97b54d8d1a45a1e47592d9eb770d6", + "0x87b682b780890ad3d542c76548d7d5287fb244291d62619ed079340293a245d6", + "0x8c0155b066048d9e95f5eb1b6ecfcc179feb71ee0426301b03f778573eeee7a7", + "0x0fe45f09ef25d05e5c42751a35714af86a5d7a4fa235956edaa36dd0e5ea7533", + "0x3b5ee72615fcc04f48cec067ba2d1d9bc0e64c4cdf9d5bcf97c1a999ae940245", + "0xcf773973676c9b0e31a549d78a6bbc79826c471b6fed42078d9b35a08d1d28a0", + "0xd5ecadc4dba308c0d790adea1e118d6984716600091d341276311631acfbb267", + "0xf344c0cf6516f0fa6617e48076726aefbdaaf5a31f67ad8199bc3f6e426bf904", + "0x3f3d2d33f36ba9009e9a72f3f5bbcb5df5392a19fc7afc8d37823aaf52b03477", + "0x346a89411f090d559ff90e670bf0a385b1b09f117fc9ffa18b09d3b6d5d8e45c", + "0x5bc5689e2b4572b8ceea472cc7827e22cbfd018920beebf5c5b25f65f5cd5357" ] }, "nodes": [ diff --git a/ethcore/res/ethereum/kovan.json b/ethcore/res/ethereum/kovan.json index 47f80082f17..28126713582 100644 --- a/ethcore/res/ethereum/kovan.json +++ b/ethcore/res/ethereum/kovan.json @@ -56,8 +56,8 @@ "gasLimit": "0x5B8D80" }, "hardcodedSync": { - "header": "f9023ea070413bfe3ceb9160c7dee87bf060a0cc5e324f7c539cfce4e78802ff805063b6a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d493479400e4a10650e5a6d6001c38ff8e64f97016a1645ca0f8ac12c30b4fd0d27a1a50c090659014574b554ba6e9cdb76f57efbcfbd390a9a0b474ac6cc4673c17c5f511a8b43cc44dbb01bb028735830163667d7a3a2582b9a0bcd44b7c04fa24760df7d733ca8ecd99e8da89de0716e6017fffa434bfd7519ab901000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000090fffffffffffffffffffffffffffffffd83755801837a11f88301d8de845b27471c96d583010b038650617269747986312e32362e31826c698416c9d1c7b8418bc805f23fb01fdd498b37df5519f49691d65160fe6a6794b8106e2ecc4782407f0dae3a512546b7d93e89bbb2a761c750553deeea1f9401231f56ae0ccb059201", - "totalDifficulty": "2654916374389120143910668097894183918476475680", + "header": "f9023ea00861b3771ffb84fce48b8ba3c54a09f81e91ccb38c401261f06d370098889a43a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d493479400e6d2b931f55a3f1701c7389d592a7778897879a071cc81d58cdd21d1e17f7389e55c530cd9f94cc15bb32af6477320682327dcffa06090021a7c09ae5e75e443410ebdb76de04f1eafb0ab910daae96ee6eec560eaa032510bf257dd03b11f3b4761b94b495a5b5a18cd6eb17c77785e0f46e2ffc882b901000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000400000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000090fffffffffffffffffffffffffffffffd8381e001837a12008306697a845b83bd6096d583010b068650617269747986312e32372e30826c698416e0ef58b841117e2088e2835bf2afcd5d48f42b3bf2a1f33435f21f089ead2a6bae7d01c1486e645b460bb3c726a827ff1eb50e0579f3410563bae090fc256cf1d8d594b82100", + 
"totalDifficulty": "2845866505151538604560067685603735513869853136", "CHTs": [ "0xdb9557458495268ddd69409fc1f66631ed5ff9bf6c479be6eabe5d83a460acac", "0xd413800c22172be6e0b7a36348c90098955991f119ddad32c5b928e8db4deb02", @@ -3940,7 +3940,281 @@ "0x36e94b03402f18c689f5234973ce1e626a82aac085dbdd682b51cce21f8c1872", "0x00abd1d34c7e55f58681866558cb844c11faa55e8cac70ede75811f55341cfde", "0x9983fc20e63e77ec0680522035b03167403681674ec62293cd6b7fe360c69157", - "0xe98b658fb8b6b7fba7463562f86348bf1e3534bc9148e8559423b3ee5ab68472" + "0xe98b658fb8b6b7fba7463562f86348bf1e3534bc9148e8559423b3ee5ab68472", + "0x77ea189d3a408a8c5b1792881d93ba7a471f90a976ed334cfbdf865ff94cf20f", + "0xfbe2fb93b3ecb384679870205f2be2f47140f8d832f841ba8653e00d68056c82", + "0x330f5f04314ae57097db5258d861b993cbdc2ca0522421917463b94636a99d1b", + "0xa59cbefffd0e1cff04bd4004d21fbb64c4ca542b98220487ccc79a39a6a0fba8", + "0xe08f21b0f5ab216c1009c904939515d0ef8d8ca80f8c3cd5465944bb6550e728", + "0x37f1db42a7e99ea8b6d7f9ec5bfb39f9613cf79d98345816533b85726c64dab8", + "0x8c226434c5115b7ac0f894d8c4389a1e38a3118fa0993b59a42291286b85bd7e", + "0xa9977dc362eba9d32d13188dffa6cd0178e0b8c29cc24a3c04cd6cb0781fc133", + "0x502a68fcd1c187d662ff6852179d7602b04b5ec3141ffb39841b571bbc6cce25", + "0x09a716a32626ad3443d7f5f6e5fce5145e8b962a531464e36aae6356ae93b15f", + "0x2c165daaf0e8a4433d8bf97091bb573f1a7cbc7e82f2af353af9c580fbffa8c6", + "0xd8b47ed7ed3bb85bf44ac2b5e5916bbb282353f71dea6e172434f05cb1276c4b", + "0x2bff102b5209c88370b2deaf941653c364a90b39a6c3402336b8b3c9b47731a9", + "0xf6011e416be00e243d9b6b57df1bec2458d271b2a8a964292521f6f8bc7355de", + "0x8f4a6d08359aa81c14391d8483bb8e621c24bac1e32b2c5c505d5617487bd06d", + "0xd2f89aa90547e1956491879dd5924a50142d76460532f00423cfe70f36546873", + "0x929b13fbde5275f999f145a273219d6f7f4c25ac29b4eab3f89d68305e7f4341", + "0x1e7e79d55594c3555eb7d90e4b5bcc8b59ecd98848884b81ac5c2d53bebd5cbb", + "0x11c2705c4a89fa9187f63dfc7a8bd4e2c087d6e937e777393f1e2856f6f00104", + "0x294b40af144ac5046bd2a5451d1cc9900858e0e351f3f78b50b2dbc203111600", + "0x26432c2ba81f919c085a4d2326c22bac341c17f456d8a93a1971c0fd741ea79a", + "0xc488baa35e04ed6bf20753fcf856f31a98c00be65d51b73c6a5490d18fb89d71", + "0x915e1768627ebbee21159a0562d8a5df69b05f057fdc8b4bb69bcadaeadf2ae6", + "0x2303b2be6801e3fe8c02c0d0355069240da4f617b47a4ce6bbfaaae56a1e0915", + "0x0e4af9d5e2cab6a62d9203979db08996bd1f111a315049859e2093e83e465262", + "0xe54756890992736382616fceb3999d22ae462b9b5f8b1da072277da2df6b5f4b", + "0xb5aa407d54e2ea651563333d3d330af0cf45a992bf89db0c229962454c4da4cf", + "0x88832a6db2c8c3d899dc59154ca323c08c5eaa8c05d142978490246e7b4ba7ad", + "0xdca9619220c991618eca5cb18b1c14c6fe4167614a8360c9e1737c93a129e714", + "0x3fad9a66462792be9f771b39b6f64e537683a74441b85b53f9c5e6a2fb687100", + "0x6dc5466ae00317d52511d9e153ffa37cb6060639a010c8db25b33aed702b486e", + "0x2a2ca3148598d3106bae23b6f66a24e3e93afeb576d283ba6822220780008e95", + "0x480f699ec76a16530e382c2e8d91c7edd3ec0d393ccdeb28c7d692eb52b0af96", + "0x082d3f5dcff2a41e2fad715d64eef361271648bd976d895b40765089c544b65e", + "0xed14dc5ed74dfdda8b45dfcfe99e8581767cac57505805a4945b79e2110bf92c", + "0x6890d1a6c90f5c1020ea4439957ea54df3cc22f14e22047f63c9a93696210b95", + "0x55ae4e30fc02206a0f389e03c0ef5b1409f8fe34d6a5dbd5487af608e3b82369", + "0xa709e308a7899591337d59ea459098364c1d6638202047583851acc5e6df5017", + "0xaed8371c95bccbe53effffee29f40a3e9656bc95cceefa364f7191415dbe15bd", + "0xa7450bb8e1ff3c7b53bada0cb8ae783e561a3ef74cd3be0d19cb9ac5ae807aea", + "0x9fdebd0ae46437e3421d7f0a95a76f47b34f0e0eda37d69c078ef5b44411c027", + 
"0xa18a165a98307fb994401171c41e375a4637fcf2beeaec8c18fd24304b068570", + "0x930f3cd1edb90c020c19a6f6a8b86c86981c3ebb8fc19073f09c345fa4b0ea6e", + "0xe4c0549658dbbf4f541c60606c8b85dba190f58e5af0bcfbed101e0ff4a8f455", + "0xcfd5428494534819ee14e002c69987fc54207917e83a2b6e97a1e1b801a5d939", + "0x0cc3a6a2a5de66f64f5491908b5585eb3464fda42c7a58a9100a8cd0ce763b04", + "0xcf635d0b4522c64a9b00902770ae33b618580983bb32b94134a69ca8f6c6258f", + "0x2cf5c41cf911dff328ef15253a7419733a104cf9cd1846af43d5fdcd914b321c", + "0x6d4ea125dbf83b8a9dcba40c4a81673a365ce81dd8eb1fc10510ee7e80a40b1f", + "0x9c68f243fdf1738f9218c30664318278afa8f461c13bf5e1bf30133d3fa6347e", + "0x62aa880648363c1f3b5bbba3e73dc58566870ddbde0911c5e11aca003a13b525", + "0x286a2b6f2e35c2a6900a02d37d82263dc55a3c40d6c40488258af61f8cb8b589", + "0x296136ec9fba18ad43a5435acbc3006e52a182aea1e3eb005f25ceaafde2d0de", + "0x9ebd2127fa6f5f43a03fcad8dea37bde3893f627b48e63b9e2c6e33af811a95a", + "0xe8e58e2b6fe45ba1881eef4ec1e1d954e12793ee68060d50b9c9502009725317", + "0xf831613def863279307f637be60e9608179e9a148557585034d20816ed7d5553", + "0x9ac311437bfcaa1c66e1c32fa18f8964ca9d9ffe3cb3be306b64519b35051e48", + "0x931a4862f94054ec0f96037a225f1c3421ef99d8e4a8f774407cf913273bf4ec", + "0x07dead0442ebccefbadb2859e0f462fe8d3e6677c7fd04b0f35165ceb017c171", + "0xc4afb80e1dcb56ab83d5b20475d2212b6c55ab63fee058a914661657d2b1187e", + "0xe198b8ed96c1b7dcefab5197036f1e5789ea6622dcca2d817e63537b00710b6f", + "0x728b8b91918a059e3f46ec7658bc5948016f76ec200d91ec3a163bad8ad6bfd6", + "0xacdebce23301e49e068c83a73ad8484d8af7a0c28d066e62787542e154b42559", + "0x7f66807da0c2b155126b1d3a69488b10de7f0ca38f6cfe6fa670dce511a1f667", + "0xabf510ce1a016b5b3d4610b3cc5c9dec43a6a66f3555a70e1c16cd451e8894c6", + "0x56ee11eb3233fefdc056827fccbea1ff2be8a3f460038eef68a1c730bc1104cf", + "0xbc4df43d4acd0fc2cea064abc5b5d1754b29073533652448d44c09adc1d32998", + "0xf61b97d2d1e537a6d20438f4729bb59ffc8db6b7375c6e710b4be9e136f1e19d", + "0xf89f04d5cde25522103d1e175d68dfb4955b4519a5c2cda1a041298579174be1", + "0x702c60e25d6a10ac9657553e376f2385cc94737ee292a3ae671ec02af982ad84", + "0x5194589410d5f02de764bba1c25da027861793270c956c56e67e017d89323799", + "0xc448a3b8bb1b6efed04d84f82d63aa0c7ca19d3cbf817efe4db82b897ad6a019", + "0xdee875d8c517db5cf20b745be7ea1d389612e47880657d003c6bfcfdbc8a6303", + "0x0528ce8bc52f4878268be5e00b5028c9af85cf61a6279e75240e2adf7b498ac2", + "0x6cc80f5c62a1510d9cc97ca8b4c6245938ddbefce782635dde0ca2516932fa21", + "0x184f9e305ce4fa0bdb47ad00bc7d10691bfbd7cb13b9c9d4d82c37678e8df080", + "0x69456f4aae64f57b3709d6c6701a1894802a4db96374da6b979b87aeaa5e301e", + "0x60d1bd0acd9013472acc0b8ce0b90a088f35d5c19645146ed8b3c6330ec92052", + "0x09f87c83c6be698b2eff45ffbeef79254e5ff8d3534831c79346e5b837110c1d", + "0xc5fc2dab2c43c27132f31a915423a6d6a30a560e4aa266483ef2ef1ac48d0fba", + "0x87a147b03150b429b0c86569db27ecde32c1bb9a48b1ea4426d8dfb895339673", + "0x1a68fd5e6efa8e5183843fad88d6445fe8b07c01b884f30563ce527e6b10749d", + "0x4123e02f2c2869bfd99f3138aac82bcf1923260ac28f33bc4ff2dfcd54c48e4b", + "0x452345c2de5fb18422b4327a79053cfff33f277f0e3fc69bf2073d0bda4dfb74", + "0x830eb26c402a88c6281b0a48a93ab16a64a8e4244f87d589b471a3ceb4577f6a", + "0x84445f1f8805d911009a3e55ebda5e12ff347e5a34d0028ca44a2a26846ba903", + "0xf91a600ecc8aa47f838a8d877487c15babc10ff97a5be8bbe4c3eb705fb04dc5", + "0x01e749439d413c66340934dce9d4c55179375208069fefbf89aef7c3c7f02bb8", + "0xacfb6917a7365046ccbad1ca8fdaccf8164c216ba8d96cf64af5340ee3aec2c7", + "0xa1c20a4bb21a0a99f3a38818c05a3ba66e8321f24dcc7ab26487810be8211dbc", + 
"0x7c88ea06f3c8cab9ac0b29376f53fff4b56672a1704520fbe8de0e3ca0bf8acc", + "0xc19e59ef2361963b94bb211b8f6c172670ed83fce3d54b3e00d1a82f292d1d2f", + "0x386ebf9ab1a551ddaf599d9384e70a34a961ec55d5f609c088c6fdbf38add80f", + "0x8a635842c7d38cb2bae11ebe3f6ec09e2d41ef6b7093e4ea8cbf079df97f06e8", + "0x2d12afbf38c0378d287693cc7fd49f3e068d454ac5ea7bc76f6b3e505bb09f27", + "0x33b7eb72362caff0e54cd853549bc4a802345d774856558a8b128e494ec63948", + "0x31eef3a0c2c9f586ad2305d8ed9831619ad6ce2aee9a16800cf4aa048b1d593d", + "0xfbeb707dd62204a0f3a06ef9046fba5b3b88fff3a38b5b08cc3213f5965c69c8", + "0xc8913325245c39d0609c6af407354e9d309c5d582c8957a90c86c944d2f5c6aa", + "0xbedd0ccdb57b01e688c14d6230aa84f5912cf13a72839f654eede63ca05ff52e", + "0x0b0b7435e02cbc3654eabd042c32409f4e178cb60f5e306ba041c985378cc286", + "0x749b3fade2cbe924b3d76f2b9e216c1675e8687bf7ede81b090ea36a4468871a", + "0x9a59125cd56262bf0b6d2393be4ba8f769b9d5a53b644a0700c9905dc4d91c14", + "0xdd2001f6cbcaf2d29a7d022e6f723e4691390f56ed6a599cb58bee74c64c67a7", + "0x05870eb07cf5d1c1013fedd8cbab73b7341e930a0c02e2dfe898d5299981e34d", + "0xef78e2f60d74c818cbf8996913c08a7c5abedb3f0f7ca412737c071cd6a38ccc", + "0x636a6d39d02bc22c139d9e203b1ae8b2dc485cb596641ba6e05929296e87ffc4", + "0x8cd6e1340deed5cf75bb30c3183807b653c7ccca712f379d5f885ef1a4100945", + "0x549d904ec260af5776c241a80c2075e31869b8da7026a078fe158c0b0d646b11", + "0xc73927c3be804cdab32e543e78f38f8e0899d7f0418e0b37af336c2bb37269cd", + "0x62a4d4dc610995d0f08756056ac199c3f0959674eee8d4503067b822e37bd1a5", + "0x4b4a3cb0ac65532d63592978b1afe838cceb7cac04d30a58cd4d73af4b99d2de", + "0x2c5aa96bb3b58a76b4acefd0275f9f7f82418501ac2c2052e0a5f43eaa4eed64", + "0xd6b8f1a8e502a93e14a8177bd750fb7e7974b7527f50bee37690b523dbeb077e", + "0xaf38bbf2941d5ac311f8ab4cec22642049473bb3a8493ecc27cce73dad188797", + "0x778b83f79934655d67e52a38c09410e5b5cc28b26f4f70b84d2bdff5519bcc74", + "0xcc016a4cf3d1f7119704551809e89c1b4f95d1dfc4cb7f95db2c5d2a6ff1036a", + "0x6b904799fc1df35a787bd7b1ce4337697bad98b25b2bbf264a7bc68d452df41c", + "0x915536046dad19310976f8d158795e4237bec7fd5c2d2fc8ab1fd6a3b5b75e43", + "0xc84167f477d317ed1e6b1452cd746d43ed679f4ff2eae84b1676ae99d3bb0fca", + "0x71758a9fb0e66a7a04a076569cd3bb41d5b91cd1cc26b21a773e67524af84e63", + "0x54f60d40ea84652f938d2856533d82a7c0b91091a0ad009f48e599a917b52bb1", + "0x618cec420aeabdbdedb8afba71b0d0408da5095ea3231455b6c817f9a585d437", + "0x57cf387db60cc35262bfa939b0272af7c490720e496b5f8cbbed6b4b26397824", + "0x7c8973551ad57c278a0d8702cfed004dac293e2b9464debdc48fa88be033ba5c", + "0xc80eed1b2bf137a5574d40252d4d02d0e3f87618c79b764407d8009a2b4932e2", + "0x09ab540be725d669810c498557b61a53b343104c5303cccaeb31e518782d64d0", + "0xc996eeca57ce9a7ed79f6db45ecefe1710abc4b3e21dec8f707eb5f7632c9c43", + "0x160af8ed3f5f55bfe0a9129da94ed002277580b297e29cbe23a962c41eed1cf4", + "0xb8935e0d55dad821ba5605607cd6a025d6ff76ac3d9d855fbdb3d000ef864dec", + "0x10f6dcd55318225d362a40859030fed3199ae015791ccf4c4bb343e7cb97822f", + "0x6f3a76c93761f73bd148c57f2bd00670c20bbd19338b7fddbbca65e598b70ab5", + "0x6e99e5531942dde6d881eac1f03c6fd2ba6f9d79b6f88408eca815d84ff48157", + "0xb05ebff5d64b736d36b372da80fe28d6e76b64c74e1ca94f1a859936c7ecfe7b", + "0x815a140ac8538595dd724ab2c31e13d30c03ed3b64d2cbc72871fe402c7c88b8", + "0xf4376c81472e0b3995618f7845887159eec1e929b8a485c2693219f4d5ade069", + "0x30ff47cd807666e5ac48659bbfdcf8883fc1c9ea4e9be8b397dbc969a82f8d95", + "0x74e35254636ae2f72005ec69e62273e9cb7387fbd4234aa58184e11725e2f569", + "0xb4f3de4e9a22e787077a53017e72bcc8ec26a1499c98305dabb61ce6582cbb09", + 
"0x3a05acc943f2b7d0c93cb4c5fcbbfa3ab9e536a2cda7f1c325fe3748401b4f71", + "0xd75d428c14fcc7ceb929328989235a5fc46f8ed3e0136d81dfc15866756455a1", + "0x20833cf410b3a0626a6c7e07d5fe09bee17b218416725ec3fac716597e1f6576", + "0xed31b1b593289be968e483888fc80e5c360beb66626702f388a0848bf104687a", + "0xf55fdb35314f9b873a70bb207536eb2e22958ff9264f1e088b0dd82bb9358b2a", + "0x701d2b5d8794e244d85366d13fa0382827ccbcf557ae2735bc3f88bcb5c778d9", + "0x0492794944f7d179da85f68eec5ce4575a90b7a6f161ae199373ac4685c6c665", + "0xb1fb246d93a6ca7a0ce6518a4d0cbaab074bf2ada05fad7a6d199cb830bebf8f", + "0x77172f0acbcf0eb3ffc2e314b548210fc74c10826162b2d63195cb984d606623", + "0xc9f684762b2378428ebf3dc8e207fe8d2b33c5a0ae1510ef44e2dda7e5979faa", + "0xeb864bdea464cbc95b1e3a4be62231f91d1230179b7bf61f755abf5f05cbc9a3", + "0x10fb3751407e792cd70fced2271f125d438d9cf3843d9ac6f89681616d0cb1ad", + "0x79f96b934d4b22eb9315230c113fcfe0ee2828133e10b14cf2e9b3a8007dd7ec", + "0xf8c0546c0c5eb3d9ee16295c5e0fb510a0ff3f55cb654313db2d8ec6195ee996", + "0xcdc41342d4045064634291c9bbce68efd7227bc2832b372502fd4907f1957ca3", + "0x7c09a67309e6a0993fe1cabb7528e5cab4ba6a79042b22b3a6cbb10778cdecc3", + "0x7d5c881d1a199b82608d7e27456788164041cb1e5ece5e3b813db550cb402203", + "0x4f7238409440404425f9f60c034696e4713bab4f4e507b76bdb2697d1117ad70", + "0x8e8642cc65aae464b068178c4c2276dc8860b63cdd8f3447ba07a0e7a9b20c71", + "0x59fd79b1b77ddb040cb6bd45f10ec5ebbc7ba421e14dd79755e568774010041f", + "0xc8bfd004b479e0ac8e2e798045343198909a6f5657625028bd28af69de58d9d0", + "0xb550f5483d01e8d32258708d82d463f28658d5155a15da214bb2c748e27365e6", + "0xeb7de7083d7c73f3f443e6d009e97783ecefd77157b830cab2ae7d41a91b0b0c", + "0x6f3145fafbfa23c5667b646b3b5fe12b0fb08e45f09e62e9254efed6ed812880", + "0x66cbab429d401739e655ee55de9dc3ee6223230e38daadb1fee88da03c46197e", + "0x410eb1081b98fc38f2447bcd358a0996bb9660f949b4d9d6a6596aa41de59557", + "0x2fb84f43e9750be8f37cd293742776ec76d95170689710254d7847369626d6c8", + "0xf1d6c7c8a81aeb7611531452bc46b69db1b56a31c1c835ec5b497b6f4aad2b8e", + "0x1449b942575209a4bfcac57d594cba694bf494e98e154142d5423deb193ad2dc", + "0xa14bbe0f97892409b86989212bf815a5024769707bb8fea7479417402a346f05", + "0x1ebaf8a3c224720c6cb4079b8e355411f0181318dff7766faa204d4292e6593f", + "0x70169dab4258b561057c427b3d31de403d87302180db843d3ac6fcece4a8bff6", + "0xeafa1e0854e1d0dedffc92fe173f2333d3a0581199fd3c63ea69356206329d65", + "0x54de1a943017c40959dbfc25e318b0055b0e041cb170b0f34e1fe76f41bf2f15", + "0xb4ce2d897abe390b9b53be9809fbc1dce8d23c2d726dfa1d0e1419d8caf2c6a5", + "0xca872995f8268bb0855fc685e69d590defd10fc6befb61a57ae75255ef92cc23", + "0x3e3606562cdf9f8460c18efeca2d7f3649ac96fcd81d2bbd501e73a01c729f27", + "0xec09b7014e049e25e0749f0a0899a2893605003f128bca3bfbe92a00c9dbbda8", + "0x9421e443b91c741c80138137362f7b30dd9a0db6a815a3b4a799ec12ba6de4c4", + "0x78c95ab7995ca6058f64f7fc4af9575923bfa70a092ad2268f0b2a3a20dcc247", + "0x386b36edeea33668de333683b1ec3d9c665f150aa96cf78be27fdfd8f185479e", + "0xc732d05a6122e07f7566a2a751adf5da1cd1ee23dc0b34c0cfe7cc12f0970c5f", + "0x283d97620ed18ac85ab1d2c613d64df485b6efa26aa6efa71f840240a95cc240", + "0xce779d27aacf6e512f5b52f96507325568f754b31db766cb4aede53c9e1845af", + "0x50bbb90f2a3cf4c479be3e118936246db591f98e37caef06fbee553502b4c364", + "0xcdf95c14c577f913331709c5d044fa536a4afbc06cd8d60ea7b5d9ac463e86e1", + "0x3a7d03a8ed71d0ef15ed70677ece7a2ea32ad1e96ea5c3266a1d9f580efa862e", + "0xda132c0fa137c542b06170fe085b483afdbc97c63b4546079262ed0fe1125c1b", + "0xe4cc4056951325ee41cbb70fb823eccad0f266e032e839a2e3a8e851f0a6c73a", + 
"0xc301415694835a5bf36e226ffa42d2fa8069dba6be6becbc2a7a6653c295e8a4", + "0x1856c6fbe4494ba46fb87deed5c10e361a83ee73bfbc8f1b604c6fc0301f62db", + "0x1b822bd73acd2fed2f8daf6936ddc8175507f373e701217b645cd6cb2ef2019f", + "0xdaec59b4c520d6b5be4d82b1b1338a5dd55c9df443d697fbab750c337f6986d3", + "0xce6ff73593693f7cda1159724457a2aebf0ca608105a5926fc98afe5010349b1", + "0x53c50cc4da6192ddfa851aa104f92c865ce212a87500098d18100394140ad6ce", + "0xf015a6887a543a0849a128cec761f10f7d94f7be083f739c767c727aa9ed95d1", + "0xa54bdb8a7ce7327aee991715a7d673d403f2248574bbc0ae5aeb9a32b3d9528d", + "0x47d46b4777a303430fa4e45305b31a113acb4aeebf205ca215447f9d27ea401b", + "0x7a9504b64ca570814194b95acb8400357667122b5b1fa4cef386b35e28de2c61", + "0xa63e5964474672c45b8b021b7cc408e30364c163c800fc302f342f119689e022", + "0x988eaa51716d4cb523fdd5fed04dcc195c5fe00a8e4b6135fe70b6f09520bdb0", + "0xfb8a8f98809114b3d433875844e5fe90cf0321b772aaf86863f96e8013b8de7c", + "0xc893ef50496aa272414f6a06af7d53281fbc7265a3e389a42c7c49d32d24fc46", + "0x4956f7747cb6489eeeb92b1c759f54d534397074c9cd017b2f5181187248449f", + "0x4b48dfaa0b7984d4eea56cc40c5130936f98752e1c2d02762b5caab600c39691", + "0x23df05dd92b7e6c024c02fef9cc6aef2ceea2d0a4827de1f87bc7eb4f8c8907c", + "0x47ba2e8a2e5d4fd0bb04be0d76a2d553d4bbfc574d1b5c715393fea03672f4dd", + "0xa8553d8cc2aa6cfa89c96564022be9e89aa5a6b0b98d88fb7976583cb2b4d2ef", + "0xaa3e7684a3a9daeb343a10566c21ed1fccb46f4ac4b80efaa8725127468b7072", + "0x55acb0e629a6b654ba723f330770a4238cee9aa9b98fc4536a7c8bcf6050b9e0", + "0xcf2cc1a66e1d9e2fcd8709c6e695bef0ea95e4d14523d34a544adacaef0bf786", + "0x6d955c9a59acd3c90593b8eb22dcd614a89ec947d554a0fc8323e817cea7b332", + "0xe29b3ce7a0e8325135a33a1127d1db2f17637090712a0b440bcc3a3bd6301186", + "0x47187e16fd16d67eb94e48e2f35eb520191ce412b95c1d18443487887700c516", + "0xe2ea962ec229af7a26687783a7ba8f65490d9aed8f34bdf7aeb978c901f67c1f", + "0xcbbc65451fd646ce67f78593806e3b66379d1a3aed301e6e67df9e3ecfd3cdea", + "0xedc8308273c7ec9b50c8c328c62cda10e4e8505e3f67d134d0961d0fda239747", + "0x57d5dda883a2dc7ef9b36453edcf481d97c06113d7a060131dd39133ded06e6a", + "0x9392ffa1001ca3a85455471b660ee9d89c8d6d421fb4b00d7ef1eaef22747009", + "0x0751d3a58d8bfce5febe6980c821c92f9bf5beb8685358375640f751fc5c8369", + "0x6ad5fcb53f602b55dc728c7850f628ed5c500b65c6b0d22c077fc4df980d9306", + "0xb9f2d92291fede8a3def3f527a8449f6e330d587df1ec3daedc1f42cdcbd6af2", + "0x85eef96182742e4090221c1ac4433c106467e48f8c46ead864e6150d7d50946d", + "0x6e800d685f7f68839978c124f300aedb6c34dd7ccd7b2dcd3144223d0e3db527", + "0x9395edff6ff47843cda1e5cee061f449dcb7b5a0ca07daf9788079aa7c5b2ca3", + "0xa2995f2cd9a79024f92ecfaac2cbb17a9325b3f54480bf2ae5f3d171132780f6", + "0xef907147aaa023015d9b8f3e7414dd4b84e551af0d9f19fb918ad5f632096657", + "0xa44d76467ee5fd7acc90c13bb12740b3fa8a0096ad0be0e3619841e67323c5ba", + "0xaa574a790a0a50cdac6be3819be64c67427f5a891a0029333da60a01680d83f3", + "0x531d9c8a5d68299fd55cfe498922d54af8b119500f7e6bc0c1b66b45234dab1d", + "0x45cc89de803b6eea973d45c26b4b4287d322dae61163ecebdd3db92f8a40be52", + "0xb305c50ca54418553d0a08c2a01908f3d413ee3eafb55eeb4cfe32aaad32597c", + "0x5767e979f2322c81537baf5d300d24e736373eb399c0e7ae989fc0f177c40bcd", + "0xe3646fedcf2aa89d6ab6a6a225cbfcf539317e4c59be2655b46f0d89142d9b18", + "0x9edc1e15d3b2e5eb5f0b60f15e3b39b0be95ddf0bb6e4d18f4c52322b66c7867", + "0x5bf071715890e9f227fc66c705e782add56e39eadd30dc982066fc067ac64fcd", + "0xddb572bdf8c2ff6ce8a0d9b6b2c95cbb33dfa9b96bde4b74812ac016a93770e2", + "0xf0687ab1fb94e49d1e113b1fb0127548d60ca1dcf341a1952d16dec090aa2736", + 
"0x94308074a4ed0dda1ebaf569b6cd5b4b6f7e77bbc41c737c78462a1ab330e3c1", + "0xa7141bddbfa78144aae11810acf034e48bffc3368c862d4f750112899150936d", + "0x1560476607eb8c091ffbc0ec21cb3091c3385f9c83f8d27c4a301ac200a92820", + "0x8c0b77737383dde0abf21d1350f0d2c285344720c314c564717ffa034acb1739", + "0x2782b37efd53e9a34ed2742be32de2480de342a384d81bdf3359d3686a3b504f", + "0x6e9848f3a434e019bc209a02ec769dc63e084b9c0f00472c218589dd3c256017", + "0x721d8f1bc957b825d3abe06a6ad9d8bba2e03b915aa779ac42fd9e5bc247411a", + "0xb937b6dbba5291fd35427afb0126ffd47962a07c3bbd6c77e4b80f6bf751c684", + "0xa5461d8dd8d05bf13875c6b2b62c82d371163d2f8ca80c2c0ee4d94061355a7b", + "0x58901227df5457e57c74db16ed72b9b6f9303de046e7adf6fc269db7ee9e0a28", + "0x33cb303cd28fa770e6c9f067831cb1829a8ccce845ac6bdce7d3da5d4ba8f3d1", + "0x3ea819ffcb3839d7a16ce09958849118a801152ecedc92de0ae5154f6c8a5d1f", + "0x001ed88b63827a6c71d7214b42551990e3f313720d982ea5f1049ede4f9d228a", + "0x2e346b0acbeeeb88e08ca6af0f2bcc3669e093475d8c224f23a45d4e2fca462c", + "0xb4ed38878080e2128b706ca1ac4c92c0def02a8ec094d07312068c10f6b144ca", + "0xd0d3c577cf73421791431a6f00452f33fd06f771de9ae2caa32ef2c93bd03951", + "0xb3dc9827308478b197b3995e39aa930d40df8cb98fcb2958c6ff8ece2bbd4c16", + "0x44547163a565de79110c1e42b81151a6e304df14953e648742558efa6f63672f", + "0x487a072363419adfe51ce0b8be352c0f497e5720617e9a237500293459cb7bc0", + "0xe7e9d6bbc5de3d62822c92cec484926d935038fff8be29ff3743008499000d9f", + "0xe151cd789208eeaa791f146f7e7efd46c17e2f8aaf061ba5bdbd7357a32cea39", + "0xf5e2de235cd7ecbc34828c244da80de6c9c452c253e6c876d9843bf5bc665daf", + "0xd0bdde209238ddc6ccd1e06e6e69c2c16b38ebac8b9f3202d46bd6bebacdd893", + "0x5c3cab37f5d83ad8fe9349ba88ead08f36e71f149f10642c819ae53b26ccb0a6", + "0x1d069657a6a467ecb3a781a3d72a8b701faf497e1098fbb9c04ae29820fa4e75", + "0x8fa114b013195af63e697432f3be7017ce8fd6e7ec26f23f832b003e82f17ce0", + "0x2664fa9d45f83ddb5c8a758c6fa3201abe7ea14571c886815585c543f245b301", + "0xd6c3528e946a4461efa56df07020008870ed64fde08cd72e7f8be581207f0d1a", + "0xdc4c0f579f3e44278905dbc69610f7d1025e068f75e5475400d96c97297fe5a3", + "0x06124cac7f9a1c10d48216905db968c9580b3744ff89b37b9b49e7308b04da8a", + "0x5c0c6c98bce9c64b7b62d62ecf02f435ce54b76c58883da177611774c79d5fba", + "0x424553682070092aa2ab3e454c66dac9d6bf6baae6379d91405a47fa48a3f200", + "0x3d09aa437ff840909ec6024c1e407ee7b5622561e4991b76968c146ae1c4976b", + "0x3ef50c81169af169c100f58f3afcb8e2f926d957b2adbaca8787be5d4e8d7233", + "0x8783eaeb56ca2d7fec84e0e272b77271fdfd6c14452a2e1dd83de770c5d99a1a", + "0x861024460895378ba100c5d0c05e62bb6cac8b21ae529ab5cab39eb6c6cabd90", + "0x1c741ed9eda60e5ac585e2f48f06fb988367c2c40a0d8111bb04b260fe44ec6b" ] }, "accounts": { diff --git a/ethcore/res/ethereum/ropsten.json b/ethcore/res/ethereum/ropsten.json index 1e5972313a1..bdf44fe83bc 100644 --- a/ethcore/res/ethereum/ropsten.json +++ b/ethcore/res/ethereum/ropsten.json @@ -53,8 +53,8 @@ "gasLimit": "0x1000000" }, "hardcodedSync":{ - "header": 
"f90213a0f6a1b2e8155af1d1d77879826e2535cb6023ba35705934380ab05f65bcbfb107a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d4934794f3af96f89b3d7cdcbe0c083690a28185feb0b3cea015ca95dffe4c5de6d9c02d9282df0db94855b0d602738f4b6fcb2268694cd92aa07ecb0900077c45bd4d3ca910218099f726fea18461f90be18897710767a51559a0251f2cb798e965c5d9b11c882f37c69fd2c42b314fabe64d2b4998c76eb93ae8b90100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008439b475f9833720018389769d82f618845b45928b96d583010b038650617269747986312e32362e31826c69a0fbd0db05012df54423a6b25395ec4f6e66d9f11af8b9c492c4fb7197fcd6a5ba8877d4a227c2bdf4de", - "totalDifficulty": "8809217991079619", + "header": "f9020fa0a415a8dcd55fe9c93372da415ff6897036e48cd3c1a5ff8ffe119eea1096ecd6a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d493479443d58f2096e015db88e44346e73d8c59cb1753bda0100f05d66d36782b7c061c724d8d07619cc61053eda41badc8d2cb9292898ebaa00a60317b490365f40f0528e1f559b0f49facb6638c82a9490d368e963647c704a0118b536c3bdabf90273d527dfc26914c7878176fff16cee8fbb150e00ffcdd29b9010000000000000000040000002040000000000000000000000000000000000000000040020000000000010800000000000010000000000200000800000000600400000000080100c000004002080000000045000000008000080000000020000200404010010200010004000000000008020000008000000000000000108010440000800080000400010000080010820008000410800000100000000000000000000240244000000010000000000010010000000000002000000000000000000004000000020000001000002000000000000000013000000100000800008000200000104000000000000080000080200402010000000000000000001020000008008501602a8414833bc8018347b7a983476d40845b838083904d696e656420627920416e74506f6f6ca0f540bc9cfa258b97576bfb9a79518b2c07ed73a98bc6baa61cf7af4b40ad5b6988965658a406315a8a", + "totalDifficulty": "9811143388018700", "CHTs": [ "0x614648fc0a459451850bdfe353a932b5ff824e1b568478394f78b3ed5427e37a", "0x1eae561c582dbb7f4e041998e084e165d0332c915d3a6da367638a8d24f3fafc", @@ -1819,7 +1819,156 @@ "0x4b40cd83205f8b946ca9f11fc3306872650e658e631511fd4080bc8ca749d913", "0x652acc59b71ca20bb65ca195d1a4b3e177f6a3985bdcd6120e1a45b7d4a0c7ca", "0x49a5e2580ceb329665244e489592aea27d54da8189a665d9435e037ea70c46a5", - "0x379801356beb3a8e5fa7311792c69c7ac1f675a9c08c837f9f0e9f53c243d6a7" + "0x379801356beb3a8e5fa7311792c69c7ac1f675a9c08c837f9f0e9f53c243d6a7", + "0xfeff4543c70356b7d9767c26b228cac8452c95bc4875e92a60d7e6fc50468667", + "0x82eb18827313399cb3b0d3c975eb9a9480c3aa5587ce72d321bedccaaab56115", + "0xa644c582a751a6d8cbf30e59d6e770a6f441c017b486cdb23e6b9c48c614967d", + "0x75e6f4d4185515a3c58dea60b55d5e50af053bb261db2d3de00f812a4072ee2c", + "0x9018506876afe91cd2cded037f41c5ee8503fede2a59c47dfe7ea1b36c460f73", + "0x0b8bc06bf211f715291846c6c34b805b6aee91ef4e8c63cf3d15ec79b44ddae8", + "0x150e43fee00d798b8611d4e03838072d8b9c8ee3771e840ee2c161a541d6b643", + "0xe98c938110bd4227c660a9bdccba3b5ce5b8414d909514502e8f354f96abce69", + "0x0fdc3bd666d74a99e623f6d4957bcbfb17395bbdacb52b18ecf7aa5d2e4ad2ce", + "0x2c72d738e803c0522b5f424731ad6327548ef08ded4caf4ae0466b2cf171ac4b", + "0xa7709a082f8a2ed01e2e88c4a2d18930f510fe5b831d2a1bbb1a85a858f19e54", + 
"0x8ccb960803f6b6e87515ec20a36734128da0dd6e65845013d89996070d3cbc69", + "0x4ab2e49286c8505f32ca54314d1b5c3b2952578d191c69d1cc7ba78a000a01d4", + "0x6bb792b52949c952f20b9d11274c8a05edb44c4070d949bdfddcb0eef2464454", + "0xf61eccaeddac91487b2f5d7b3ec57d1ccbdd0e1307d2f624319fff525149941f", + "0x2dcf01b0d9ce31c13c20959618f2e5e0c7f8c48521f00c12c99d28bfcf202f57", + "0xb1e83745e6013706904b7322350c8b453aaaba0a61ba89f35eef8f19759ceeb4", + "0x8a2cd4944bee70696aa216b453774f4d915dee4bb78c8f7ff55410cddec318fe", + "0x49a78ff97bd4938cd030df23e5ff4d84ace59ef3fdc0099941d8ad7f82235894", + "0x61e2096919419c8856a5473f135f4f9febb102f80eecc90ad60baaed77a99d29", + "0xb95709562a26564e57cafa00d0969305ffdd0aea7e523ba90957a6cbad6ceb70", + "0xb7a6438f176aed77a9c6470e28ab8cf19dad8f77dd3313cfbbbc9276761f454b", + "0xc85a1c37b5a4e1e8ae67df31b369f0b44cdcf66418b42206e5b4a738a0a0bae7", + "0xf034b0d125eb1bd1bbde1fe413aef80fdc136339eed062b97cb9da2f4bc94ba5", + "0xcddbaba3e9e1680a2d1586a1e9e2268afb51b4894f2b41fb2eac5f667b6cb655", + "0x971b2fbd25a97bc710c30b6b98efc90428a7e423c09fe772622e7ea37d18e49c", + "0x8571bcf05c836a59a5d3216634ed1f691edb7bcddc7b5412bc513ae9e1fc0423", + "0x0543d8abe5e1bd541a5bc994b0c7ea5a556575d2e9302d1dc36348ebb7ab9e82", + "0xebd8f9095619980043f234bf4f7682534ae6a15b4584e9d1ae524138471467c3", + "0xd0699a768c873ca5bd615ab5b0fd9c7c6c649c4245b58cb622210ac2bfcf3111", + "0x35bd5a50b11033878751d985459657bd4c0206584cd1def98a355d7ffb0539f4", + "0xd476d2b8b62b720520ba0990137629f83db9c6896f1e788fa5bf08614af1f2bb", + "0x2a8034765c582c07580c0da45c4216bb513171d073a99cbca2fa2adc0c05dbf8", + "0x084b45a575706daccfbbbcc321a9546b0c911861f2639d81f210b77680f60fb7", + "0x499bfc54ba17718aa8f74ceb412dd9b6b6c7d1cbde4d8ad2385a5d5d77e98915", + "0x8638684b06607afb53385f477f0f295453f14e36161da6dc7728336475303a9d", + "0xa686ad8ccc3507627712d203f7ff3be65c289da67269388d0ad8949ad3a5a589", + "0x6d262ff8babed3a603f400501cd77344585fff92e5bab5f5848873042ab87cb0", + "0xb5755234055b3534fceeafcc84d0d77dac220d3466e1dbbbe7d7d44c345e1eec", + "0x92a54b989a1aafd36f979ff0f1fc73f9b1508628f054430234f4460cd9f3959d", + "0x1a48d9bb57e9a70b03c066c97ff9520c177b8361f2b3c343ac71fd8cbdbadba5", + "0xc9e103919e8b6827a1fd57a3e99155a24810f524fb69a7e3fbf486707b2da690", + "0x22a5ba2899ce696fe691a2c545b98ddecb7c0a844cff2375e50e0a0f0f9cf9b3", + "0x154890ebebe7f014aa88f8da52c9713e2b0cc8b9b6c7d29b354afa000c2eb429", + "0xf9a72e3ce36943a442d0dbc7a869a6344003d6a7ee4d344c1899ab8b96746a6b", + "0x270fe45a526aa3bcc8e4a5be075610ae9b15d6fe48b04c6ff0c82a1c7f56e0d8", + "0x9eacbdd695faa85bdbff10bcefcd5edfcc489c3380cb1c73ab5ca6cba2492995", + "0xca15124d292e0465c7e50276cfd374c82391bb668d68a3b19c67fbb9e68b0bba", + "0x9a8cfd3f1b8ad178c982feed1fc306dc996da3d928e99a6f8ad240e55f52cc3e", + "0xc173b14778977c5d6dcb1079023c493406911c6d6791ad08104f847323aff7cf", + "0x82b3e9b9a407ff0596aa7eabdeba10dded85fbf3258d12949ed4f46484e0f649", + "0xb8a009c28a2328b7c95531bc64e072388d797130652ea0a84159c801af562a5f", + "0x80e45ff8b60d28dbb45530b7b56e049e605d28087df7133766de0e151dfbd01c", + "0x7667e9cd22c5427e6f37f215b1d9cd90b2659d5db0bb499daa382e09fcfd58e3", + "0x825014f354bee6aa4cf5283f86e8293c0ea6faf07862abe2cfa12740a0d846d3", + "0xbe9116073311dc303fa29f7a226ff79fd39ec2ffbd2d2270652b972a701aef54", + "0xb3fd19eb6b0163156a350156b15e660853f68a903ea6ed758bf80b2fbc5a655b", + "0x3c5a408909bf485e8c3d8e683d8ae4e7b7149d1f01874807428a3f88e1293d4f", + "0xc759f51aceddb90e6e4273fb9439908592dea649f5202f530d15d455c5c8c7c8", + "0x1fc97f79d1b1d72da309bf2027ae193f0594ecb77c07a472421baadda577a24c", + 
"0x09c4fd284e5c0672ad031629e58fb2776d1e772c6041c91433f287304dfdcd1a", + "0x8c6b40f8700a0638ca5b7bb003c2de6b54e731881a072988cb78ba89557db1d6", + "0xcbc701507f4f970d8dbf15306d6ac5bdd46b6f58bfd2dd07db4ee01c65aab5f7", + "0x6b911e52f43696cab429c0e511fc40011846af92a2631f4d3f2cd040567f021f", + "0x4fa5373f6eeacba61818052da8a9787c4ed86be0bffe0672c0aea792cc3d2d49", + "0x518ceeb36cf1a406b89040ab0c60e380068f5f6571d042d3bd9a9d7af1b12204", + "0x82039214db7d3beae631682e9249b06f98e16153f864c2026a6edcb0727a6532", + "0x9a9e534af83084007e54dadbce2a0224008a8c42356c2ea650606e8e72d2ad8d", + "0xc4dc0b662ac8d4d987b8c2c65cd27fb8992f65d598bbc25f017c45b7032451f9", + "0xa921f124b5275f07b2c064d477d5779963096632f03c54ac9c87917bc4aa6e52", + "0xb4fe786c84a19c8db63117c421ce53244436d0c04384c62d508a88589f4e5c83", + "0xd57ea382802130e3ee8e7671be2b40a1392ebc6528e2e10d9d51f2899ac7ea54", + "0x224523fcd11abd078840803bbde2c3d1d359a0c2cfdd6f9110a93ac4d3c49845", + "0x5da21f79c0f1030e2a5dbfa283becced75e7ff223ef14a6cb5527434503ac71a", + "0x3e315746e89fd3e662f4dfe28245457c3b8a4ea6d9450b93639424a6d8ef7d41", + "0xe142bd09a6967021c4c5a5c7491b0df3b1bcf13564ac934a2668e1b2252e4723", + "0x2e9ab88b366cba155d1f6093e4edbbafd80032b07a192a35942aeea1bf0f1074", + "0xbcbd1b446853135abae9f330d63a06119a93fb53b63c621941f5a74324c384a1", + "0x7816de8933a433aa2719c99db24e692e6ac6551416b08f803302c7751fcfa3ee", + "0x52c53032dd84842dff6580013ab1edca7c251598ba5fc8b903c40e3d9e537095", + "0x771222b46cdcbeba729436ef39f81732aaf4893da720c1e7b59ebb3fd38ab9d6", + "0xfc440de1896e1b2848f2cc5c7d62c0a6b0019008de8aab41df5395586d340d4c", + "0x0df9b803307465e54cc77ee60dd6988e3d713d496ac0209509c5312ad0f19888", + "0x88179429086844e69122c70b6af8b689abea11e13ba0809743532663e3047c3e", + "0x503898d86c4038eb76124e18459e0d57ec25b177fc79bb87eeccce3fa75f0037", + "0xf46efd490382e4e4a3ac0e10926b46ec56e833769acfe3879acffb13030cfe9d", + "0x6efde5293a9d22e688165f2a51bb83e3b53584f9ef5bacc0bd1d99f5c7df18d4", + "0xc03dc313d9be55674000d061851c139f4c57c6fb46106b98c3442832a51c5f3a", + "0x8ec2accb7c89019d0fe2a31ddada0b8251ebe88a439de9d68be73c0994a6523a", + "0x36ec73d7e8c0181f6335c668ad7713f8eed607e0c302b43413f904b26160bdc1", + "0xa53e90d88a751a78397adc6d2fb4f93ad2b38f0ebd00b32932c4b2a39a276d9f", + "0xea2a25111ca37a53c8fade1ef65097f0cb85e00881b1ff201d36c9151a8e1a0f", + "0x8c1a24ac75821a4b0570c22b682c6030c678244b059ffd4b21e4d3cd05afb585", + "0xd230d46b14f9586f7b1947add191a69ed381cb6fdaa9b28c28869da8c6724dc6", + "0x4e8c717de6c94b4be274f052d9df627c945afe18df35c58a90a035759dce6b48", + "0xdefefe94ca6d93b78ef9f92911eec4c2ae0c10148691755f1624f86445a3b2b2", + "0xfa6c031ec69c7dac9f9b6524054806e2fa524c3200fdeac030d3bf7029ab776b", + "0x9204683278cad559a3f9ed2ec1f6539f34efb17957830d0aaa92b73cd0994732", + "0x2bb26a0cc5eb7424033e8bf552c7964573a0954e5b6aae32a51d95d951fc044b", + "0xf6e3827687af53fc532ba163d7ebd5ce18ffa6384157982b6dfdb1f4be82faf5", + "0x627bda246bf499f4498e8c512c8f601e36a1dcd778f21a598736f83b5b33fbd8", + "0x5816773273e4ef45fbacbe920dc3420602d8134fffe302459891cd6e65627f6f", + "0x7cd637fa8a5bb32dfd7c076c34e0e536d7c6710f1888f5ec18d64191a9930744", + "0xfe016a6e9ffd59237b349a817ba8c36f7775fef6513c4d430fbfc20ce72fcb1d", + "0x7c466b30cf23cb7115a3855d9d9c537c29f380ecfd7aaa0a6e59d9680bc43971", + "0xc49551e5eb138d28f5557ea8e8a8bbc7c058084ba0edf8f6bf64471928ecde43", + "0x7366cd68072f52c5644dc9fbe2e72339705dddd2b03690dd748265723944e754", + "0xbfb7c3bed071283cc18cca4192e2bdc1f97aee9b7185d493a29f20ed4d52ae3d", + "0x40c9fbfde126c8c92cf6d15a18be4b5a4da39ff60ce45cf5e9a2c1782244947a", + 
"0x1358333dd1b82e7c805dced5dcc2edd5d5784fe8104b64eabc598096c26d05ed", + "0x0a9777f7711b6d9f6cc6ad4a78b716199b57f026d1f30f04ded5d2cd8286322c", + "0x7a848ef083a03e4112c75bef258633779cc495df4ac5529e12c8eadc484baad7", + "0x17025c4c255f509163d11df3cac35c1de823e5ec71cfd66693393882d81fddb8", + "0xed0a2fe0dfe85c7d606d0d1b355518a48220e91202ec1c1178c8b37c86e1c9cb", + "0x4674e5e97dc0c615b03c34c702ceb07aec48cd7e8edc953a188d194a8cab35c1", + "0xa004c31b7a6f6511d7eeda4c400c5ede847ce2ea4339641b6c419f688cc4e574", + "0x1bb6c124e94df05e091f6aea45e7658351fd21318e91105274a15be4f7a7a632", + "0xc365b8bab2d6b902508d075195a1e23b04c97e40217c4ffa49fe16bb13e5b575", + "0x93c1383e2ab52ded5fc86e1b6f6e292c0d0977e8756e1fcbf87b099682060e25", + "0x2921b874e3855d9afca966d5088e9b1cfe59f37db54bf0c1e8bc5c5a97e7ef6c", + "0xfee916dad70495cb45c6ac632241a73869f0f553bf7172cc4e63fa382a5e1077", + "0xc63d63b401941d11edf755d7b6564b4fcba1f84ed99cd08f1580faf2c224478c", + "0x8069f00ca36baba06b310e28f6873e78b3ded25b456d95dad7eef4c517e7d096", + "0x48dc0968af32e91a8a3f252bc0082d7c22f8dd9797e4fa774d5b5365386a4602", + "0x861ffb24def147c4577aaa332aa2b5d3165542c4fd1e1882a9050e5de10f1f57", + "0x9fdfb7305249e7a38068f8099b74c5584a2f2df3f0514371f20042a6c0d2c85e", + "0x90212f15c5d6b686810e1895a139bbc26141181c3c472ee28ef9904553a2faed", + "0xdf967dfde0b63153142628dae7522307afb35b24146d9fc1d35874914244eec8", + "0xfb17a9d9e9a5ab6d742f55c0e8d80c5e5d722e9411e84bc47b1d2fe5fc33dd71", + "0x56b2c30b802088f9ebf0918c7ce44b50a96a0eac98f89e1711f53452006259c2", + "0x4c38b33bd4089a48a65369955540219b44af520000b8f8c346bdc96d47dc2479", + "0x7d3487d3823204a94e557b7a942d7c657c1b49d9f40f2d739a9099d4438695b3", + "0x2f82f94eb74aa2c0fb91bdb38ba0b416ce7e033c7e2a74aea113d201b225e24d", + "0xce8ac4b2f0f74601280bc3df0f5955d033e5c511372d812b36dffbca4b2e4736", + "0xb6ea6c89e91ecca1fbd723188a77370e9d23c3df8aac282cbcbe000b13d31b57", + "0xb8b8b7cb900482562227a0a1256ddbe822cdcf134a196e29b1aa6fd92423a4ed", + "0x0f47ab9f4985c3175e0a30a237d4f7c42db4a58f95344a86a818e459296610fe", + "0xc26a55a23eb390fc83fbdc9f07ee3ce09e8a2b28bc09386323ffc0844e13be24", + "0x1d5e97a7fdf5f34192e9b6dd255c41b63e24e1b4ef4ea43ae510fd1f50abb022", + "0x58b6120c4f0a1004e51703e77decb1c85bdd40bc83654d3f8ce5f7e3b4b6ca25", + "0xa2f4b27f5641d9294ee0411bcabc574f94b294ed8c50c9ae0c3d58caea7d6ba9", + "0xc4d6d3a8b4debec7ec7f3e2a6e1c64de46ec1187ed6e87fa2d87412c9874a3a6", + "0xfb59d9b66471893988d0fed17898fe2b7873d85aed217c20b6f438ed70fc340b", + "0x850df664737f288ae16d701878ad04f399b367fccaa2ddbf975d77868bea7cf5", + "0x4dfe47362c005896f82ac2d02a12ee9418693cd2f5d1bcfdc321980897171993", + "0xb652952de1bf9e1174e5f6a37b069b437792672a37a9e0159c4f36b6e64306b4", + "0xb72dd6cb5df1b00dbbd84e097e2da79af2ce60559697ab4c93b0a8b85b2ee406", + "0xb96fd4a94ac30c10f757691f7f06f25a4900fe424f4eb7ccf322e2f95249b914", + "0x99fd442599036f161ccef1ae8088c5ef694c1819f5b76d9d2fa8f979935f69f8" ] }, "nodes": [ From 8a5c9a8c709ebb92fbcf7a3d085d6e2a1046fcf9 Mon Sep 17 00:00:00 2001 From: Wei Tang Date: Thu, 30 Aug 2018 01:13:45 +0800 Subject: [PATCH 15/15] evmbin: Fix gas_used issue in state root mismatch and handle output better (#9418) * Fix gas used in staterootmismatch, and print full state root hash * Write trace info for stdjson to stderr * Fix tests * Remove struct trait bound --- evmbin/src/display/std_json.rs | 74 +++++++++++++++++++++++----------- evmbin/src/info.rs | 29 ++++++------- 2 files changed, 65 insertions(+), 38 deletions(-) diff --git a/evmbin/src/display/std_json.rs b/evmbin/src/display/std_json.rs index ebf099bcf2f..6bbace4a6cd 100644 --- 
a/evmbin/src/display/std_json.rs
+++ b/evmbin/src/display/std_json.rs
@@ -28,44 +28,60 @@ use info as vm;
 
 pub trait Writer: io::Write + Send + Sized {
 	fn clone(&self) -> Self;
+	fn default() -> Self;
 }
 
 impl Writer for io::Stdout {
 	fn clone(&self) -> Self {
 		io::stdout()
 	}
+
+	fn default() -> Self {
+		io::stdout()
+	}
+}
+
+impl Writer for io::Stderr {
+	fn clone(&self) -> Self {
+		io::stderr()
+	}
+
+	fn default() -> Self {
+		io::stderr()
+	}
 }
 
 /// JSON formatting informant.
-pub struct Informant<T: Writer = io::Stdout> {
+pub struct Informant<Trace = io::Stderr, Out = io::Stdout> {
 	code: Vec<u8>,
 	instruction: u8,
 	depth: usize,
 	stack: Vec<U256>,
 	storage: HashMap<H256, H256>,
-	sink: T,
+	trace_sink: Trace,
+	out_sink: Out,
 }
 
 impl Default for Informant {
 	fn default() -> Self {
-		Self::new(io::stdout())
+		Self::new(io::stderr(), io::stdout())
 	}
 }
 
-impl<T: Writer> Informant<T> {
-	pub fn new(sink: T) -> Self {
+impl<Trace: Writer, Out: Writer> Informant<Trace, Out> {
+	pub fn new(trace_sink: Trace, out_sink: Out) -> Self {
 		Informant {
 			code: Default::default(),
 			instruction: Default::default(),
 			depth: Default::default(),
 			stack: Default::default(),
 			storage: Default::default(),
-			sink,
+			trace_sink, out_sink
 		}
 	}
 }
 
-impl<T: Writer> Informant<T> {
+impl<Trace: Writer, Out: Writer> Informant<Trace, Out> {
 	fn stack(&self) -> String {
 		let items = self.stack.iter().map(|i| format!("\"0x{:x}\"", i)).collect::<Vec<String>>();
 		format!("[{}]", items.join(","))
@@ -79,10 +95,10 @@ impl<T: Writer> Informant<T> {
 	}
 }
 
-impl<T: Writer> vm::Informant for Informant<T> {
+impl<Trace: Writer, Out: Writer> vm::Informant for Informant<Trace, Out> {
 	fn before_test(&mut self, name: &str, action: &str) {
 		writeln!(
-			&mut self.sink,
+			&mut self.out_sink,
 			"{{\"test\":\"{name}\",\"action\":\"{action}\"}}",
 			name = name,
 			action = action,
@@ -91,30 +107,38 @@ impl<T: Writer> vm::Informant for Informant<T> {
 
 	fn set_gas(&mut self, _gas: U256) {}
 
-	fn finish(result: vm::RunResult<Self::Output>) {
+	fn finish(result: vm::RunResult<<Self as trace::VMTracer>::Output>) {
+		let mut trace_sink = Trace::default();
+		let mut out_sink = Out::default();
+
 		match result {
 			Ok(success) => {
-				println!("{{\"stateRoot\":\"{:?}\"}}", success.state_root);
-				println!(
-					"{{\"output\":\"0x{output}\",\"gasUsed\":\"{gas:x}\",\"time\":{time}}}",
+				writeln!(
+					&mut trace_sink,
+					"{{\"stateRoot\":\"{:?}\"}}", success.state_root
+				).expect("The sink must be writeable.");
+				writeln!(
+					&mut out_sink,
+					"{{\"output\":\"0x{output}\",\"gasUsed\":\"0x{gas:x}\",\"time\":{time}}}",
 					output = success.output.to_hex(),
 					gas = success.gas_used,
 					time = display::as_micros(&success.time),
-				);
+				).expect("The sink must be writeable.");
 			},
 			Err(failure) => {
-				println!(
-					"{{\"error\":\"{error}\",\"gasUsed\":\"{gas:x}\",\"time\":{time}}}",
+				writeln!(
+					&mut out_sink,
+					"{{\"error\":\"{error}\",\"gasUsed\":\"0x{gas:x}\",\"time\":{time}}}",
 					error = failure.error,
 					gas = failure.gas_used,
 					time = display::as_micros(&failure.time),
-				)
+				).expect("The sink must be writeable.");
 			},
 		}
 	}
 }
 
-impl<T: Writer> trace::VMTracer for Informant<T> {
+impl<Trace: Writer, Out: Writer> trace::VMTracer for Informant<Trace, Out> {
 	type Output = ();
 
 	fn trace_next_instruction(&mut self, pc: usize, instruction: u8, current_gas: U256) -> bool {
@@ -124,7 +148,7 @@ impl<T: Writer> trace::VMTracer for Informant<T> {
 		let stack = self.stack();
 
 		writeln!(
-			&mut self.sink,
+			&mut self.trace_sink,
 			"{{\"pc\":{pc},\"op\":{op},\"opName\":\"{name}\",\"gas\":\"0x{gas:x}\",\"stack\":{stack},\"storage\":{storage},\"depth\":{depth}}}",
 			pc = pc,
 			op = instruction,
@@ -155,7 +179,7 @@ impl<T: Writer> trace::VMTracer for Informant<T> {
 	}
 
 	fn prepare_subtrace(&self, code: &[u8]) -> Self where Self: Sized {
-		let mut vm = Informant::new(self.sink.clone());
+		let mut vm = Informant::new(self.trace_sink.clone(), self.out_sink.clone());
 		vm.depth = self.depth + 1;
 		vm.code = code.to_vec();
 		vm
@@ -177,6 +201,7 @@ pub mod tests {
 
 	impl Writer for TestWriter {
 		fn clone(&self) -> Self { Clone::clone(self) }
+		fn default() -> Self { Default::default() }
 	}
 
 	impl io::Write for TestWriter {
@@ -189,10 +214,11 @@ pub mod tests {
 		}
 	}
 
-	pub fn informant() -> (Informant<TestWriter>, Arc<Mutex<Vec<u8>>>) {
-		let writer = TestWriter::default();
-		let res = writer.0.clone();
-		(Informant::new(writer), res)
+	pub fn informant() -> (Informant<TestWriter, TestWriter>, Arc<Mutex<Vec<u8>>>) {
+		let trace_writer: TestWriter = Default::default();
+		let out_writer: TestWriter = Default::default();
+		let res = trace_writer.0.clone();
+		(Informant::new(trace_writer, out_writer), res)
 	}
 
 	#[test]
diff --git a/evmbin/src/info.rs b/evmbin/src/info.rs
index 080c0c7ff2c..d9ee6729e96 100644
--- a/evmbin/src/info.rs
+++ b/evmbin/src/info.rs
@@ -81,10 +81,11 @@ pub fn run_action<T: Informant>(
 		}
 	}
 
 	run(spec, params.gas, spec.genesis_state(), |mut client| {
-		let result = client
-			.call(params, &mut trace::NoopTracer, &mut informant)
-			.map(|r| (0.into(), r.gas_left, r.return_data.to_vec()));
-		(result, informant.drain())
+		let result = match client.call(params, &mut trace::NoopTracer, &mut informant) {
+			Ok(r) => (Ok((0.into(), r.return_data.to_vec())), Some(r.gas_left)),
+			Err(err) => (Err(err), None),
+		};
+		(result.0, result.1, informant.drain())
 	})
 }
@@ -116,20 +117,20 @@ pub fn run_transaction<T: Informant>(
 	let result = run(&spec, env_info.gas_limit, pre_state, |mut client| {
 		let result = client.transact(env_info, transaction, trace::NoopTracer, informant);
 		match result {
-			TransactResult::Ok { state_root, .. } if state_root != post_root => {
+			TransactResult::Ok { state_root, gas_left, .. } if state_root != post_root => {
 				(Err(EvmTestError::PostCondition(format!(
-					"State root mismatch (got: {}, expected: {})",
+					"State root mismatch (got: 0x{:x}, expected: 0x{:x})",
 					state_root,
 					post_root,
-				))), None)
+				))), Some(gas_left), None)
 			},
 			TransactResult::Ok { state_root, gas_left, output, vm_trace, .. } => {
-				(Ok((state_root, gas_left, output)), vm_trace)
+				(Ok((state_root, output)), Some(gas_left), vm_trace)
 			},
 			TransactResult::Err { error, .. } => {
 				(Err(EvmTestError::PostCondition(format!(
 					"Unexpected execution error: {:?}", error
-				))), None)
+				))), None, None)
 			},
 		}
 	});
@@ -144,7 +145,7 @@ pub fn run<'a, F, X>(
 	pre_state: &'a pod_state::PodState,
 	run: F,
 ) -> RunResult<X> where
-	F: FnOnce(EvmTestClient) -> (Result<(H256, U256, Vec<u8>), EvmTestError>, Option<X>),
+	F: FnOnce(EvmTestClient) -> (Result<(H256, Vec<u8>), EvmTestError>, Option<U256>, Option<X>),
 {
 	let test_client = EvmTestClient::from_pod_state(spec, pre_state.clone())
 		.map_err(|error| Failure {
@@ -159,15 +160,15 @@
 	let time = start.elapsed();
 
 	match result {
-		(Ok((state_root, gas_left, output)), traces) => Ok(Success {
+		(Ok((state_root, output)), gas_left, traces) => Ok(Success {
 			state_root,
-			gas_used: initial_gas - gas_left,
+			gas_used: gas_left.map(|gas_left| initial_gas - gas_left).unwrap_or(initial_gas),
 			output,
 			time,
 			traces,
 		}),
-		(Err(error), traces) => Err(Failure {
-			gas_used: initial_gas,
+		(Err(error), gas_left, traces) => Err(Failure {
+			gas_used: gas_left.map(|gas_left| initial_gas - gas_left).unwrap_or(initial_gas),
 			error,
 			time,
 			traces,