Skip to content
This repository has been archived by the owner on Oct 19, 2024. It is now read-only.

feat(solc): add dependency graph implementation #750

Merged
merged 27 commits into from
Jan 5, 2022
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
Show all changes
27 commits
Select commit Hold shift + click to select a range
49b3e0c
docs: document public functions
mattsse Dec 27, 2021
8697605
add mod
mattsse Dec 27, 2021
79bc6b1
feat(solc): add dependency graph
mattsse Dec 29, 2021
8690a72
detect version
mattsse Dec 30, 2021
9e2a752
add into sources
mattsse Dec 30, 2021
a6c5734
fix: canonicalize temp paths
mattsse Dec 30, 2021
e00bf5d
test: add graph tests
mattsse Dec 30, 2021
97db900
chore(clippy): make clippy happy
mattsse Dec 30, 2021
bc5441a
more wasm compat
mattsse Dec 30, 2021
c4d190e
Merge branch 'master' into matt/dependency-graph
mattsse Dec 30, 2021
45f10a4
chore: update changelog
mattsse Dec 30, 2021
314a703
wasm compat
mattsse Dec 30, 2021
fcb89c7
unused
mattsse Dec 30, 2021
b213efd
fix get source fill function
mattsse Dec 30, 2021
2213b7f
Update ethers-solc/src/resolver.rs
mattsse Dec 30, 2021
eac5de2
perf: use solang_parser to trim down deps
gakonst Dec 31, 2021
5f56668
resolve graph in compile
mattsse Dec 31, 2021
7acb8a0
refactor add node function
mattsse Dec 31, 2021
eacab8f
docs: clear up comment
mattsse Dec 31, 2021
62810d4
docs: typos
mattsse Dec 31, 2021
c748663
fix: make all versions unique based on their major minor patch version
mattsse Dec 31, 2021
e0b79cd
prepare test
mattsse Jan 5, 2022
a1d8efd
Merge branch 'master' into matt/dependency-graph
mattsse Jan 5, 2022
42d96bb
docs: add more resolve docs
mattsse Jan 5, 2022
dc205cb
test: add lib change detection test
mattsse Jan 5, 2022
abd61f4
Merge branch 'master' into matt/dependency-graph
mattsse Jan 5, 2022
56fda04
test: update tests
mattsse Jan 5, 2022
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
538 changes: 535 additions & 3 deletions Cargo.lock

Large diffs are not rendered by default.

6 changes: 6 additions & 0 deletions ethers-solc/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,8 @@ tempdir = { version = "0.3.7", optional = true }
fs_extra = { version = "1.2.0", optional = true }
sha2 = { version = "0.9.8", default-features = false }
dunce = "1.0.2"
solang = { git = "https://github.com/hyperledger-labs/solang", default-features = false }
rayon = "1.5.1"

[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
home = "0.5.3"
Expand All @@ -56,6 +58,10 @@ tokio = { version = "1.12.0", features = ["full"] }
name = "compile_many"
harness = false

[[bench]]
name = "read_all"
harness = false

[[test]]
name = "project"
path = "tests/project.rs"
Expand Down
53 changes: 53 additions & 0 deletions ethers-solc/benches/read_all.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
//! Benchmarks reading many source files sequentially vs. in parallel.
#[macro_use]
extern crate criterion;

use criterion::Criterion;
use ethers_core::rand;
use ethers_solc::artifacts::Source;
use rand::{distributions::Alphanumeric, Rng};
use std::{
fs::File,
io::{BufWriter, Write},
path::{Path, PathBuf},
};

/// Compares sequential (`Source::read_all`) against parallel (`Source::par_read_all`)
/// reading of a batch of generated solidity files.
fn read_all_benchmark(c: &mut Criterion) {
    let tmp_dir = tempdir::TempDir::new("bench_read_many").unwrap();
    let paths = prepare_contracts(tmp_dir.path(), 8);

    let mut group = c.benchmark_group("read many");
    // reading files is comparatively slow, keep the sample count low
    group.sample_size(10);
    group.bench_function("sequential", |b| {
        b.iter(|| {
            Source::read_all(&paths).unwrap();
        });
    });
    group.bench_function("parallel", |b| {
        b.iter(|| {
            Source::par_read_all(&paths).unwrap();
        });
    });
}

/// Creates `num` pseudo-solidity files filled with random content under `root` and
/// returns the paths of all created files.
fn prepare_contracts(root: &Path, num: usize) -> Vec<PathBuf> {
    let mut files = Vec::with_capacity(num);
    for i in 0..num {
        // NOTE: use the loop index `i`, not `num` — using `num` would name every file
        // identically, overwrite the same file each iteration, and return `num`
        // copies of a single path, skewing the benchmark
        let path = root.join(format!("file{}.sol", i));
        let f = File::create(&path).unwrap();
        let mut writer = BufWriter::new(f);

        let mut rng = rand::thread_rng();

        // let's assume a solidity file is between 2kb and 16kb
        let n: usize = rng.gen_range(2..17);
        let s: String = rng.sample_iter(&Alphanumeric).take(n * 1024).map(char::from).collect();
        writer.write_all(s.as_bytes()).unwrap();
        writer.flush().unwrap();
        files.push(path)
    }
    files
}

// criterion harness entry points: `criterion_group!` registers the benchmark
// functions above and `criterion_main!` expands to the `main` that runs them
criterion_group!(benches, read_all_benchmark);
criterion_main!(benches);
44 changes: 42 additions & 2 deletions ethers-solc/src/artifacts.rs
Original file line number Diff line number Diff line change
Expand Up @@ -393,15 +393,36 @@ pub struct Source {
}

impl Source {
/// Heuristically measured threshold (in number of files) at which we can generally
/// expect a speedup by using rayon's `par_iter`; see `Self::read_all_files`.
pub const NUM_READ_PAR: usize = 8;

/// Reads the content of the file at the given path into a new `Source`.
pub fn read(file: impl AsRef<Path>) -> Result<Self, SolcIoError> {
    let path = file.as_ref();
    match fs::read_to_string(path) {
        Ok(content) => Ok(Self { content }),
        // attach the offending path to the io error for better diagnostics
        Err(err) => Err(SolcIoError::new(err, path)),
    }
}

/// Recursively finds all source files under the given dir path and reads them all
///
/// Delegates to `Self::read_all_files`, which reads the files in parallel when
/// there are enough of them to make it worthwhile.
pub fn read_all_from(dir: impl AsRef<Path>) -> Result<Sources, SolcIoError> {
    // NOTE(review): the scraped diff contained both the old `Self::read_all(..)`
    // call and this replacement; only the updated call is kept
    Self::read_all_files(utils::source_files(dir))
}

/// Reads all source files of the given vec
///
/// Depending on the len of the vec it will try to read the files in parallel
pub fn read_all_files(files: Vec<PathBuf>) -> Result<Sources, SolcIoError> {
use rayon::prelude::*;

if files.len() < Self::NUM_READ_PAR {
Self::read_all(files)
} else {
files
.par_iter()
.map(Into::into)
.map(|file| Self::read(&file).map(|source| (file, source)))
.collect()
}
}

/// Reads all files
Expand All @@ -417,6 +438,25 @@ impl Source {
.collect()
}

/// Parallelized version of `Self::read_all` that reads all files using a parallel iterator
///
/// NOTE: this is only expected to be faster than `Self::read_all` if the given iterator
/// contains at least several paths. see also `Self::read_all_files`.
pub fn par_read_all<T, I>(files: I) -> Result<Sources, SolcIoError>
where
    I: IntoIterator<Item = T>,
    <I as IntoIterator>::IntoIter: Send,
    T: Into<PathBuf> + Send,
{
    use rayon::{iter::ParallelBridge, prelude::ParallelIterator};
    // bridge the sequential iterator into rayon's thread pool
    let parallel = files.into_iter().par_bridge();
    parallel
        .map(|item| {
            let file: PathBuf = item.into();
            let source = Self::read(&file)?;
            Ok((file, source))
        })
        .collect()
}

/// Generate a non-cryptographically secure checksum of the file's content
pub fn content_hash(&self) -> String {
let mut hasher = md5::Md5::new();
Expand Down
50 changes: 46 additions & 4 deletions ethers-solc/src/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ use crate::{
error::{Result, SolcError, SolcIoError},
hh::HardhatArtifact,
remappings::Remapping,
CompilerOutput,
utils, CompilerOutput, Source, Sources,
};
use ethers_core::{abi::Abi, types::Bytes};
use serde::{de::DeserializeOwned, Deserialize, Serialize};
Expand Down Expand Up @@ -75,6 +75,47 @@ impl ProjectPathsConfig {
}
Ok(())
}

/// Returns all sources found under the project's configured `sources` path
pub fn read_sources(&self) -> Result<Sources> {
    tracing::trace!("reading all sources from \"{}\"", self.sources.display());
    let sources = Source::read_all_from(&self.sources)?;
    Ok(sources)
}

/// Returns all sources found under the project's configured `test` path
pub fn read_tests(&self) -> Result<Sources> {
    tracing::trace!("reading all tests from \"{}\"", self.tests.display());
    let tests = Source::read_all_from(&self.tests)?;
    Ok(tests)
}

/// Returns the combined set of solidity file paths for `Self::sources` and `Self::tests`
pub fn input_files(&self) -> Vec<PathBuf> {
    // sources first, then tests, preserving the order of each scan
    let mut files = utils::source_files(&self.sources);
    files.extend(utils::source_files(&self.tests));
    files
}

/// Returns the combined set of `Self::read_sources` + `Self::read_tests`
pub fn read_input_files(&self) -> Result<Sources> {
    let files = self.input_files();
    let sources = Source::read_all_files(files)?;
    Ok(sources)
}

/// Attempts to find the path to the real solidity file that's imported via the given `import`
/// path by applying the configured remappings and checking the library dirs
pub fn resolve_library_import(&self, import: &Path) -> Option<PathBuf> {
    // a remapping matches when the import starts with the remapping's name; the
    // remainder of the import is then appended to the remapping's target path
    let remapped = self.remappings.iter().find_map(|r| {
        let rest = import.strip_prefix(&r.name).ok()?;
        Some(Path::new(&r.path).join(rest))
    });
    match remapped {
        Some(path) => Some(self.root.join(path)),
        // no remapping applied — fall back to scanning the configured library dirs
        None => utils::resolve_library(&self.libraries, import),
    }
}
}

impl fmt::Display for ProjectPathsConfig {
Expand Down Expand Up @@ -102,9 +143,10 @@ pub enum PathStyle {
}

impl PathStyle {
/// Convert into a `ProjectPathsConfig` given the root path and based on the styled
pub fn paths(&self, root: impl AsRef<Path>) -> Result<ProjectPathsConfig> {
let root = root.as_ref();
let root = dunce::canonicalize(root).map_err(|err| SolcError::io(err, root))?;
let root = utils::canonicalize(root)?;

Ok(match self {
PathStyle::Dapptools => ProjectPathsConfig::builder()
Expand Down Expand Up @@ -215,7 +257,7 @@ impl ProjectPathsConfigBuilder {
.map(Ok)
.unwrap_or_else(std::env::current_dir)
.map_err(|err| SolcIoError::new(err, "."))?;
let root = dunce::canonicalize(&root).map_err(|err| SolcIoError::new(err, &root))?;
let root = utils::canonicalize(&root)?;
Ok(self.build_with_root(root))
}
}
Expand Down Expand Up @@ -484,7 +526,7 @@ impl<T: Into<PathBuf>> TryFrom<Vec<T>> for AllowedLibPaths {
.into_iter()
.map(|lib| {
let path: PathBuf = lib.into();
let lib = dunce::canonicalize(&path).map_err(|err| SolcIoError::new(err, path))?;
let lib = utils::canonicalize(&path)?;
Ok(lib)
})
.collect::<std::result::Result<Vec<_>, _>>()?;
Expand Down
15 changes: 10 additions & 5 deletions ethers-solc/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ use std::collections::btree_map::Entry;

pub mod cache;
pub mod hh;
mod resolver;
pub use hh::{HardhatArtifact, HardhatArtifacts};

mod compile;
Expand Down Expand Up @@ -153,8 +154,7 @@ impl<Artifacts: ArtifactOutput> Project<Artifacts> {
/// Returns all sources found under the project's configured sources path
///
/// Delegates to `ProjectPathsConfig::read_sources`.
#[tracing::instrument(skip_all, fields(name = "sources"))]
pub fn sources(&self) -> Result<Sources> {
    // NOTE(review): the scraped diff contained both the old inline
    // trace + `Source::read_all_from` body and this delegation; only the
    // updated delegation is kept
    self.paths.read_sources()
}

/// This emits the cargo [`rerun-if-changed`](https://doc.rust-lang.org/cargo/reference/build-scripts.html#cargorerun-if-changedpath) instruction.
Expand Down Expand Up @@ -204,7 +204,8 @@ impl<Artifacts: ArtifactOutput> Project<Artifacts> {
Ok(libs)
}

/// Attempts to compile the contracts found at the configured location.
/// Attempts to compile the contracts found at the configured source location, see
/// `ProjectPathsConfig::sources`.
///
/// NOTE: this does not check if the contracts were successfully compiled, see
/// `CompilerOutput::has_error` instead.
Expand Down Expand Up @@ -295,6 +296,7 @@ impl<Artifacts: ArtifactOutput> Project<Artifacts> {
Ok(compiled)
}

/// Compiles all sources with their intended `Solc` version sequentially.
#[cfg(all(feature = "svm", feature = "async"))]
fn compile_sources(
&self,
Expand All @@ -314,6 +316,9 @@ impl<Artifacts: ArtifactOutput> Project<Artifacts> {
Ok(compiled)
}

/// Compiles all sources with their intended `Solc` version in parallel.
///
/// This runs `Self::solc_jobs` parallel `solc` jobs at most.
#[cfg(all(feature = "svm", feature = "async"))]
fn compile_many(
&self,
Expand Down Expand Up @@ -929,7 +934,7 @@ mod tests {
fn test_build_many_libs() {
use super::*;

let root = dunce::canonicalize("./test-data/test-contract-libs").unwrap();
let root = utils::canonicalize("./test-data/test-contract-libs").unwrap();

let paths = ProjectPathsConfig::builder()
.root(&root)
Expand All @@ -956,7 +961,7 @@ mod tests {
fn test_build_remappings() {
use super::*;

let root = dunce::canonicalize("./test-data/test-contract-remappings").unwrap();
let root = utils::canonicalize("./test-data/test-contract-remappings").unwrap();
let paths = ProjectPathsConfig::builder()
.root(&root)
.sources(root.join("src"))
Expand Down
Loading