Skip to content

Commit

Permalink
Initial implementation of feature exclusion based on detecting dependencies of features
Browse files Browse the repository at this point in the history
  • Loading branch information
taiki-e committed Nov 8, 2020
1 parent 0d04c91 commit e31a5c5
Show file tree
Hide file tree
Showing 7 changed files with 230 additions and 90 deletions.
169 changes: 169 additions & 0 deletions src/features.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
use std::collections::{BTreeMap, BTreeSet};

use crate::{
metadata::{Dependency, Metadata},
PackageId,
Expand Down Expand Up @@ -59,3 +61,170 @@ impl Features {
self.features.iter().any(|f| f == name)
}
}

/// Drops every subset in `ps` that pairs a feature with one of its own
/// transitive dependencies (as recorded in `fd`), since enabling such a
/// subset is equivalent to enabling a smaller one that is also in the
/// powerset.
pub(crate) fn filter_powerset<'a>(
    ps: Vec<Vec<&'a str>>,
    fd: BTreeMap<&'a str, BTreeSet<&'a str>>,
) -> Vec<Vec<&'a str>> {
    let mut kept = Vec::with_capacity(ps.len());
    for subset in ps {
        // A subset is redundant when any member transitively enables another
        // member; features with no entry in `fd` enable nothing.
        let redundant = subset.iter().any(|feature| {
            fd.get(feature)
                .map_or(false, |deps| subset.iter().any(|other| deps.contains(other)))
        });
        if !redundant {
            kept.push(subset);
        }
    }
    kept
}

/// For every feature in `map`, computes the set of features it transitively
/// enables (excluding the feature itself).
pub(crate) fn feature_deps<'a>(
    map: &'a BTreeMap<String, Vec<String>>,
) -> BTreeMap<&'a str, BTreeSet<&'a str>> {
    let mut feat_deps = BTreeMap::new();
    for root in map.keys() {
        // Iterative depth-first walk over the dependency lists starting at
        // `root`; `reachable` doubles as the visited set so each feature is
        // expanded at most once.
        let mut reachable: BTreeSet<&str> = BTreeSet::new();
        let mut stack: Vec<&str> = vec![root.as_str()];
        while let Some(curr) = stack.pop() {
            if let Some(children) = map.get(curr) {
                for child in children {
                    // Skip the root itself so a cyclic definition does not
                    // record a feature as its own dependency.
                    if child != root && reachable.insert(child) {
                        stack.push(child);
                    }
                }
            }
        }
        feat_deps.insert(&**root, reachable);
    }
    feat_deps
}

/// Computes the powerset (all subsets) of `iter`, in the order produced by
/// repeatedly appending each new element to every previously built subset.
///
/// If `depth` is `Some(n)`, subsets with more than `n` elements are omitted.
pub(crate) fn powerset<T: Clone>(
    iter: impl IntoIterator<Item = T>,
    depth: Option<usize>,
) -> Vec<Vec<T>> {
    let max_len = depth.unwrap_or(usize::MAX);
    iter.into_iter().fold(vec![vec![]], |mut acc, elem| {
        // Snapshot the length so the subsets appended below are not extended
        // again within the same step.
        let len = acc.len();
        for i in 0..len {
            // Clone only subsets that will actually be kept; the previous
            // version cloned the whole accumulator up front and then built
            // and discarded every subset beyond the depth limit.
            if acc[i].len() < max_len {
                let mut extended = acc[i].clone();
                extended.push(elem.clone());
                acc.push(extended);
            }
        }
        acc
    })
}

#[cfg(test)]
mod tests {
    use super::{feature_deps, filter_powerset, powerset};
    use std::{
        collections::{BTreeMap, BTreeSet},
        iter::FromIterator,
    };

    // Shorthand for converting a `&str` into an owned `String` in fixtures.
    fn s(s: &str) -> String {
        s.into()
    }

    // Builds a `BTreeMap` literal from `(key, value)` pairs.
    macro_rules! map {
        ($(($key:expr, $value:expr)),* $(,)?) => {
            BTreeMap::from_iter(vec![$(($key.into(), $value)),*])
        };
    }

    // Builds a `BTreeSet` literal from its elements.
    macro_rules! set {
        ($($expr:expr),* $(,)?) => {
            BTreeSet::from_iter(vec![$($expr),*])
        };
    }

    #[test]
    fn feature_deps1() {
        // Dependency chain c -> b -> a: c transitively enables both a and b.
        let map = map![("a", vec![]), ("b", vec![s("a")]), ("c", vec![s("b")]),];
        let fd = feature_deps(&map);
        assert_eq!(fd, map![("a", set![]), ("b", set!["a"]), ("c", set!["a", "b"])]);
        let ps = powerset(vec!["a", "b", "c"], None);
        assert_eq!(ps, vec![
            vec![],
            vec!["a"],
            vec!["b"],
            vec!["a", "b"],
            vec!["c"],
            vec!["a", "c"],
            vec!["b", "c"],
            vec!["a", "b", "c"],
        ]);
        // Every subset pairing a feature with one of its (transitive)
        // dependencies is redundant, leaving only the singletons.
        let filtered = filter_powerset(ps, fd);
        assert_eq!(filtered, vec![vec![], vec!["a"], vec!["b"], vec!["c"]]);
    }

    #[test]
    fn powerset_full() {
        // No depth limit: all 2^4 = 16 subsets, in append order.
        let v = powerset(vec![1, 2, 3, 4], None);
        assert_eq!(v, vec![
            vec![],
            vec![1],
            vec![2],
            vec![1, 2],
            vec![3],
            vec![1, 3],
            vec![2, 3],
            vec![1, 2, 3],
            vec![4],
            vec![1, 4],
            vec![2, 4],
            vec![1, 2, 4],
            vec![3, 4],
            vec![1, 3, 4],
            vec![2, 3, 4],
            vec![1, 2, 3, 4],
        ]);
    }

    #[test]
    fn powerset_depth1() {
        // Depth 1 keeps only the empty set and the singletons.
        let v = powerset(vec![1, 2, 3, 4], Some(1));
        assert_eq!(v, vec![vec![], vec![1], vec![2], vec![3], vec![4],]);
    }

    #[test]
    fn powerset_depth2() {
        // Depth 2 drops every subset with three or more elements.
        let v = powerset(vec![1, 2, 3, 4], Some(2));
        assert_eq!(v, vec![
            vec![],
            vec![1],
            vec![2],
            vec![1, 2],
            vec![3],
            vec![1, 3],
            vec![2, 3],
            vec![4],
            vec![1, 4],
            vec![2, 4],
            vec![3, 4],
        ]);
    }

    #[test]
    fn powerset_depth3() {
        // Depth 3 drops only the full 4-element subset.
        let v = powerset(vec![1, 2, 3, 4], Some(3));
        assert_eq!(v, vec![
            vec![],
            vec![1],
            vec![2],
            vec![1, 2],
            vec![3],
            vec![1, 3],
            vec![2, 3],
            vec![1, 2, 3],
            vec![4],
            vec![1, 4],
            vec![2, 4],
            vec![1, 2, 4],
            vec![3, 4],
            vec![1, 3, 4],
            vec![2, 3, 4],
        ]);
    }
}
93 changes: 3 additions & 90 deletions src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -149,7 +149,9 @@ fn determine_kind<'a>(cx: &'a Context<'_>, id: &PackageId, progress: &mut Progre
Kind::Each { features }
}
} else if cx.feature_powerset {
let features = powerset(features, cx.depth);
let feature_deps = features::feature_deps(&package.features);
let powerset = features::powerset(features, cx.depth);
let features = features::filter_powerset(powerset, feature_deps);

if (package.features.is_empty() || !cx.include_features.is_empty()) && features.is_empty() {
progress.total += 1;
Expand Down Expand Up @@ -353,92 +355,3 @@ fn cargo_clean(cx: &Context<'_>, id: &PackageId) -> Result<()> {

line.exec()
}

// Computes all subsets of `iter`, optionally capped at `depth` elements per
// subset. NOTE(review): byte-identical to `powerset` in src/features.rs; this
// local copy is the one the commit removes in favor of the shared version.
fn powerset<T: Clone>(iter: impl IntoIterator<Item = T>, depth: Option<usize>) -> Vec<Vec<T>> {
    iter.into_iter().fold(vec![vec![]], |mut acc, elem| {
        // Extend every previously built subset with the new element.
        let ext = acc.clone().into_iter().map(|mut curr| {
            curr.push(elem.clone());
            curr
        });
        if let Some(depth) = depth {
            // Keep only extensions that stay within the depth limit.
            acc.extend(ext.filter(|f| f.len() <= depth));
        } else {
            acc.extend(ext);
        }
        acc
    })
}

// NOTE(review): these tests duplicate the powerset tests that now live in
// src/features.rs; the commit deletes them along with the local `powerset`.
#[cfg(test)]
mod tests {
    use super::powerset;

    #[test]
    fn powerset_full() {
        // No depth limit: all 2^4 = 16 subsets, in append order.
        let v = powerset(vec![1, 2, 3, 4], None);
        assert_eq!(v, vec![
            vec![],
            vec![1],
            vec![2],
            vec![1, 2],
            vec![3],
            vec![1, 3],
            vec![2, 3],
            vec![1, 2, 3],
            vec![4],
            vec![1, 4],
            vec![2, 4],
            vec![1, 2, 4],
            vec![3, 4],
            vec![1, 3, 4],
            vec![2, 3, 4],
            vec![1, 2, 3, 4],
        ]);
    }

    #[test]
    fn powerset_depth1() {
        // Depth 1 keeps only the empty set and the singletons.
        let v = powerset(vec![1, 2, 3, 4], Some(1));
        assert_eq!(v, vec![vec![], vec![1], vec![2], vec![3], vec![4],]);
    }

    #[test]
    fn powerset_depth2() {
        // Depth 2 drops every subset with three or more elements.
        let v = powerset(vec![1, 2, 3, 4], Some(2));
        assert_eq!(v, vec![
            vec![],
            vec![1],
            vec![2],
            vec![1, 2],
            vec![3],
            vec![1, 3],
            vec![2, 3],
            vec![4],
            vec![1, 4],
            vec![2, 4],
            vec![3, 4],
        ]);
    }

    #[test]
    fn powerset_depth3() {
        // Depth 3 drops only the full 4-element subset.
        let v = powerset(vec![1, 2, 3, 4], Some(3));
        assert_eq!(v, vec![
            vec![],
            vec![1],
            vec![2],
            vec![1, 2],
            vec![3],
            vec![1, 3],
            vec![2, 3],
            vec![1, 2, 3],
            vec![4],
            vec![1, 4],
            vec![2, 4],
            vec![1, 2, 4],
            vec![3, 4],
            vec![1, 3, 4],
            vec![2, 3, 4],
        ]);
    }
}
2 changes: 2 additions & 0 deletions tests/fixtures/powerset_deduplication/.cargo/config
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
[build]
target-dir = "../../../target"
5 changes: 5 additions & 0 deletions tests/fixtures/powerset_deduplication/Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

18 changes: 18 additions & 0 deletions tests/fixtures/powerset_deduplication/Cargo.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
[package]
name = "deduplication"
version = "0.1.0"
authors = ["Taiki Endo <te316e89@gmail.com>"]
publish = false

[workspace]

# Feature graph exercised by the powerset-deduplication test:
# b enables a, c enables b (and thus a), and e enables both b and d.
[features]
a = []
b = ["a"]
c = ["b"]
d = []
e = ["b", "d"]

[dependencies]

[dev-dependencies]
1 change: 1 addition & 0 deletions tests/fixtures/powerset_deduplication/src/main.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
fn main() {}
32 changes: 32 additions & 0 deletions tests/test.rs
Original file line number Diff line number Diff line change
Expand Up @@ -454,6 +454,38 @@ fn feature_powerset() {
.assert_stderr_not_contains("--features a,a");
}

#[test]
fn powerset_deduplication() {
    // Fixture features: a, b = ["a"], c = ["b"], d, e = ["b", "d"].
    // A subset that pairs a feature with one of its transitive dependencies
    // (e.g. a,b or d,e) is redundant and must not produce a cargo run, so the
    // powerset of 5 features collapses to 11 runs instead of 2^5.
    cargo_hack(["check", "--feature-powerset"])
        .test_dir("tests/fixtures/powerset_deduplication")
        .assert_success()
        .assert_stderr_contains(
            "
running `cargo check --no-default-features` on deduplication (1/11)
running `cargo check --no-default-features --features a` on deduplication (2/11)
running `cargo check --no-default-features --features b` on deduplication (3/11)
running `cargo check --no-default-features --features c` on deduplication (4/11)
running `cargo check --no-default-features --features d` on deduplication (5/11)
running `cargo check --no-default-features --features a,d` on deduplication (6/11)
running `cargo check --no-default-features --features b,d` on deduplication (7/11)
running `cargo check --no-default-features --features c,d` on deduplication (8/11)
running `cargo check --no-default-features --features e` on deduplication (9/11)
running `cargo check --no-default-features --features c,e` on deduplication (10/11)
running `cargo check --no-default-features --all-features` on deduplication (11/11)
",
        )
        // None of the redundant feature combinations may appear in stderr.
        .assert_stderr_not_contains(
            "
a,b
b,c
a,c
a,e
b,e
d,e
",
        );
}

#[test]
fn feature_powerset_depth() {
cargo_hack(["check", "--feature-powerset", "--depth", "2"])
Expand Down

0 comments on commit e31a5c5

Please sign in to comment.