
Commit

Merge pull request #1237 from SRetip/facade_for_dependencies
READY : Facade for dependencies
Wandalen authored Mar 22, 2024
2 parents 9ced20c + ab67573 commit afd6a8a
Showing 7 changed files with 126 additions and 64 deletions.
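The change swaps direct field access on cargo_metadata::Dependency (d.name, d.req, d.path, d.kind) for accessor calls on a workspace::Dependency facade (d.name(), d.req(), d.path(), d.kind()), so the action and entity modules no longer import cargo_metadata at all. The facade type itself sits in one of the two changed files that did not render below; the following is only a sketch of its likely surface, inferred from the call sites in this diff. The field types are assumptions (for instance, the real path() probably returns a camino::Utf8PathBuf, since one call site invokes as_std_path() on it), not the committed definition.

// Sketch only : not the committed code. The real facade presumably wraps
// cargo_metadata::Dependency; owned fields are used here so the sketch is self-contained.
use std::path::PathBuf;

/// Dependency kinds mirrored from cargo_metadata, so callers never import that crate.
#[ derive( Debug, Clone, Copy, PartialEq, Eq ) ]
pub enum DependencyKind
{
  Normal,
  Development,
  Build,
}

/// Facade over a single dependency record of a workspace package.
#[ derive( Debug, Clone ) ]
pub struct Dependency
{
  name : String,
  req : String,
  path : Option< PathBuf >, // assumption : likely camino::Utf8PathBuf in the real code
  kind : DependencyKind,
}

impl Dependency
{
  /// Crate name of the dependency.
  pub fn name( &self ) -> &String { &self.name }
  /// Version requirement, e.g. "1.0".
  pub fn req( &self ) -> &String { &self.req }
  /// Local path for path dependencies, `None` for registry dependencies.
  pub fn path( &self ) -> Option< PathBuf > { self.path.clone() }
  /// Whether the dependency is a normal, dev, or build dependency.
  pub fn kind( &self ) -> DependencyKind { self.kind }
}

With a facade of this shape in place, replacing or upgrading cargo_metadata later would touch only the facade module rather than every call site in list.rs, readme_health_table_renew.rs, package.rs, and packages.rs.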
1 change: 1 addition & 0 deletions module/move/willbe/Cargo.toml
@@ -55,6 +55,7 @@ tracing-subscriber = { version = "0.3", optional = true }
indicatif = { version = "0.17", optional = true }
prettytable-rs = "0.10"
serde_json = "1.0" # for CargoMetadata::Package::metadata (need serde_json::Value)
serde = "1.0" # for CargoMetadata::Package

## internal
crates_tools = { workspace = true }
46 changes: 21 additions & 25 deletions module/move/willbe/src/action/list.rs
@@ -24,11 +24,7 @@ mod private
};
// aaa : for Petro : don't use cargo_metadata and Package directly, use facade
// aaa : ✅
- use cargo_metadata::
- {
- Dependency,
- DependencyKind,
- };
+
use petgraph::prelude::{ Dfs, EdgeRef };
use former::Former;

@@ -312,40 +308,40 @@ mod private
visited : &mut HashSet< String >
)
{
- for dependency in package.dependencies()
+ for dependency in &package.dependencies()
{
- if dependency.path.is_some() && !args.dependency_sources.contains( &DependencySource::Local ) { continue; }
- if dependency.path.is_none() && !args.dependency_sources.contains( &DependencySource::Remote ) { continue; }
- let dep_id = format!( "{}+{}+{}", dependency.name, dependency.req, dependency.path.as_ref().map( | p | p.join( "Cargo.toml" ) ).unwrap_or_default() );
+ if dependency.path().is_some() && !args.dependency_sources.contains( &DependencySource::Local ) { continue; }
+ if dependency.path().is_none() && !args.dependency_sources.contains( &DependencySource::Remote ) { continue; }
+ let dep_id = format!( "{}+{}+{}", dependency.name(), dependency.req(), dependency.path().as_ref().map( | p | p.join( "Cargo.toml" ) ).unwrap_or_default() );

let mut temp_vis = visited.clone();
let dependency_rep = process_dependency( workspace, dependency, args, &mut temp_vis );

- match dependency.kind
+ match dependency.kind()
{
- DependencyKind::Normal if args.dependency_categories.contains( &DependencyCategory::Primary ) => dep_rep.normal_dependencies.push( dependency_rep ),
- DependencyKind::Development if args.dependency_categories.contains( &DependencyCategory::Dev ) => dep_rep.dev_dependencies.push( dependency_rep ),
- DependencyKind::Build if args.dependency_categories.contains( &DependencyCategory::Build ) => dep_rep.build_dependencies.push( dependency_rep ),
+ workspace::DependencyKind::Normal if args.dependency_categories.contains( &DependencyCategory::Primary ) => dep_rep.normal_dependencies.push( dependency_rep ),
+ workspace::DependencyKind::Development if args.dependency_categories.contains( &DependencyCategory::Dev ) => dep_rep.dev_dependencies.push( dependency_rep ),
+ workspace::DependencyKind::Build if args.dependency_categories.contains( &DependencyCategory::Build ) => dep_rep.build_dependencies.push( dependency_rep ),
_ => { visited.remove( &dep_id ); std::mem::swap( &mut temp_vis, visited ); }
}

*visited = std::mem::take( &mut temp_vis );
}
}

- fn process_dependency( workspace : &Workspace, dep : &Dependency, args : &ListOptions, visited : &mut HashSet< String > ) -> ListNodeReport
+ fn process_dependency( workspace : &Workspace, dep : &workspace::Dependency, args : &ListOptions, visited : &mut HashSet< String > ) -> ListNodeReport
{
let mut dep_rep = ListNodeReport
{
- name : dep.name.clone(),
- version : if args.info.contains( &PackageAdditionalInfo::Version ) { Some( dep.req.to_string() ) } else { None },
- path : if args.info.contains( &PackageAdditionalInfo::Path ) { dep.path.as_ref().map( | p | p.clone().into_std_path_buf() ) } else { None },
+ name : dep.name().clone(),
+ version : if args.info.contains( &PackageAdditionalInfo::Version ) { Some( dep.req().to_string() ) } else { None },
+ path : if args.info.contains( &PackageAdditionalInfo::Path ) { dep.path().as_ref().map( | p | p.clone().into_std_path_buf() ) } else { None },
normal_dependencies : vec![],
dev_dependencies : vec![],
build_dependencies : vec![],
};

- let dep_id = format!( "{}+{}+{}", dep.name, dep.req, dep.path.as_ref().map( | p | p.join( "Cargo.toml" ) ).unwrap_or_default() );
+ let dep_id = format!( "{}+{}+{}", dep.name(), dep.req(), dep.path().as_ref().map( | p | p.join( "Cargo.toml" ) ).unwrap_or_default() );
// if this is a cycle (we have visited this node before)
if visited.contains( &dep_id )
{
@@ -356,7 +352,7 @@ mod private

// if we have not visited this node before, mark it as visited
visited.insert( dep_id );
- if let Some( path ) = &dep.path
+ if let Some( path ) = &dep.path()
{
if let Some( package ) = workspace.package_find_by_manifest( path.as_std_path().join( "Cargo.toml" ) )
{
@@ -447,17 +443,17 @@ mod private
.map( | m | m[ "name" ].to_string().trim().replace( '\"', "" ) )
.unwrap_or_default();

- let dep_filter = move | _p : &WorkspacePackage, d : &Dependency |
+ let dep_filter = move | _p : &WorkspacePackage, d : &workspace::Dependency |
{
(
- args.dependency_categories.contains( &DependencyCategory::Primary ) && d.kind == DependencyKind::Normal
- || args.dependency_categories.contains( &DependencyCategory::Dev ) && d.kind == DependencyKind::Development
- || args.dependency_categories.contains( &DependencyCategory::Build ) && d.kind == DependencyKind::Build
+ args.dependency_categories.contains( &DependencyCategory::Primary ) && d.kind() == workspace::DependencyKind::Normal
+ || args.dependency_categories.contains( &DependencyCategory::Dev ) && d.kind() == workspace::DependencyKind::Development
+ || args.dependency_categories.contains( &DependencyCategory::Build ) && d.kind() == workspace::DependencyKind::Build
)
&&
(
- args.dependency_sources.contains( &DependencySource::Remote ) && d.path.is_none()
- || args.dependency_sources.contains( &DependencySource::Local ) && d.path.is_some()
+ args.dependency_sources.contains( &DependencySource::Remote ) && d.path().is_none()
+ || args.dependency_sources.contains( &DependencySource::Local ) && d.path().is_some()
)
};

16 changes: 5 additions & 11 deletions module/move/willbe/src/action/readme_health_table_renew.rs
@@ -10,11 +10,7 @@ mod private
io::{ Write, Read, Seek, SeekFrom },
collections::HashMap,
};
- use cargo_metadata::
- {
- Dependency,
- DependencyKind,
- };
+
// aaa : for Petro : don't use cargo_metadata and Package directly, use facade
// aaa : ✅

@@ -36,9 +32,7 @@ mod private
}
};
use manifest::private::repo_url;
- use workspace::Workspace;
use _path::AbsolutePath;
- use workspace::WorkspacePackage;

static TAG_TEMPLATE: std::sync::OnceLock< Regex > = std::sync::OnceLock::new();
static CLOSE_TAG: std::sync::OnceLock< Regex > = std::sync::OnceLock::new();
@@ -328,23 +322,23 @@ mod private
}

/// Return topologically sorted modules name, from packages list, in specified directory.
- fn directory_names( path : PathBuf, packages : &[ WorkspacePackage ] ) -> Result< Vec< String > >
+ fn directory_names( path : PathBuf, packages : &[ workspace::WorkspacePackage ] ) -> Result< Vec< String > >
{
let path_clone = path.clone();
- let module_package_filter: Option< Box< dyn Fn( &WorkspacePackage ) -> bool > > = Some
+ let module_package_filter: Option< Box< dyn Fn( &workspace::WorkspacePackage ) -> bool > > = Some
(
Box::new
(
move | p |
p.publish().is_none() && p.manifest_path().starts_with( &path )
)
);
- let module_dependency_filter: Option< Box< dyn Fn( &WorkspacePackage, &Dependency) -> bool > > = Some
+ let module_dependency_filter: Option< Box< dyn Fn( &workspace::WorkspacePackage, &workspace::Dependency ) -> bool > > = Some
(
Box::new
(
move | _, d |
- d.path.is_some() && d.kind != DependencyKind::Development && d.path.as_ref().unwrap().starts_with( &path_clone )
+ d.path().is_some() && d.kind() != workspace::DependencyKind::Development && d.path().as_ref().unwrap().starts_with( &path_clone )
)
);
let module_packages_map = packages::filter
11 changes: 5 additions & 6 deletions module/move/willbe/src/entity/package.rs
@@ -10,7 +10,6 @@ mod private
use std::fmt::Formatter;
use std::hash::Hash;
use std::path::PathBuf;
- use cargo_metadata::{ Dependency, DependencyKind };

use process_tools::process;
use manifest::{ Manifest, ManifestError };
@@ -763,14 +762,14 @@ mod private
}
}

- impl From< &Dependency > for CrateId
+ impl From< &workspace::Dependency > for CrateId
{
- fn from( value : &Dependency ) -> Self
+ fn from( value : &workspace::Dependency ) -> Self
{
Self
{
- name : value.name.clone(),
- path : value.path.clone().map( | path | AbsolutePath::try_from( path ).unwrap() ),
+ name : value.name().clone(),
+ path : value.path().clone().map( | path | AbsolutePath::try_from( path ).unwrap() ),
}
}
}
@@ -803,7 +802,7 @@ mod private
let deps = package
.dependencies()
.iter()
- .filter( | dep | ( with_remote || dep.path.is_some() ) && ( with_dev || dep.kind != DependencyKind::Development ) )
+ .filter( | dep | ( with_remote || dep.path().is_some() ) && ( with_dev || dep.kind() != workspace::DependencyKind::Development ) )
.map( CrateId::from )
.collect::< HashSet< _ > >();

4 changes: 2 additions & 2 deletions module/move/willbe/src/entity/packages.rs
@@ -6,8 +6,8 @@ mod private
fmt::Formatter,
collections::{ HashMap, HashSet },
};
- use cargo_metadata::{ Dependency };
use workspace::WorkspacePackage;
+ use crate::workspace::Dependency;

/// Type aliasing for String
pub type PackageName = String;
@@ -89,7 +89,7 @@ mod private
package.dependencies()
.iter()
.filter( | &d | dependency_filter( package, d ) )
- .map( | d | d.name.clone() )
+ .map( | d | d.name().clone() )
.collect::< HashSet< _ > >()
)
)
