diff --git a/src/tools/rustc-perf b/src/tools/rustc-perf
index c64bb60dd1636..d5055e78042c7 160000
--- a/src/tools/rustc-perf
+++ b/src/tools/rustc-perf
@@ -1 +1 @@
-Subproject commit c64bb60dd1636922b1ccbb82867bed934a99dbcb
+Subproject commit d5055e78042c739deeb3fe0bef83fde0e5cc2594
diff --git a/src/tools/rustc-perf-wrapper/src/main.rs b/src/tools/rustc-perf-wrapper/src/main.rs
index 991f4ea15ed63..951d36b788b53 100644
--- a/src/tools/rustc-perf-wrapper/src/main.rs
+++ b/src/tools/rustc-perf-wrapper/src/main.rs
@@ -1,3 +1,4 @@
+use std::fs::create_dir_all;
 use std::path::PathBuf;
 use std::process::Command;
 
@@ -17,9 +18,6 @@ pub struct Args {
     #[clap(subcommand)]
     cmd: PerfCommand,
 
-    #[clap(flatten)]
-    opts: SharedOpts,
-
     #[clap(flatten)]
     ctx: BuildContext,
 }
@@ -28,22 +26,37 @@ enum PerfCommand {
     /// Run `profile_local eprintln`.
     /// This executes the compiler on the given benchmarks and stores its stderr output.
-    Eprintln,
+    Eprintln {
+        #[clap(flatten)]
+        opts: SharedOpts,
+    },
     /// Run `profile_local samply`
     /// This executes the compiler on the given benchmarks and profiles it with `samply`.
     /// You need to install `samply`, e.g. using `cargo install samply`.
-    Samply,
+    Samply {
+        #[clap(flatten)]
+        opts: SharedOpts,
+    },
     /// Run `profile_local cachegrind`.
     /// This executes the compiler on the given benchmarks under `Cachegrind`.
-    Cachegrind,
-}
-
-impl PerfCommand {
-    fn is_profiling(&self) -> bool {
-        match self {
-            PerfCommand::Eprintln | PerfCommand::Samply | PerfCommand::Cachegrind => true,
-        }
-    }
+    Cachegrind {
+        #[clap(flatten)]
+        opts: SharedOpts,
+    },
+    Benchmark {
+        /// Identifier to associate benchmark results with
+        id: String,
+
+        #[clap(flatten)]
+        opts: SharedOpts,
+    },
+    Compare {
+        /// The name of the base artifact to be compared.
+        base: String,
+
+        /// The name of the modified artifact to be compared.
+        modified: String,
+    },
 }
 
 #[derive(Debug, clap::Parser)]
 struct SharedOpts {
@@ -52,6 +65,11 @@ struct SharedOpts {
     /// If unspecified, all benchmarks will be executed.
     #[clap(long, global = true, value_delimiter = ',')]
     include: Vec<String>,
+
+    /// Select the benchmarks matching a prefix in this comma-separated list that you don't want to run.
+    #[clap(long, global = true, value_delimiter = ',')]
+    exclude: Vec<String>,
+
     /// Select the scenarios that should be benchmarked.
     #[clap(
         long,
@@ -87,35 +105,67 @@ fn main() {
 fn run(args: Args) {
     let mut cmd = Command::new(args.ctx.collector);
+    let db_path = args.ctx.results_dir.join("results.db");
+
     match &args.cmd {
-        PerfCommand::Eprintln => {
-            cmd.arg("profile_local").arg("eprintln");
+        PerfCommand::Eprintln { opts }
+        | PerfCommand::Samply { opts }
+        | PerfCommand::Cachegrind { opts } => {
+            cmd.arg("profile_local");
+            cmd.arg(match &args.cmd {
+                PerfCommand::Eprintln { .. } => "eprintln",
+                PerfCommand::Samply { .. } => "samply",
+                PerfCommand::Cachegrind { .. } => "cachegrind",
+                _ => unreachable!(),
+            });
+
+            cmd.arg("--out-dir").arg(&args.ctx.results_dir);
+
+            apply_shared_opts(&mut cmd, opts);
+            execute_benchmark(&mut cmd, &args.ctx.compiler);
+
+            println!("You can find the results at `{}`", args.ctx.results_dir.display());
         }
-        PerfCommand::Samply => {
-            cmd.arg("profile_local").arg("samply");
+        PerfCommand::Benchmark { id, opts } => {
+            cmd.arg("bench_local");
+            cmd.arg("--db").arg(&db_path);
+            cmd.arg("--id").arg(id);
+
+            apply_shared_opts(&mut cmd, opts);
+
+            create_dir_all(&args.ctx.results_dir).unwrap();
+            execute_benchmark(&mut cmd, &args.ctx.compiler);
         }
-        PerfCommand::Cachegrind => {
-            cmd.arg("profile_local").arg("cachegrind");
-        }
+        PerfCommand::Compare { base, modified } => {
+            cmd.arg("bench_cmp");
+            cmd.arg("--db").arg(&db_path);
+            cmd.arg(base).arg(modified);
+
+            create_dir_all(&args.ctx.results_dir).unwrap();
+            cmd.status().expect("error while running rustc-perf bench_cmp");
+        }
     }
-
-    if args.cmd.is_profiling() {
-        cmd.arg("--out-dir").arg(&args.ctx.results_dir);
-    }
+}
 
-    if !args.opts.include.is_empty() {
-        cmd.arg("--include").arg(args.opts.include.join(","));
+fn apply_shared_opts(cmd: &mut Command, opts: &SharedOpts) {
+    if !opts.include.is_empty() {
+        cmd.arg("--include").arg(opts.include.join(","));
     }
-    if !args.opts.profiles.is_empty() {
+    if !opts.exclude.is_empty() {
+        cmd.arg("--exclude").arg(opts.exclude.join(","));
+    }
+    if !opts.profiles.is_empty() {
         cmd.arg("--profiles")
-            .arg(args.opts.profiles.iter().map(|p| p.to_string()).collect::<Vec<_>>().join(","));
+            .arg(opts.profiles.iter().map(|p| p.to_string()).collect::<Vec<_>>().join(","));
     }
-    if !args.opts.scenarios.is_empty() {
+    if !opts.scenarios.is_empty() {
         cmd.arg("--scenarios")
-            .arg(args.opts.scenarios.iter().map(|p| p.to_string()).collect::<Vec<_>>().join(","));
+            .arg(opts.scenarios.iter().map(|p| p.to_string()).collect::<Vec<_>>().join(","));
     }
-    cmd.arg(&args.ctx.compiler);
+}
 
-    println!("Running `rustc-perf` using `{}`", args.ctx.compiler.display());
+fn execute_benchmark(cmd: &mut Command, compiler: &PathBuf) {
+    cmd.arg(compiler);
+    println!("Running `rustc-perf` using `{}`", compiler.display());
 
     const MANIFEST_DIR: &str = env!("CARGO_MANIFEST_DIR");
 
@@ -125,8 +175,4 @@ fn run(args: Args) {
     // with compile-time benchmarks.
     let cmd = cmd.current_dir(rustc_perf_dir);
    cmd.status().expect("error while running rustc-perf collector");
-
-    if args.cmd.is_profiling() {
-        println!("You can find the results at `{}`", args.ctx.results_dir.display());
-    }
 }
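For readers unfamiliar with the clap pattern used above: `SharedOpts` moves from the top-level `Args` struct into each subcommand variant via `#[clap(flatten)]`, so the include/exclude/profile/scenario flags are accepted only by the subcommands that actually use them (`compare` takes none). Below is a minimal, self-contained sketch of the same pattern, not the wrapper itself; it assumes clap 4 with the `derive` feature, drops `global = true` for brevity, and all names (`Cmd`, the field names) are illustrative:

```rust
use clap::Parser;

/// Flags shared by several subcommands.
#[derive(Debug, clap::Args)]
struct SharedOpts {
    /// Comma-separated list of benchmark name prefixes to run.
    #[clap(long, value_delimiter = ',')]
    include: Vec<String>,

    /// Comma-separated list of benchmark name prefixes to skip.
    #[clap(long, value_delimiter = ',')]
    exclude: Vec<String>,
}

#[derive(Debug, clap::Subcommand)]
enum Cmd {
    /// Flattens the shared flags, so `benchmark my-id --include hello` parses.
    Benchmark {
        id: String,
        #[clap(flatten)]
        opts: SharedOpts,
    },
    /// Takes no shared flags, so `compare a b --include hello` is rejected,
    /// which would not be the case with `SharedOpts` flattened at the top level.
    Compare { base: String, modified: String },
}

#[derive(Debug, Parser)]
struct Args {
    #[clap(subcommand)]
    cmd: Cmd,
}

fn main() {
    // e.g. `prog benchmark my-id --include helloworld,regex`
    println!("{:?}", Args::parse());
}
```

Note that the `_ => unreachable!()` in the wrapper's inner `match` is needed because that inner match on `&args.cmd` is independent of the outer one, so the compiler cannot see that only the three profiling variants reach it.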
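In collector terms, the two new arms boil down to `bench_local` and `bench_cmp` invocations that share one database file under the results directory, presumably why both arms call `create_dir_all` before the collector opens `results.db`. The "You can find the results at ..." message also moves into the profiling arm, since benchmark and comparison results land in the database rather than in `--out-dir`. A rough sketch of the composed command lines follows; the binary name and paths are placeholders standing in for the real `BuildContext` values, not values from this PR:

```rust
use std::process::Command;

fn main() {
    // `benchmark <id>` composes roughly this collector invocation:
    //   <collector> bench_local --db <results_dir>/results.db --id <id> <compiler>
    let mut bench = Command::new("collector"); // placeholder for args.ctx.collector
    bench
        .arg("bench_local")
        .arg("--db")
        .arg("results/results.db") // placeholder for results_dir.join("results.db")
        .arg("--id")
        .arg("my-run")
        .arg("stage1/bin/rustc"); // placeholder for args.ctx.compiler

    // `compare <base> <modified>` composes roughly:
    //   <collector> bench_cmp --db <results_dir>/results.db <base> <modified>
    let mut cmp = Command::new("collector");
    cmp.arg("bench_cmp").arg("--db").arg("results/results.db").arg("base-run").arg("my-run");

    // Printed instead of spawned, so the sketch has no side effects.
    println!("{bench:?}");
    println!("{cmp:?}");
}
```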