[benchmarks] Add no-skip argument. (pytorch#6484)
ysiraichi authored and amithrm committed Mar 1, 2024
1 parent 8d6de0a commit ea4bafe
Showing 1 changed file with 16 additions and 4 deletions: benchmarks/experiment_runner.py
@@ -111,10 +111,17 @@ def generate_and_run_all_configs(self):
           logger.info(f"SKIP already completed benchmark")
           continue
 
-        # Skip unsupported config.
-        if not self.model_loader.is_compatible(benchmark_model,
-                                               benchmark_experiment,
-                                               self._args.strict_compatible):
+        # Check if we should execute or skip the current configuration.
+        # A configuration SHOULD be skipped if and only if:
+        #
+        # 1. --no-skip was not specified; AND
+        #
+        # 2. the model is not compatible with the experiment configuration
+        #
+        # Otherwise, we should go ahead and execute it.
+        if (not self._args.no_skip and not self.model_loader.is_compatible(
+            benchmark_model, benchmark_experiment,
+            self._args.strict_compatible)):
           logger.warning("SKIP incompatible model and experiment configs.")
           self._save_results(benchmark_experiment.to_dict(),
                              benchmark_model.to_dict(), {"error": "SKIP"})
@@ -881,6 +888,11 @@ def __str__(self):
       action="store_true",
       help="Strictly skips some models including models without installation file or causing stackdump.",
   )
+  parser.add_argument(
+      "--no-skip",
+      action="store_true",
+      help="Do not skip any model.",
+  )
   return parser.parse_args(args)
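
For context, the decision the new flag controls can be summarized by a minimal sketch. Assumption: should_skip is a hypothetical helper written here for illustration only; it is not a function in experiment_runner.py and simply mirrors the condition introduced in the diff above.

# Illustrative only: `should_skip` mirrors the
# `not self._args.no_skip and not ...is_compatible(...)` condition from this commit.
def should_skip(no_skip: bool, is_compatible: bool) -> bool:
  # A configuration is skipped only when --no-skip was NOT passed
  # AND the model/experiment pair is reported as incompatible.
  return not no_skip and not is_compatible

# Incompatible configuration on a normal run: skipped.
assert should_skip(no_skip=False, is_compatible=False)
# The same configuration with --no-skip: executed anyway.
assert not should_skip(no_skip=True, is_compatible=False)
# Compatible configurations run regardless of the flag.
assert not should_skip(no_skip=False, is_compatible=True)

One consequence of the short-circuit in the new condition: when --no-skip is passed, is_compatible is never consulted, so the --strict-compatible filtering is effectively bypassed as well.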

