Delete RestrictedCanonicalizer
It doesn't work properly with the new dialect registration framework.
This was latent and was only exposed when running through npcomp-opt.
Not worth investing the brainpower to fix now.
silvasean committed Aug 27, 2021
1 parent d7320f3 commit 29e1b2f
Showing 3 changed files with 2 additions and 123 deletions.
29 changes: 0 additions & 29 deletions include/npcomp/RefBackend/Passes.td
@@ -37,33 +37,4 @@ def LowerToLLVM : Pass<"refback-lower-to-llvm", "ModuleOp"> {
let constructor = "mlir::NPCOMP::createLowerToLLVMPass();";
}

// TODO: Move this pass to upstream.
// TODO: This pass will still do "folding" on all ops.
// The applyPatternsAndFoldGreedily driver will need to be changed to restrict
// folding to the specified dialects as well.
// Perhaps a better design is having a pass that uses the conversion framework.
// The pass constructor would take a set of op names, and it would
// set up a conversion target that makes all those ops illegal, and uses
// the canonicalization patterns from those ops to legalize them.
def RestrictedCanonicalizer : Pass<"restricted-canonicalize"> {
let summary = "Canonicalize operations";
let description = [{
This pass is the same as the regular `canonicalize` pass, but it only
applies a restricted set of patterns.

This is useful when a particular canonicalization is actually needed for
correctness of a lowering flow. For such cases, running a restricted set of
canonicalizations makes it clearer which passes are needed for correctness
and which passes are "just optimizations". This helps when debugging
miscompiles and other situations where the compiler is not behaving as
expected.
}];
let constructor = "mlir::NPCOMP::createRestrictedCanonicalizerPass()";
let options = [
ListOption<"includedDialects", "included-dialects", "std::string",
"Which dialects should be canonicalized",
"llvm::cl::MiscFlags::CommaSeparated">
];
}

#endif // NPCOMP_REFBACKEND_PASSES
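
For context, the deleted pass was meant to be driven from the npcomp-opt command line via the option declared above. An illustrative invocation; the pass name and the included-dialects option come from the diff, while the dialect list, input file name, and quoting are assumptions:

    npcomp-opt -restricted-canonicalize='included-dialects=shape,std' input.mlir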
62 changes: 2 additions & 60 deletions lib/RefBackend/RefBackend.cpp
@@ -116,57 +116,6 @@ mlir::NPCOMP::createLowerAllocMemRefOpsPass() {
return std::make_unique<LowerAllocMemRefOps>();
}

//===----------------------------------------------------------------------===//
// RestrictedCanonicalizer
//===----------------------------------------------------------------------===//

namespace {
struct RestrictedCanonicalizer
: public RestrictedCanonicalizerBase<RestrictedCanonicalizer> {
void runOnOperation() override {
auto *context = &getContext();

// Find the dialects from their names.
DenseSet<StringRef> neededDialects;
for (const std::string &dialectName : includedDialects)
neededDialects.insert(dialectName);
DenseSet<Dialect *> dialectsToCanonicalize;
for (Dialect *dialect : context->getLoadedDialects()) {
if (neededDialects.count(dialect->getNamespace())) {
dialectsToCanonicalize.insert(dialect);
// Erase the dialect so that we can report an error below for any
// dialect names that are not loaded.
neededDialects.erase(dialect->getNamespace());
}
}

// Report a helpful error if a dialect is not found.
auto missingDialects = llvm::to_vector<6>(neededDialects);
if (!missingDialects.empty()) {
llvm::sort(missingDialects);
std::string buf;
llvm::raw_string_ostream os(buf);
llvm::interleaveComma(missingDialects, os);
llvm::report_fatal_error("restricted-canonicalize: unknown dialects: " +
os.str());
}

// Collect all canonicalization patterns from ops in the included dialects.
RewritePatternSet patterns(context);
for (AbstractOperation *op : context->getRegisteredOperations())
if (dialectsToCanonicalize.count(&op->dialect))
op->getCanonicalizationPatterns(patterns, context);

Operation *op = getOperation();
(void)applyPatternsAndFoldGreedily(op->getRegions(), std::move(patterns));
}
};
} // end anonymous namespace

std::unique_ptr<Pass> mlir::NPCOMP::createRestrictedCanonicalizerPass() {
return std::make_unique<RestrictedCanonicalizer>();
}

//===----------------------------------------------------------------------===//
// createRefBackendLoweringPipeline
//===----------------------------------------------------------------------===//
@@ -194,15 +143,8 @@ void mlir::NPCOMP::createRefBackendLoweringPipeline(
pm.addNestedPass<FuncOp>(createConvertShapeConstraintsPass());
// Run shape canonicalizations. In particular, this erases shape.assuming,
// now that we have converted shape constraints.
// TODO: This is kind of ugly. Either we use pass options or a constructor
// that takes C++ data structures. The former makes the pass usable on the
// command line (including reproducers), the latter makes the pass more
// convenient.
std::unique_ptr<Pass> shapeCanonicalizer =
createRestrictedCanonicalizerPass();
if (failed(shapeCanonicalizer->initializeOptions("included-dialects=shape")))
llvm::report_fatal_error("couldn't initialize restricted-canonicalize");
pm.addPass(std::move(shapeCanonicalizer));
// TODO: Don't canonicalize everything.
pm.addNestedPass<FuncOp>(createCanonicalizerPass());

// Lower shape ops to std.
pm.addPass(createConvertShapeToStandardPass());
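
The TODO removed from Passes.td suggested reimplementing this pass on top of the dialect conversion framework: take a set of op names, mark those ops illegal, and use their own canonicalization patterns to legalize them. A minimal sketch of that idea, assuming the same circa-2021 MLIR API used in the deleted code above (AbstractOperation, RewritePatternSet, applyPartialConversion); the class name, constructor, and opNames parameter are hypothetical and this is not code from the repository:

namespace {
// Hypothetical sketch: canonicalize only the named ops by making them illegal
// and letting their own canonicalization patterns legalize them away.
struct CanonicalizeOpsViaConversion
    : public PassWrapper<CanonicalizeOpsViaConversion, OperationPass<>> {
  explicit CanonicalizeOpsViaConversion(ArrayRef<std::string> opNames)
      : opNames(opNames.begin(), opNames.end()) {}

  void runOnOperation() override {
    MLIRContext *context = &getContext();
    ConversionTarget target(*context);
    RewritePatternSet patterns(context);
    for (const std::string &name : opNames) {
      // Ops named here must be rewritten away; partial conversion leaves all
      // other (unlisted) ops untouched.
      target.setOpAction(OperationName(name, context),
                         ConversionTarget::LegalizationAction::Illegal);
      if (const auto *abstractOp = AbstractOperation::lookup(name, context))
        abstractOp->getCanonicalizationPatterns(patterns, context);
    }
    if (failed(applyPartialConversion(getOperation(), target,
                                      std::move(patterns))))
      signalPassFailure();
  }

  std::vector<std::string> opNames;
};
} // end anonymous namespace

Unlike the greedy driver used by the deleted pass, a conversion-based design would also avoid the "folding on all ops" problem called out in the removed TODO, since only the explicitly illegal ops get rewritten.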
34 changes: 0 additions & 34 deletions test/RefBackend/restricted-canonicalize.mlir

This file was deleted.
