This repository has been archived by the owner on Jun 19, 2024. It is now read-only.

Commit

alleviate softmax datatype check (#24)
zzpmiracle committed Nov 25, 2022
1 parent 862576b commit 997551d
Showing 1 changed file with 5 additions and 3 deletions.
lib/Dialect/Torch/Transforms/DecomposeComplexOps.cpp (5 additions, 3 deletions)
@@ -321,9 +321,11 @@ class DecomposeAtenSoftmaxIntOp : public OpRewritePattern<AtenSoftmaxIntOp> {
   LogicalResult matchAndRewrite(AtenSoftmaxIntOp op,
                                 PatternRewriter &rewriter) const override {
     Value self = op.self();
-    if (!op.dtype().getType().isa<Torch::NoneType>())
-      return rewriter.notifyMatchFailure(
-          op, "Unimplemented non-None dtype for softmax");
+
+    // No need to check the dtype arg here, since the dtype has already been inferred in op.getType().
+    // if (!op.dtype().getType().isa<Torch::NoneType>())
+    //   return rewriter.notifyMatchFailure(
+    //       op, "Unimplemented non-None dtype for softmax");
 
     BaseTensorType tensorType = self.getType().cast<BaseTensorType>();
     if (!tensorType.hasDtype() || !tensorType.getDtype().isa<mlir::FloatType>())
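For context, the check being relaxed gates softmax calls that pass an explicit dtype (the aten::softmax.int overload carries an optional dtype argument). A minimal PyTorch snippet of the kind of call involved, illustrative only and not taken from this repository's tests:

import torch
import torch.nn.functional as F

# softmax with a non-None dtype previously hit the
# "Unimplemented non-None dtype for softmax" match failure in this pattern;
# per the new comment, the result dtype is already reflected in op.getType(),
# so the decomposition no longer rejects such calls outright.
x = torch.randn(2, 3)
y = F.softmax(x, dim=-1, dtype=torch.float32)
print(y.dtype)  # torch.float32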
