Commit: lint
hzfan committed Jan 25, 2021
1 parent 56eb07d commit 455f3ff
Showing 2 changed files with 1 addition and 33 deletions.
18 changes: 1 addition & 17 deletions src/arith/solve_linear_equation.cc
@@ -427,7 +427,7 @@ IntConstraintsTransform SolveLinearEquations(const IntConstraints& system_to_sol

// We have to transform ranges of the old variables into relations over new variables because
// new ranges are not enough usually.
-  for (const auto& old_var: system_to_solve->variables) {
+  for (const auto& old_var : system_to_solve->variables) {
if (system_to_solve->ranges.find(old_var) != system_to_solve->ranges.end()) {
const Range& old_range = system_to_solve->ranges.at(old_var);
PrimExpr express_by_new_vars = old_to_new_map.at(old_var);
@@ -442,22 +442,6 @@ IntConstraintsTransform SolveLinearEquations(const IntConstraints& system_to_sol
}
}
}
// for (const auto& p : system_to_solve->ranges) {
// const Var& old_var = p.first;
// const Range& old_range = p.second;
// if (old_to_new_map.count(old_var)) {
// PrimExpr express_by_new_vars = old_to_new_map[old_var];
// PrimExpr lower_cond = analyzer_solution.Simplify(old_range->min <= express_by_new_vars);
// PrimExpr upper_cond =
// analyzer_solution.Simplify(express_by_new_vars < old_range->min + old_range->extent);
// if (!tir::is_const_int(lower_cond, 1)) {
// new_relations.push_back(lower_cond);
// }
// if (!tir::is_const_int(upper_cond, 1)) {
// new_relations.push_back(upper_cond);
// }
// }
// }

// Add the rest conditions
for (const PrimExpr& cond : rest) {
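The comment retained in the hunk above explains that ranges of the original variables are re-emitted as relations over the new variables, because plain ranges on the new variables usually cannot capture them. As a hedged illustration (not part of this commit), the sketch below exercises that path through the Python binding tvm.arith.solve_linear_equations; the field names used on the result (dst, src_to_dst, relations) reflect my reading of IntConstraintsTransform and should be treated as assumptions.

```python
# A minimal sketch, assuming tvm.arith.solve_linear_equations and the
# IntConstraintsTransform fields (dst, src_to_dst, ...) behave as described.
import tvm
from tvm import te, arith

x, y = te.var("x"), te.var("y")
ranges = {
    x: tvm.ir.Range.from_min_extent(0, 16),
    y: tvm.ir.Range.from_min_extent(0, 16),
}

# One linear equation over two bounded variables: x + y == 10.
transform = arith.solve_linear_equations([tvm.tir.EQ(x + y, 10)], [x, y], ranges)

# Bounds on the old variables that cannot be expressed as plain ranges of the
# new variables are expected to surface as extra relations in the solved
# system, which is what the loop over system_to_solve->variables emits.
print(transform.dst.variables)   # new variables
print(transform.dst.ranges)      # ranges of the new variables
print(transform.dst.relations)   # leftover conditions, e.g. old-variable bounds
print(transform.src_to_dst)      # old variable -> expression in new variables
```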
16 changes: 0 additions & 16 deletions tests/python/unittest/test_te_autodiff.py
@@ -343,23 +343,7 @@ def test_reduction_init():
check_grad(B, A0)


def test_stable():
X = te.placeholder((32, 512, 16, 16), name="X")
W = te.placeholder((1024, 512, 1, 1), name="W")
strides, padding, dilation = 2, 0, 1
R = topi.nn.conv2d(X, W, strides, padding, dilation)
ones = topi.full_like(R, 1.0)
grads = te.gradient(R, [X], head=ones)
dag = tvm.auto_scheduler.ComputeDAG(grads)
repeat = 100
for i in range(repeat):
grads = te.gradient(R, [X], head=ones)
new_dag = tvm.auto_scheduler.ComputeDAG(grads)
assert str(dag) == str(new_dag)


if __name__ == "__main__":
test_basic_operation()
test_topi()
test_stride_dilation()
test_stable()
