
Commit bb4fa5f

Matthew Brookhart committed: fix lint
1 parent: 89cf38f

File tree

5 files changed: 20 additions, 25 deletions

python/tvm/relay/frontend/onnx.py
python/tvm/topi/cuda/nms.py
src/relay/backend/vm/lambda_lift.cc
src/relay/op/tensor/transform.h
tests/python/frontend/onnx/test_forward.py

python/tvm/relay/frontend/onnx.py
Lines changed: 0 additions & 4 deletions

@@ -2235,13 +2235,9 @@ def conditionally_squeeze_scalar(x):
         one = _op.const(np.array([1]), dtype="int64")
         two = _op.const(np.array([2]), dtype="int64")
         three = _op.const(np.array([3]), dtype="int64")
-        two_ones = _op.const(np.array([1, 1]), dtype="int64")
         three_ones = _op.const(np.array([1, 1, 1]), dtype="int64")
         four_ones = _op.const(np.array([1, 1, 1, 1]), dtype="int64")

-        def pad_last_dim(x):
-            return _op.expand_dims(x, -1, 1)
-
         # First Loop Vars
         i = _expr.var("i", shape=(1,), dtype="int64")
         scores_var = _expr.var("scores_var", shape=(_ty.Any(), _ty.Any(), _ty.Any()), dtype=dtype)
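The two deletions in this hunk, the two_ones constant and the pad_last_dim helper, were defined but never referenced in the NMS loop that follows, which is presumably what the lint run flagged. For reference, a minimal standalone sketch of what the removed helper expressed, written against the public tvm.relay API rather than the frontend's internal _op alias (the input shape is an arbitrary example, not taken from the original code):

import numpy as np
import tvm
from tvm import relay

# expand_dims(x, -1, 1) appends a single trailing axis, e.g. (3, 5) -> (3, 5, 1).
x = relay.var("x", shape=(3, 5), dtype="int64")
padded = relay.expand_dims(x, axis=-1, num_newaxis=1)
mod = tvm.IRModule.from_expr(relay.Function([x], padded))

data = np.zeros((3, 5), dtype="int64")
out = relay.create_executor("graph", mod=mod).evaluate()(data)
print(out.shape)  # (3, 5, 1)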

python/tvm/topi/cuda/nms.py
Lines changed: 0 additions & 4 deletions

@@ -98,10 +98,6 @@ def get_valid_counts_ir(
     valid_count = ib.buffer_ptr(valid_count)
     out = ib.buffer_ptr(out)
     out_indices = ib.buffer_ptr(out_indices)
-    atomic_add_return = ib.allocate(
-        valid_count.dtype, (1,), name="atomic_add_return", scope="local"
-    )
-    one_count = tvm.tir.const(1, dtype=valid_count.dtype)
     one = tvm.tir.const(1, dtype=out.dtype)
     if isinstance(score_threshold, float):
         score_threshold = tvm.ir.make_node("FloatImm", dtype="float32", value=score_threshold)
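The four removed lines appear to be leftovers that nothing in the IR-builder body references anymore: atomic_add_return and one_count were set up for an atomic accumulation that get_valid_counts_ir no longer emits. As a rough, hypothetical illustration of the construct being deleted (a standalone ir_builder snippet, not code from this file), allocating a one-element local buffer and storing a constant into it looks like this:

import tvm

ib = tvm.tir.ir_builder.create()
# One-element buffer in "local" scope, mirroring the removed atomic_add_return.
atomic_add_return = ib.allocate("int32", (1,), name="atomic_add_return", scope="local")
one_count = tvm.tir.const(1, dtype="int32")
atomic_add_return[0] = one_count  # emits a buffer store into the local allocation
stmt = ib.get()  # the assembled TIR statement, wrapped in the Allocate node
print(stmt)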

src/relay/backend/vm/lambda_lift.cc
Lines changed: 14 additions & 12 deletions

@@ -115,21 +115,21 @@ class LambdaLifter : public ExprMutator {
     Array<Var> typed_captured_vars;
     Map<Var, Expr> rebinding_map;
     for (auto free_var : captured_vars) {
-        auto var = Var(free_var->name_hint(), free_var->checked_type());
-        typed_captured_vars.push_back(var);
-        rebinding_map.Set(free_var, var);
+      auto var = Var(free_var->name_hint(), free_var->checked_type());
+      typed_captured_vars.push_back(var);
+      rebinding_map.Set(free_var, var);
     }

     if (recursive) {
       if (!captured_vars.empty()) {
-          Array<Expr> fvs;
-          for (auto fv : captured_vars) {
-            fvs.push_back(fv);
-          }
-          lambda_map_.emplace(letrec_.back(), Call(global, fvs));
-        } else {
-          lambda_map_.emplace(letrec_.back(), global);
+        Array<Expr> fvs;
+        for (auto fv : captured_vars) {
+          fvs.push_back(fv);
         }
+        lambda_map_.emplace(letrec_.back(), Call(global, fvs));
+      } else {
+        lambda_map_.emplace(letrec_.back(), global);
+      }
     }

     auto body = Downcast<Function>(ExprMutator::VisitExpr_(func_node));
@@ -173,10 +173,12 @@ class LambdaLifter : public ExprMutator {
       // construct the "closure" function with fully annotated arguments, no longer relying
       // on type inference.
       auto before = Downcast<Function>(body)->params.size();
-      auto rebound_body = Function(func->params, Bind(body->body, rebinding_map), func->ret_type, func->type_params, func->attrs, func->span);
+      auto rebound_body = Function(func->params, Bind(body->body, rebinding_map), func->ret_type,
+                                   func->type_params, func->attrs, func->span);
       auto after = Downcast<Function>(rebound_body)->params.size();
       CHECK_EQ(before, after);
-      lifted_func = Function(typed_captured_vars, rebound_body, func->func_type_annotation(), free_type_vars);
+      lifted_func =
+          Function(typed_captured_vars, rebound_body, func->func_type_annotation(), free_type_vars);
       lifted_func = MarkClosure(lifted_func);
     }


src/relay/op/tensor/transform.h
Lines changed: 4 additions & 5 deletions

@@ -44,16 +44,15 @@ template <typename AttrType>
 bool ConcatenateRel(const Array<Type>& types, int num_inputs, const Attrs& attrs,
                     const TypeReporter& reporter) {
   // types: [data, result]
-  ICHECK_EQ(types.size(), 2)
-      << "the arity of concatenate is 2, not " << types.size();
+  ICHECK_EQ(types.size(), 2) << "the arity of concatenate is 2, not " << types.size();
   /* If we receive a tuple we can continue, if we receive
    * anything but an incomplete type we should signal an
    * error.
   */
   const auto* tensor_tuple = types[0].as<TupleTypeNode>();
   if (tensor_tuple == nullptr) {
     reporter->GetDiagCtx().EmitFatal(
-            Diagnostic::Error(reporter->GetSpan())
+        Diagnostic::Error(reporter->GetSpan())
         << "concatenate requires a tuple of tensors as the first argument, found "
         << PrettyPrint(types[0]));
     return false;
@@ -63,8 +62,8 @@ bool ConcatenateRel(const Array<Type>& types, int num_inputs, const Attrs& attrs

   const auto* param = attrs.as<AttrType>();
   if (param == nullptr) {
-    reporter->GetDiagCtx().EmitFatal(
-        Diagnostic::Error(reporter->GetSpan()) << "the call attributes are not defined");
+    reporter->GetDiagCtx().EmitFatal(Diagnostic::Error(reporter->GetSpan())
+                                     << "the call attributes are not defined");
     return false;
   }


tests/python/frontend/onnx/test_forward.py
Lines changed: 2 additions & 0 deletions

@@ -55,6 +55,7 @@ def get_tvm_output_with_vm(
     )

     from tvm.relay import transform
+
     # print(mod.astext(show_meta_data=True))
     # self.mod = transform.AnnotateSpans()(mod)
     # print(mod.astext(show_meta_data=False))
@@ -3940,4 +3941,5 @@ def test_loop():
 if __name__ == "__main__":
     import sys
     import pytest
+
     pytest.main(sys.argv)
