Skip to content

Commit c31512d

Browse files
author
Matthew
committed
fix axis normalization
fix lint; fix lint again
1 parent bf3b5f2 commit c31512d

File tree

3 files changed

+4
-4
lines changed

3 files changed

+4
-4
lines changed

python/tvm/relay/transform/fake_quantization_to_integer.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -139,9 +139,8 @@ def conv2d(expr, type_map):
139139
out = relay.qnn.op.conv2d(
140140
x, weight, x_t.zero_point, w_t.zero_point, x_t.scale, w_t.scale, **attrs
141141
)
142-
scale_shape = infer_shape(conv_scale)
143142
out_layout = attrs["out_layout"] if attrs["out_layout"] != "" else attrs["data_layout"]
144-
out_axis = tvm.tir.bijective_layout(out_layout, "NCHW").backward_index(list(range(4)))[1]
143+
out_axis = bijective_layout(out_layout, "NCHW").backward_index(list(range(4)))[1]
145144
return [out, TensorAffineType(conv_scale, conv_zp, out.attrs.out_dtype, out_axis.value)]
146145

147146

@@ -252,6 +251,7 @@ def clip(expr, type_map):
252251

253252
@register_fake_quantization_to_integer("nn.relu")
254253
def relu(expr, type_map):
254+
"""Rewrite a relu op"""
255255
arg = expr.args[0]
256256
t = type_map[arg]
257257
scale_shape = infer_shape(t.scale)

src/relay/qnn/op/dequantize.cc

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -54,13 +54,13 @@ bool DequantizeRel(const Array<Type>& types, int num_inputs, const Attrs& attrs,
5454
const auto* dequantize_attrs = attrs.as<DequantizeAttrs>();
5555
int axis = dequantize_attrs->axis;
5656
auto rank = static_cast<int>(data->shape.size());
57+
axis = (axis < 0) ? ((rank > 0) ? data->shape.size() + axis : 0) : axis;
5758

5859
// If zero point and scale are scalar then axis doesnt matter.
5960
bool scale_is_scalar = (types[1].as<TensorTypeNode>())->shape.size() == 0;
6061
bool zp_is_scalar = (types[2].as<TensorTypeNode>())->shape.size() == 0;
6162

6263
if (!(scale_is_scalar && zp_is_scalar)) {
63-
axis = (axis < 0) ? ((rank > 0) ? data->shape.size() + axis : 0) : axis;
6464
ICHECK_LT(axis, rank > 0 ? rank : 1) << "axis " << dequantize_attrs->axis << " is out of range";
6565
ICHECK_GE(axis, 0) << "axis " << dequantize_attrs->axis << " is out of range";
6666
}

src/relay/qnn/op/quantize.cc

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -52,13 +52,13 @@ bool QuantizeRel(const Array<Type>& types, int num_inputs, const Attrs& attrs,
5252
const auto* quantize_attrs = attrs.as<QuantizeAttrs>();
5353
int axis = quantize_attrs->axis;
5454
auto rank = static_cast<int>(data->shape.size());
55+
axis = (axis < 0) ? ((rank > 0) ? data->shape.size() + axis : 0) : axis;
5556

5657
// If zero point and scale are scalar then axis doesnt matter.
5758
bool scale_is_scalar = (types[1].as<TensorTypeNode>())->shape.size() == 0;
5859
bool zp_is_scalar = (types[2].as<TensorTypeNode>())->shape.size() == 0;
5960

6061
if (!(scale_is_scalar && zp_is_scalar)) {
61-
axis = (axis < 0) ? ((rank > 0) ? data->shape.size() + axis : 0) : axis;
6262
ICHECK_LT(axis, rank > 0 ? rank : 1) << "axis " << quantize_attrs->axis << " is out of range";
6363
ICHECK_GE(axis, 0) << "axis " << quantize_attrs->axis << " is out of range";
6464
}

0 commit comments

Comments (0)