
Commit c118b08

Authored by codeislife99 and Ubuntu
Support negative pad values (apache#7375)
* Support negative pad values
* Update test_op_level2.py
* Update pad.cc
* Update test_op_level2.py
* PR Comments
* Update pad.cc
* Address PR Comments
* CI Error
* CI Error
* CI Error

Co-authored-by: Ubuntu <ubuntu@ip-172-31-28-115.us-east-2.compute.internal>
1 parent f1b9663 commit c118b08

File tree

2 files changed: 43 additions & 17 deletions

* src/relay/op/nn/pad.cc
* tests/python/relay/test_op_level2.py


src/relay/op/nn/pad.cc

Lines changed: 4 additions & 5 deletions
@@ -139,14 +139,13 @@ bool PadRel(const Array<Type>& types, int num_inputs, const Attrs& attrs,
     ICHECK(width1 != nullptr);
     ICHECK(width2 != nullptr);

-    ICHECK(*width1 >= 0) << "Param width elements should be positive but first pad width at "
-                         << "index " << i << " is " << *width1 << ".";
-    ICHECK(*width2 >= 0) << "Param width elements should be positive but first pad width at "
-                         << "index " << i << " is " << *width2 << ".";
-
     if (!data->shape[i].as<tir::AnyNode>()) {
       auto padding = tir::make_const(data->shape[i].dtype(), *width1 + *width2);
       oshape.push_back(data->shape[i] + padding);
+      if (tir::as_const_int(data->shape[i])) {
+        ICHECK(topi::detail::GetConstInt(data->shape[i] + padding) >= 0)
+            << "Output shape post padding should be positive but got " << data->shape[i] + padding;
+      }
     } else {
       oshape.push_back(data->shape[i]);
     }
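
What the change does: the per-element check that every pad width is non-negative is removed. Instead, whenever an input dimension is a compile-time constant, the inferred output extent (dim + pad_before + pad_after) must not be negative; symbolic dimensions are left unchecked. A minimal Python sketch of that shape rule (the helper `infer_pad_shape` is hypothetical, for illustration only; `None` stands in for a symbolic dimension):

```python
def infer_pad_shape(in_shape, pad_width):
    """Sketch of the rule in PadRel: out = dim + pad_before + pad_after."""
    out_shape = []
    for dim, (before, after) in zip(in_shape, pad_width):
        if dim is None:  # symbolic dimension: no static check is possible
            out_shape.append(None)
            continue
        out = dim + before + after
        # Mirrors the new ICHECK: a constant output extent may not go negative.
        assert out >= 0, f"Output shape post padding should be positive but got {out}"
        out_shape.append(out)
    return out_shape

# Matches the new infer-type test below: (4, 6, 3, 5) -> (2, 6, 0, 13)
print(infer_pad_shape((4, 6, 3, 5), ((-1, -1), (2, -2), (0, -3), (4, 4))))
```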

tests/python/relay/test_op_level2.py

Lines changed: 39 additions & 12 deletions
@@ -1171,18 +1171,29 @@ def test_flatten_infer_type():

 @tvm.testing.uses_gpu
 def test_pad_infer_type():
-    # entirely concrete case
+    # entirely concrete cases
     n, c, h, w = 1, 2, 3, 4
     t = relay.var("t", relay.TensorType((n, c, h, w), "float32"))
     y = relay.nn.pad(t, ((1, 1), (2, 2), (3, 3), (4, 4)))
-    "pad_width=" in y.astext()
     yy = run_infer_type(y)
     assert yy.checked_type == relay.TensorType((3, 6, 9, 12), "float32")

+    n, c, h, w = 4, 6, 3, 5
+    t = relay.var("t", relay.TensorType((n, c, h, w), "float32"))
+    y = relay.nn.pad(t, ((-1, -1), (2, -2), (0, -3), (4, 4)), pad_mode="reflect")
+    yy = run_infer_type(y)
+    assert yy.checked_type == relay.TensorType((2, 6, 0, 13), "float32")
+
     # some symbolic values
     n, c, h, w = te.size_var("n"), 2, 3, te.size_var("w")
     t = relay.var("t", relay.TensorType((n, c, h, w), "float32"))
     y = relay.nn.pad(t, ((1, 1), (2, 2), (3, 3), (4, 4)))
     yy = run_infer_type(y)
     assert yy.checked_type == relay.TensorType((n + 2, 6, 9, w + 8), "float32")

+    n, c, h, w = te.size_var("n"), te.size_var("c"), te.size_var("h"), te.size_var("w")
+    t = relay.var("t", relay.TensorType((n, c, h, w), "float32"))
+    y = relay.nn.pad(t, ((-1, -1), (-2, -2), (1, -3), (4, 4)))
+    yy = run_infer_type(y)
+    assert yy.checked_type == relay.TensorType((n + (-2), c + (-4), h + (-2), w + 8), "float32")
+
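
The new infer-type cases pin down the intended semantics: a negative width removes elements from that edge instead of adding them, so `(0, -3)` on an extent-3 axis legally yields extent 0. A small NumPy-only illustration of the same arithmetic (independent of TVM; shapes chosen just for the example):

```python
import numpy as np

x = np.arange(12).reshape(3, 4)

# pad_width ((-1, 0), (1, -2)): drop one row at the top, pad one column
# on the left, and drop two columns on the right.
cropped = x[1:, :-2]                                  # negative widths act as slices
out = np.pad(cropped, ((0, 0), (1, 0)), "constant")   # positive widths pad with zeros
print(out.shape)  # (2, 3) == (3 - 1 + 0, 4 + 1 - 2)
```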
@@ -1189,17 +1200,33 @@ def test_pad_infer_type():

 @tvm.testing.uses_gpu
 def test_pad_run():
     def _test_run(dtype):
-        dshape = (4, 10, 7, 7)
-        x = relay.var("x", shape=dshape)
-        y = relay.nn.pad(x, ((1, 1), (2, 2), (3, 3), (4, 4)))
-        func = relay.Function([x], y)
-        data = np.random.uniform(size=dshape).astype(dtype)
-        ref_res = np.pad(data, ((1, 1), (2, 2), (3, 3), (4, 4)), "constant")
-        for target, ctx in tvm.testing.enabled_targets():
-            intrp1 = relay.create_executor("graph", ctx=ctx, target=target)
-            op_res1 = intrp1.evaluate(func)(data)
-            tvm.testing.assert_allclose(op_res1.asnumpy(), ref_res, rtol=1e-5, atol=1e-5)
+        dshape_list = [(4, 10, 7, 7), (4, 6, 3, 5)]
+        pad_list = [((1, 1), (2, 2), (3, 3), (4, 4)), ((-1, -1), (2, -2), (0, -2), (4, 4))]
+
+        for dshape, pad in zip(dshape_list, pad_list):
+            x = relay.var("x", shape=dshape)
+            y = relay.nn.pad(x, pad)
+            func = relay.Function([x], y)
+            data = np.random.uniform(size=dshape).astype(dtype)
+            mod_pad = []
+            mod_data = data
+            for axis, (pad_x, pad_y) in enumerate(pad):
+                indices = range(dshape[axis])
+                if pad_x < 0:
+                    indices = indices[abs(pad_x) :]
+                    pad_x = 0
+                if pad_y < 0:
+                    indices = indices[:pad_y]
+                    pad_y = 0
+                mod_data = np.take(mod_data, indices, axis)
+                mod_pad.append((pad_x, pad_y))
+
+            ref_res = np.pad(mod_data, tuple(mod_pad), "constant")
+            for target, ctx in tvm.testing.enabled_targets():
+                intrp1 = relay.create_executor("graph", ctx=ctx, target=target)
+                op_res1 = intrp1.evaluate(func)(data)
+                tvm.testing.assert_allclose(op_res1.asnumpy(), ref_res, rtol=1e-5, atol=1e-5)

     _test_run("float32")
     _test_run("int32")
