Skip to content

Commit

Permalink
Pad Operator Type Support (apache#12035)
Browse files Browse the repository at this point in the history
* fix missing data type inference for the pad operator

* add support for int types

* add tests for all types

* fix gpu type switch

* remove integer support

* fix python op test style issues

* fix type bug in python tests
  • Loading branch information
sbodenstein authored and nswamy committed Aug 20, 2018
1 parent 605c569 commit b03227d
Show file tree
Hide file tree
Showing 2 changed files with 25 additions and 10 deletions.
11 changes: 11 additions & 0 deletions src/operator/pad-inl.h
Original file line number Diff line number Diff line change
Expand Up @@ -189,6 +189,17 @@ class PadProp : public OperatorProperty {
return param_.__DICT__();
}

bool InferType(std::vector<int> *in_type,
std::vector<int> *out_type,
std::vector<int> *aux_type) const override {
int dtype = (*in_type)[0];
type_assign(&dtype, (*out_type)[0]);

TYPE_ASSIGN_CHECK(*in_type, 0, dtype);
TYPE_ASSIGN_CHECK(*out_type, 0, dtype);
return dtype != -1;
}

bool InferShape(std::vector<TShape> *in_shape, std::vector<TShape> *out_shape,
std::vector<TShape> *aux_shape) const override {
using namespace mshadow;
Expand Down
24 changes: 14 additions & 10 deletions tests/python/unittest/test_operator.py
Original file line number Diff line number Diff line change
Expand Up @@ -3008,16 +3008,16 @@ def test_roipooling():
numeric_eps=1e-4, rtol=1e-1, atol=1E-4)


def check_pad_with_shape(shape, xpu, pad_width, mode):
def check_pad_with_shape(shape, xpu, pad_width, mode, dtype="float64"):
# bind with label
X = mx.symbol.Variable('X')
X = mx.symbol.Variable('X', dtype=dtype)
Y = mx.symbol.Pad(data=X, mode=mode, pad_width=pad_width)
x = mx.random.uniform(-1, 1, shape, ctx=mx.cpu()).copyto(xpu)
x = mx.random.uniform(-1, 1, shape, ctx=mx.cpu(), dtype=dtype).copyto(xpu)
# numpy result
pad_grouped = list(zip(*[iter(list(pad_width))] * 2))
np_out = np.pad(x.asnumpy(), pad_grouped, mode)
# mxnet result
grad = mx.nd.empty(shape, ctx = xpu)
grad = mx.nd.empty(shape, ctx = xpu, dtype=dtype)
exec1 = Y.bind(xpu, args = [x], args_grad = {'X': grad})
exec1.forward(is_train=True)
out = exec1.outputs[0].asnumpy()
Expand All @@ -3029,16 +3029,20 @@ def check_pad_with_shape(shape, xpu, pad_width, mode):

@with_seed()
def test_pad():
    # Exercise the Pad operator for both a 4-D and a 5-D input, across all
    # three padding modes and every supported dtype.
    # note: this op doesn't support ints yet. Add tests when supported
    ctx = default_context()
    shape1 = (2, 3, 3, 5)
    pad1 = (0, 0, 0, 0, 1, 2, 3, 4)
    shape2 = (2, 3, 3, 5, 4)
    pad2 = (0, 0, 0, 0, 1, 2, 3, 4, 3, 1)
    dtypes = ["float16", "float32", "float64"]
    for dtype in dtypes:
        for mode in ('constant', 'edge', 'reflect'):
            check_pad_with_shape(shape1, ctx, pad1, mode, dtype)
            check_pad_with_shape(shape2, ctx, pad2, mode, dtype)


def np_instance_norm(data, weight, bias, eps):
Expand Down

0 comments on commit b03227d

Please sign in to comment.