Description
If the data is set as follows:
in_data0 = mx.nd.random.uniform(-5, 5, (512, 100000), ctx=mx.gpu(0))
mx.symbol.L2Normalization returns 0.0, in both the forward and the backward pass. If the data is instead drawn from (-1, 1), the result is correct, and mx.symbol.norm also gives the correct result for the (-5, 5) input.
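For comparison, the normalization that mx.symbol.norm is reported to handle correctly can be written out explicitly. This is only a sketch for instance mode; the axis, keepdims, and eps choices are my assumptions and are not part of the original report:

# Hypothetical reference path: instance-mode L2 normalization assembled from
# mx.symbol.norm, which reportedly returns correct values for the same input.
data = mx.symbol.Variable('data')
norm = mx.symbol.norm(data, ord=2, axis=1, keepdims=True)  # per-row L2 norm (assumed axis=1 for 2-D input)
normalized = mx.symbol.broadcast_div(data, norm + 1e-10)   # divide each row by its norm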
Test code as follows:
import numpy as np
import mxnet as mx
from mxnet.test_utils import default_context, assert_almost_equal, check_numeric_gradient
import time
in_data0 = mx.nd.random.uniform(-5, 5, (512,100000), ctx=mx.gpu(0))
def check_l2_normalizationFP16(mode, dtype, norm_eps=1e-10, isfp16=False):
    ctx = mx.gpu(0)
    data = mx.symbol.Variable('data', dtype=dtype)
    out = mx.symbol.L2Normalization(data=data, mode=mode, eps=norm_eps)
    out = mx.sym.make_loss(out)
    # Cast the shared input to the dtype under test
    in_data = in_data0.astype(dtype)
    a = time.time()
    exe = out.simple_bind(ctx=ctx, data=in_data.shape, dtype=dtype)
    # Run forward and backward in the requested dtype
    output = exe.forward(is_train=True, data=in_data)
    exe.backward()
    symbolic_grads = exe.grad_dict['data'].asnumpy()
    print('forw---', in_data.dtype, output[0].dtype, '--' + mode, '--')
    print('grad---', in_data.dtype, symbolic_grads[0].dtype, '--' + mode, '--', 100 * (time.time() - a))
    return output, symbolic_grads
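The helper above is not invoked in the snippet; a call such as the following exercises it for float16 and float32 on the same input. The mode 'instance' and the dtype pair are my assumptions about how the helper is meant to be driven:

# Hypothetical invocation comparing float16 against float32 on the same data
out16, grad16 = check_l2_normalizationFP16('instance', np.float16)
out32, grad32 = check_l2_normalizationFP16('instance', np.float32)
print('fp16 output all zero:', (out16[0].asnumpy() == 0).all())
print('fp32 output all zero:', (out32[0].asnumpy() == 0).all())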