Skip to content

Commit 1338865

Browse files
committed
[TOP] Level 3 complete (apache#7)
1 parent 31eb2c5 commit 1338865

File tree

15 files changed

+665
-258
lines changed

15 files changed

+665
-258
lines changed

nnvm/CMakeLists.txt

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,10 @@ mxnet_source_group("Source\\core" GLOB "src/core/*.cc")
5353
mxnet_source_group("Source\\pass" GLOB "src/pass/*.cc")
5454

5555

56-
FILE(GLOB_RECURSE SOURCE "src/*.cc" "src/*.h" "include/*.h")
56+
FILE(GLOB_RECURSE SOURCE
57+
src/c_api/*.cc
58+
src/core/*.cc
59+
src/pass/*.cc)
5760

5861
if(EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/dmlc-core/CMakeLists.txt)
5962
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/dmlc-core/include)

nnvm/Makefile

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -51,10 +51,10 @@ else
5151
NO_WHOLE_ARCH= --no-whole-archive
5252
endif
5353

54-
all: lib/libnnvm.a lib/libnnvm_example.$(SHARED_LIBRARY_SUFFIX)
54+
all: lib/libnnvm.a lib/libnnvm_top.$(SHARED_LIBRARY_SUFFIX)
5555

5656
SRC = $(wildcard src/*.cc src/c_api/*.cc src/core/*.cc src/pass/*.cc)
57-
SRC_TOP = $(wildcard src/top/*.cc)
57+
SRC_TOP = $(wildcard src/top/*.cc src/top/*/*.cc)
5858
ALL_OBJ = $(patsubst %.cc, build/%.o, $(SRC))
5959
TOP_OBJ = $(patsubst %.cc, build/%.o, $(SRC_TOP))
6060
ALL_DEP = $(ALL_OBJ)
@@ -72,7 +72,7 @@ lib/libnnvm.a: $(ALL_DEP)
7272
@mkdir -p $(@D)
7373
ar crv $@ $(filter %.o, $?)
7474

75-
lib/libnnvm_example.$(SHARED_LIBRARY_SUFFIX): lib/libnnvm.a ${TOP_OBJ}
75+
lib/libnnvm_top.$(SHARED_LIBRARY_SUFFIX): lib/libnnvm.a ${TOP_OBJ}
7676
@mkdir -p $(@D)
7777
$(CXX) $(CFLAGS) -shared -o $@ $(filter %.o, $^) $(LDFLAGS) -Wl,${WHOLE_ARCH} lib/libnnvm.a -Wl,${NO_WHOLE_ARCH}
7878

nnvm/include/nnvm/top/nn.h

Lines changed: 11 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -54,15 +54,15 @@ struct DropoutParam : public dmlc::Parameter<DropoutParam> {
5454

5555
struct BatchNormParam : public dmlc::Parameter<BatchNormParam> {
5656
int axis;
57-
float epsilon;
58-
float momentum;
57+
double epsilon;
58+
double momentum;
5959
bool center;
6060
bool scale;
6161

6262
DMLC_DECLARE_PARAMETER(BatchNormParam) {
6363
DMLC_DECLARE_FIELD(axis).set_default(1)
6464
.describe("Specify which shape axis the channel is specified.");
65-
DMLC_DECLARE_FIELD(epsilon).set_default(1e-5f)
65+
DMLC_DECLARE_FIELD(epsilon).set_default(1e-5)
6666
.describe("Small float added to variance to avoid dividing by zero.");
6767
DMLC_DECLARE_FIELD(center).set_default(true)
6868
.describe("If True, add offset of `beta` to normalized tensor."
@@ -81,21 +81,23 @@ struct BatchNormParam : public dmlc::Parameter<BatchNormParam> {
8181
static const constexpr int kMovingVariance = 4;
8282
};
8383

84+
85+
// Shared by softmax and log_softmax
8486
struct SoftmaxParam : public dmlc::Parameter<SoftmaxParam> {
8587
int axis;
8688

8789
DMLC_DECLARE_PARAMETER(SoftmaxParam) {
8890
DMLC_DECLARE_FIELD(axis).set_default(-1)
89-
.describe("The axis to sum over when computing softmax.");
91+
.describe("The axis to sum over when computing softmax.");
9092
}
9193
};
9294

93-
struct LogSoftmaxParam : public dmlc::Parameter<LogSoftmaxParam> {
94-
int axis;
95+
struct LeakyReLUParam : public dmlc::Parameter<LeakyReLUParam> {
96+
double alpha;
9597

96-
DMLC_DECLARE_PARAMETER(LogSoftmaxParam) {
97-
DMLC_DECLARE_FIELD(axis).set_default(-1)
98-
.describe("The axis to sum over when computing softmax.");
98+
DMLC_DECLARE_PARAMETER(LeakyReLUParam) {
99+
DMLC_DECLARE_FIELD(alpha).set_lower_bound(0.0).set_default(0.25)
100+
.describe("slope coefficient for the negative half axis.");
99101
}
100102
};
101103

nnvm/include/nnvm/top/tensor.h

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -40,6 +40,24 @@ struct CastParam : public dmlc::Parameter<CastParam> {
4040
}
4141
};
4242

43+
struct ReshapeParam : public dmlc::Parameter<ReshapeParam> {
44+
Tuple<int64_t> shape;
45+
46+
DMLC_DECLARE_PARAMETER(ReshapeParam) {
47+
DMLC_DECLARE_FIELD(shape);
48+
}
49+
};
50+
51+
struct ScalarParam : public dmlc::Parameter<ScalarParam> {
52+
double scalar;
53+
54+
DMLC_DECLARE_PARAMETER(ScalarParam) {
55+
DMLC_DECLARE_FIELD(scalar);
56+
}
57+
};
58+
59+
60+
4361
} // namespace top
4462
} // namespace nnvm
4563

nnvm/python/nnvm/libinfo.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@ def find_lib_path():
2626
if hasattr(__builtin__, "NNVM_LIBRARY_NAME"):
2727
lib_name = __builtin__.NNVM_LIBRARY_NAME
2828
else:
29-
lib_name = "libnnvm_example"
29+
lib_name = "libnnvm_top"
3030

3131
api_path = os.path.join(base_path, '../../lib/')
3232
cmake_build_path = os.path.join(base_path, '../../build/Release/')

nnvm/src/top/nn.cc renamed to nnvm/src/top/nn/nn.cc

Lines changed: 40 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -7,8 +7,8 @@
77
#include <nnvm/node.h>
88
#include <nnvm/op_attr_types.h>
99
#include <nnvm/top/nn.h>
10-
#include "./op_common.h"
11-
#include "./elemwise_op_common.h"
10+
#include "../op_common.h"
11+
#include "../elemwise_op_common.h"
1212

1313
namespace nnvm {
1414
namespace top {
@@ -126,6 +126,25 @@ NNVM_REGISTER_OP(dropout)
126126
// batchnorm
127127
DMLC_REGISTER_PARAMETER(BatchNormParam);
128128

129+
inline bool BatchNormInferShape(const nnvm::NodeAttrs& attrs,
130+
std::vector<TShape> *in_shape,
131+
std::vector<TShape> *out_shape) {
132+
CHECK_EQ(in_shape->size(), 5U)
133+
<< "Input:[data, gamma, beta, moving_mean, moving_var]";
134+
CHECK_EQ(out_shape->size(), 3U);
135+
const TShape &dshape = in_shape->at(0);
136+
if (dshape.ndim() == 0) return false;
137+
TShape bshape({dshape[1]});
138+
NNVM_ASSIGN_INPUT_SHAPE(attrs, *in_shape, 1, bshape);
139+
NNVM_ASSIGN_INPUT_SHAPE(attrs, *in_shape, 2, bshape);
140+
NNVM_ASSIGN_INPUT_SHAPE(attrs, *in_shape, 3, bshape);
141+
NNVM_ASSIGN_INPUT_SHAPE(attrs, *in_shape, 4, bshape);
142+
NNVM_ASSIGN_OUTPUT_SHAPE(attrs, *out_shape, 0, dshape);
143+
NNVM_ASSIGN_OUTPUT_SHAPE(attrs, *out_shape, 1, bshape);
144+
NNVM_ASSIGN_OUTPUT_SHAPE(attrs, *out_shape, 2, bshape);
145+
return true;
146+
}
147+
129148
NNVM_REGISTER_OP(batch_norm)
130149
.describe(R"(Batch normalization layer (Ioffe and Szegedy, 2014).
131150
Normalizes the input at each batch, i.e. applies a transformation
@@ -167,6 +186,8 @@ axis to be the last item in the input shape.
167186
.set_num_inputs(5)
168187
.set_num_outputs(3)
169188
.set_attr_parser(ParamParser<BatchNormParam>)
189+
.set_attr<FInferShape>("FInferShape", BatchNormInferShape)
190+
.set_attr<FInferType>("FInferType", ElemwiseType<5, 3>)
170191
.set_attr<FListInputNames>("FListInputNames", [](const NodeAttrs& attrs) {
171192
return std::vector<std::string>{"data", "gamma", "beta", "moving_mean", "moving_var"};
172193
})
@@ -198,8 +219,6 @@ NNVM_REGISTER_OP(softmax)
198219
.set_support_level(1);
199220

200221
// log_softmax
201-
DMLC_REGISTER_PARAMETER(LogSoftmaxParam);
202-
203222
NNVM_REGISTER_OP(log_softmax)
204223
.describe(R"code(Computes softmax.
205224
@@ -208,7 +227,23 @@ NNVM_REGISTER_OP(log_softmax)
208227
)code" NNVM_ADD_FILELINE)
209228
.set_num_inputs(1)
210229
.set_num_outputs(1)
211-
.set_attr_parser(ParamParser<LogSoftmaxParam>)
230+
.set_attr_parser(ParamParser<SoftmaxParam>)
231+
.set_attr<FInferShape>("FInferShape", ElemwiseShape<1, 1>)
232+
.set_attr<FInferType>("FInferType", ElemwiseType<1, 1>)
233+
.set_support_level(1);
234+
235+
// leaky_relu
236+
DMLC_REGISTER_PARAMETER(LeakyReLUParam);
237+
238+
NNVM_REGISTER_OP(leaky_relu)
239+
.describe(R"code(Leaky version of a Rectified Linear Unit.
240+
241+
`y = x > 0 ? x : alpha * x`
242+
243+
)code" NNVM_ADD_FILELINE)
244+
.set_num_inputs(1)
245+
.set_num_outputs(1)
246+
.set_attr_parser(ParamParser<LeakyReLUParam>)
212247
.set_attr<FInferShape>("FInferShape", ElemwiseShape<1, 1>)
213248
.set_attr<FInferType>("FInferType", ElemwiseType<1, 1>)
214249
.set_support_level(1);

0 commit comments

Comments
 (0)