Commit 5f2cd1a

Merge branch 'develop' of https://github.com/PaddlePaddle/Paddle into add_program_proto
2 parents: 4b948ab + 03651b0

25 files changed: +1667 −219 lines

cmake/util.cmake

Lines changed: 1 addition & 1 deletion

@@ -25,7 +25,7 @@ function(target_circle_link_libraries TARGET_NAME)
     endif()
   endforeach()
   if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang" OR "${CMAKE_CXX_COMPILER_ID}" STREQUAL "AppleClang")
-    if(IOS AND NOT IOS_ENABLE_BITCODE)
+    if(NOT IOS_ENABLE_BITCODE)
       list(APPEND LIBS "-undefined dynamic_lookup")
     endif()
   endif()

paddle/framework/ddim.cc

Lines changed: 8 additions & 0 deletions

@@ -292,5 +292,13 @@ DDim flatten_to_2d(const DDim& src, int num_col_dims) {
 
 DDim flatten_to_1d(const DDim& src) { return make_ddim({product(src)}); }
 
+DDim stride(const DDim& ddim) {
+  std::vector<int64_t> strides(ddim.size());
+  strides[ddim.size() - 1] = 1;
+  for (int i = ddim.size() - 2; i >= 0; --i) {
+    strides[i] = strides[i + 1] * ddim[i + 1];
+  }
+  return framework::make_ddim(strides);
+}
 }  // namespace framework
 }  // namespace paddle

paddle/framework/ddim.h

Lines changed: 1 addition & 0 deletions

@@ -121,6 +121,7 @@ DDim flatten_to_2d(const DDim& src, int num_col_dims);
 
 DDim flatten_to_1d(const DDim& src);
 
+DDim stride(const DDim& ddim);
 }  // namespace framework
 }  // namespace paddle

paddle/operators/CMakeLists.txt

Lines changed: 7 additions & 0 deletions

@@ -55,6 +55,13 @@ function(op_library TARGET)
     set(pybind_flag 1)
   endif()
 
+  # activation_op contains several operators
+  if ("${TARGET}" STREQUAL "activation_op")
+    set(pybind_flag 1)
+    # It is enough to add just one operator to pybind
+    file(APPEND ${pybind_file} "USE_OP(sigmoid);\n")
+  endif()
+
   # pybind USE_NO_KERNEL_OP
   file(READ ${TARGET}.cc TARGET_CONTENT)
   string(REGEX MATCH "OperatorWithKernel" regex_result "${TARGET_CONTENT}")
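op_library normally scans each op's .cc file and appends a matching USE_OP line to the generated pybind registration source. Because activation_op.cc registers many operators in a single translation unit, referencing any one of them links the whole object file in, so the special case above appends only USE_OP(sigmoid). A hedged sketch of what the generated file might contain after processing a few op targets (the surrounding entries here are assumptions for illustration, not shown in this diff):

// Hypothetical excerpt of the generated pybind registration file.
// Each op_library(...) call appends one line; USE_OP(sigmoid) alone is
// enough to pull in every operator registered by activation_op.cc.
USE_OP(mul);
USE_NO_KERNEL_OP(recurrent);
USE_OP(sigmoid);  // appended by the activation_op special case above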

paddle/operators/activation_op.cc

Lines changed: 306 additions & 0 deletions

@@ -0,0 +1,306 @@
/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/operators/activation_op.h"

namespace paddle {
namespace operators {

class ActivationOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

 protected:
  void InferShape(const framework::InferShapeContext &ctx) const override {
    ctx.Output<framework::LoDTensor>("Y")->Resize(
        ctx.Input<framework::Tensor>("X")->dims());
  }
};

class ActivationOpGrad : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

 protected:
  void InferShape(const framework::InferShapeContext &ctx) const override {
    ctx.Output<framework::LoDTensor>(framework::GradVarName("X"))
        ->Resize(ctx.Input<framework::Tensor>("Y")->dims());
  }
};

class SigmoidOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SigmoidOpMaker(framework::OpProto *proto,
                 framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Sigmoid operator");
    AddOutput("Y", "Output of Sigmoid operator");
    AddComment("Sigmoid activation operator, sigmoid = 1 / (1 + exp(-x))");
  }
};

class ExpOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  ExpOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Exp operator");
    AddOutput("Y", "Output of Exp operator");
    AddComment("Exp activation operator, exp(x) = e^x");
  }
};

class ReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  ReluOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Relu operator");
    AddOutput("Y", "Output of Relu operator");
    AddComment("Relu activation operator, relu(x) = max(x, 0)");
  }
};

class TanhOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  TanhOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Tanh operator");
    AddOutput("Y", "Output of Tanh operator");
    AddComment(
        "Tanh activation operator, tanh = (exp(x) - exp(-x)) / (exp(x) + "
        "exp(-x))");
  }
};

class SqrtOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SqrtOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Sqrt operator");
    AddOutput("Y", "Output of Sqrt operator");
    AddComment("Sqrt activation operator, sqrt(x) = x^(1/2)");
  }
};

class AbsOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  AbsOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Abs operator");
    AddOutput("Y", "Output of Abs operator");
    AddComment("Abs activation operator, abs(x) = |x|");
  }
};

class ReciprocalOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  ReciprocalOpMaker(framework::OpProto *proto,
                    framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Reciprocal operator");
    AddOutput("Y", "Output of Reciprocal operator");
    AddComment("Reciprocal activation operator, reciprocal(x) = 1 / x");
  }
};

class LogOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  LogOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Log operator");
    AddOutput("Y", "Output of Log operator");
    AddComment("Log activation operator, log(x) = natural logarithm of x");
  }
};

class SquareOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SquareOpMaker(framework::OpProto *proto,
                framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Square operator");
    AddOutput("Y", "Output of Square operator");
    AddComment("Square activation operator, square(x) = x^2");
  }
};

template <typename AttrType>
class BReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  BReluOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of BRelu operator");
    AddOutput("Y", "Output of BRelu operator");
    // Clamp x into [t_min, t_max].
    AddComment("BRelu activation operator, brelu = min(max(x, t_min), t_max)");
    AddAttr<AttrType>("t_min", "The min marginal value of BRelu")
        .SetDefault(static_cast<AttrType>(0));
    AddAttr<AttrType>("t_max", "The max marginal value of BRelu")
        .SetDefault(static_cast<AttrType>(24));
  }
};

template <typename AttrType>
class SoftReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SoftReluOpMaker(framework::OpProto *proto,
                  framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of SoftRelu operator");
    AddOutput("Y", "Output of SoftRelu operator");
    // Clamp x into [-threshold, threshold] before the softplus.
    AddComment(
        "SoftRelu activation operator, soft_relu = log(1 + exp(max(min(x, "
        "threshold), -threshold)))");
    AddAttr<AttrType>("threshold", "The threshold value of SoftRelu")
        .SetDefault(static_cast<AttrType>(40));
  }
};

template <typename AttrType>
class PowOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  PowOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of Pow operator");
    AddOutput("Y", "Output of Pow operator");
    AddComment("Pow activation operator, pow(x, factor) = x^factor");
    AddAttr<AttrType>("factor", "The exponential factor of Pow")
        .SetDefault(static_cast<AttrType>(1));
  }
};

template <typename AttrType>
class STanhOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  STanhOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input of STanh operator");
    AddOutput("Y", "Output of STanh operator");
    AddComment("STanh activation operator, stanh = b * tanh(a * x)");
    // Use floating-point literals: 2 / 3 would truncate to 0 under
    // integer division.
    AddAttr<AttrType>("scale_a", "The scale parameter a applied to the input")
        .SetDefault(static_cast<AttrType>(2.0 / 3.0));
    AddAttr<AttrType>("scale_b", "The scale parameter b applied to the output")
        .SetDefault(static_cast<AttrType>(1.7159));
  }
};

}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;
REGISTER_OP(sigmoid, ops::ActivationOp, ops::SigmoidOpMaker, sigmoid_grad,
            ops::ActivationOpGrad);
REGISTER_OP_CPU_KERNEL(sigmoid,
                       ops::ActivationKernel<paddle::platform::CPUPlace, float,
                                             ops::SigmoidFunctor<float>>);
REGISTER_OP_CPU_KERNEL(
    sigmoid_grad, ops::ActivationGradKernel<paddle::platform::CPUPlace, float,
                                            ops::SigmoidGradFunctor<float>>);

REGISTER_OP(exp, ops::ActivationOp, ops::ExpOpMaker, exp_grad,
            ops::ActivationOpGrad);
REGISTER_OP_CPU_KERNEL(
    exp,
    ops::ActivationKernel<paddle::platform::CPUPlace, float, ops::ExpFunctor>);
REGISTER_OP_CPU_KERNEL(exp_grad,
                       ops::ActivationGradKernel<paddle::platform::CPUPlace,
                                                 float, ops::ExpGradFunctor>);

REGISTER_OP(relu, ops::ActivationOp, ops::ReluOpMaker, relu_grad,
            ops::ActivationOpGrad);
REGISTER_OP_CPU_KERNEL(relu,
                       ops::ActivationKernel<paddle::platform::CPUPlace, float,
                                             ops::ReluFunctor<float>>);
REGISTER_OP_CPU_KERNEL(
    relu_grad, ops::ActivationGradKernel<paddle::platform::CPUPlace, float,
                                         ops::ReluGradFunctor<float>>);

REGISTER_OP(tanh, ops::ActivationOp, ops::TanhOpMaker, tanh_grad,
            ops::ActivationOpGrad);
REGISTER_OP_CPU_KERNEL(
    tanh,
    ops::ActivationKernel<paddle::platform::CPUPlace, float, ops::TanhFunctor>);
REGISTER_OP_CPU_KERNEL(
    tanh_grad, ops::ActivationGradKernel<paddle::platform::CPUPlace, float,
                                         ops::TanhGradFunctor<float>>);

REGISTER_OP(sqrt, ops::ActivationOp, ops::SqrtOpMaker, sqrt_grad,
            ops::ActivationOpGrad);
REGISTER_OP_CPU_KERNEL(
    sqrt,
    ops::ActivationKernel<paddle::platform::CPUPlace, float, ops::SqrtFunctor>);
REGISTER_OP_CPU_KERNEL(
    sqrt_grad, ops::ActivationGradKernel<paddle::platform::CPUPlace, float,
                                         ops::SqrtGradFunctor<float>>);

REGISTER_OP(abs, ops::ActivationOp, ops::AbsOpMaker, abs_grad,
            ops::ActivationOpGrad);
REGISTER_OP_CPU_KERNEL(
    abs,
    ops::ActivationKernel<paddle::platform::CPUPlace, float, ops::AbsFunctor>);
REGISTER_OP_CPU_KERNEL(abs_grad,
                       ops::ActivationGradKernel<paddle::platform::CPUPlace,
                                                 float, ops::AbsGradFunctor>);

REGISTER_OP(reciprocal, ops::ActivationOp, ops::ReciprocalOpMaker,
            reciprocal_grad, ops::ActivationOpGrad);
REGISTER_OP_CPU_KERNEL(reciprocal,
                       ops::ActivationKernel<paddle::platform::CPUPlace, float,
                                             ops::ReciprocalFunctor<float>>);
REGISTER_OP_CPU_KERNEL(
    reciprocal_grad,
    ops::ActivationGradKernel<paddle::platform::CPUPlace, float,
                              ops::ReciprocalGradFunctor<float>>);

REGISTER_OP(log, ops::ActivationOp, ops::LogOpMaker, log_grad,
            ops::ActivationOpGrad);
REGISTER_OP_CPU_KERNEL(
    log,
    ops::ActivationKernel<paddle::platform::CPUPlace, float, ops::LogFunctor>);
REGISTER_OP_CPU_KERNEL(
    log_grad, ops::ActivationGradKernel<paddle::platform::CPUPlace, float,
                                        ops::LogGradFunctor<float>>);

REGISTER_OP(square, ops::ActivationOp, ops::SquareOpMaker, square_grad,
            ops::ActivationOpGrad);
REGISTER_OP_CPU_KERNEL(square,
                       ops::ActivationKernel<paddle::platform::CPUPlace, float,
                                             ops::SquareFunctor>);
REGISTER_OP_CPU_KERNEL(
    square_grad, ops::ActivationGradKernel<paddle::platform::CPUPlace, float,
                                           ops::SquareGradFunctor<float>>);

REGISTER_OP(brelu, ops::ActivationOp, ops::BReluOpMaker<float>, brelu_grad,
            ops::ActivationOpGrad);
REGISTER_OP_CPU_KERNEL(brelu,
                       ops::BReluKernel<paddle::platform::CPUPlace, float>);
REGISTER_OP_CPU_KERNEL(brelu_grad,
                       ops::BReluGradKernel<paddle::platform::CPUPlace, float>);

REGISTER_OP(soft_relu, ops::ActivationOp, ops::SoftReluOpMaker<float>,
            soft_relu_grad, ops::ActivationOpGrad);
REGISTER_OP_CPU_KERNEL(soft_relu,
                       ops::SoftReluKernel<paddle::platform::CPUPlace, float>);
REGISTER_OP_CPU_KERNEL(
    soft_relu_grad, ops::SoftReluGradKernel<paddle::platform::CPUPlace, float>);

REGISTER_OP(pow, ops::ActivationOp, ops::PowOpMaker<float>, pow_grad,
            ops::ActivationOpGrad);
REGISTER_OP_CPU_KERNEL(pow, ops::PowKernel<paddle::platform::CPUPlace, float>);
REGISTER_OP_CPU_KERNEL(pow_grad,
                       ops::PowGradKernel<paddle::platform::CPUPlace, float>);

REGISTER_OP(stanh, ops::ActivationOp, ops::STanhOpMaker<float>, stanh_grad,
            ops::ActivationOpGrad);
REGISTER_OP_CPU_KERNEL(stanh,
                       ops::STanhKernel<paddle::platform::CPUPlace, float>);
REGISTER_OP_CPU_KERNEL(stanh_grad,
                       ops::STanhGradKernel<paddle::platform::CPUPlace, float>);
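All the simple forward kernels above are instantiations of a single element-wise ActivationKernel parameterized by a functor; the functors themselves live in activation_op.h, which this commit page does not show. As a rough sketch of the pattern, with inferred names and scalar signatures (the real functors presumably operate on Eigen expressions, not scalars):

#include <cmath>

// Hedged sketch only: the actual SigmoidFunctor interface in
// activation_op.h is an assumption here, reconstructed from the
// registrations above.
template <typename T>
struct SigmoidFunctorSketch {
  // Forward: y = 1 / (1 + exp(-x)), applied element-wise.
  T operator()(T x) const {
    return static_cast<T>(1) / (static_cast<T>(1) + std::exp(-x));
  }
};

template <typename T>
struct SigmoidGradFunctorSketch {
  // Backward: dx = dy * y * (1 - y). The gradient needs only the forward
  // output y, which is why ActivationOpGrad resizes GradVarName("X") from
  // the dims of "Y" rather than of "X".
  T operator()(T y, T dy) const {
    return dy * y * (static_cast<T>(1) - y);
  }
};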
