Commit 1cb511d

[pnorm] remove unused variables

1 parent 059f42d
1 file changed: +0 -8 lines changed

paddle/fluid/operators/p_norm_op.cu (-8)

@@ -160,11 +160,9 @@ class PnormCUDAKernel : public framework::OpKernel<T> {
     auto ndim = out_norm->dims();
     float porder = ctx.Attr<float>("porder");
     int axis = ctx.Attr<int>("axis");
-    bool asvector = ctx.Attr<bool>("asvector");
     if (axis < 0) axis = xdim.size() + axis;
     std::vector<int> reduce_axis = {axis};
 
-    auto& dev_ctx = ctx.cuda_device_context();
     auto stream = ctx.cuda_device_context().stream();
 
     using MT = typename details::MPTypeTrait<T>::Type;
@@ -246,20 +244,14 @@ class PnormGradCUDAKernel : public framework::OpKernel<T> {
         ctx.Input<framework::Tensor>(framework::GradVarName("Out"));
     auto* out_dx = ctx.Output<framework::Tensor>(framework::GradVarName("X"));
     T* dx = out_dx->mutable_data<T>(ctx.GetPlace());
-    const T* x = in_x->data<T>();
-    const T* x_norm = in_norm->data<T>();
-    const T* norm_dy = in_norm_dy->data<T>();
 
     auto xdim = in_x->dims();
     float porder = ctx.Attr<float>("porder");
-    T eps = static_cast<T>(ctx.Attr<float>("epsilon"));
     int axis = ctx.Attr<int>("axis");
     bool reduce_all = ((axis < 0) || (in_norm->numel() == 1));
-    bool asvector = ctx.Attr<bool>("asvector");
     if (axis < 0) axis = xdim.size() + axis;
     const std::vector<int> dims = {axis};
 
-    auto& dev_ctx = ctx.cuda_device_context();
     auto& cuda_ctx = ctx.template device_context<DeviceContext>();
 
     if (porder == 0) {
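
Every deleted line follows the same pattern: a local variable is initialized from the kernel's execution context but never read afterwards, which GCC and Clang report via -Wunused-variable (and which fails builds configured with -Werror). Below is a minimal standalone sketch of that pattern; FakeContext and ComputeNorm are hypothetical stand-ins for illustration, not Paddle's ExecutionContext or the kernels in this file.

// Minimal sketch of the unused-variable pattern this commit removes.
// FakeContext and ComputeNorm are hypothetical names, used only here.
#include <iostream>

struct FakeContext {
  float porder = 2.0f;
  bool asvector = false;
};

float ComputeNorm(const FakeContext& ctx) {
  float porder = ctx.porder;     // read below, so it stays
  bool asvector = ctx.asvector;  // never read: GCC/Clang warn
                                 // "unused variable 'asvector'";
                                 // deleting this declaration is the fix
  return porder;
}

int main() {
  std::cout << ComputeNorm(FakeContext{}) << "\n";
  return 0;
}

Compiling the sketch with g++ -Wall flags the unused local; deleting the flagged declaration makes the warning disappear, which is what this commit does for asvector, dev_ctx, x, x_norm, norm_dy, and eps.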
