Skip to content

Commit 08cee15

Browse files
committed
add NLLLoss url
1 parent f3d3e74 commit 08cee15

File tree

2 files changed

+61
-54
lines changed

2 files changed

+61
-54
lines changed

python/paddle/nn/functional/loss.py

Lines changed: 34 additions & 42 deletions
Original file line numberDiff line numberDiff line change
@@ -68,14 +68,19 @@
6868
]
6969

7070

71-
def nll_loss(x, label, weight=None, ignore_index=-100, reduction='mean'):
71+
def nll_loss(x,
72+
label,
73+
weight=None,
74+
ignore_index=-100,
75+
reduction='mean',
76+
name=None):
7277
"""
7378
This api returns negative log likelihood.
74-
See more detail in `paddle.nn.loss.NLLLoss`.
75-
79+
See more detail in :ref:`api_nn_loss_NLLLoss` .
80+
7681
Parameters:
7782
x (Tensor): Input tensor, the data type is float32, float64.
78-
label (Tensor): Label tensor, the data type is int64_t.
83+
label (Tensor): Label tensor, the data type is int64.
7984
weight (Tensor, optional): Weight tensor, a manual rescaling weight given
8085
to each class. If given, it has to be a Tensor of size `C`. Otherwise,
8186
it is treated as if having all ones. The data type is
@@ -85,48 +90,35 @@ def nll_loss(x, label, weight=None, ignore_index=-100, reduction='mean'):
8590
reduction (str, optional): Indicate how to average the loss,
8691
the candidates are ``'none'`` | ``'mean'`` | ``'sum'``.
8792
If :attr:`reduction` is ``'mean'``, the reduced mean loss is returned;
93+
:attr:`reduction` is ``'sum'``, the reduced sum loss is returned;
94+
:attr:`reduction` is ``'none'``, no reduction will be applied.
8895
Default is ``'mean'``.
89-
96+
name (str, optional): Name for the operation (optional, default is None).
97+
For more information, please refer to :ref:`api_guide_Name`.
98+
9099
Returns:
91100
The tensor variable storing the nll_loss.
92-
101+
93102
Examples:
94-
import paddle
95-
import numpy as np
96-
from paddle.nn.functional import nll_loss
97-
log_softmax = paddle.nn.LogSoftmax(axis=1)
98-
99-
x_np = np.random.random(size=(10, 10)).astype(np.float32)
100-
label_np = np.random.randint(0, 10, size=(10,)).astype(np.int64)
101-
102-
place = paddle.CPUPlace()
103-
104-
# imperative mode
105-
paddle.enable_imperative(place)
106-
x = paddle.imperative.to_variable(x_np)
107-
log_out = log_softmax(x)
108-
label = paddle.imperative.to_variable(label_np)
109-
imperative_result = nll_loss(log_out, label)
110-
print(imperative_result.numpy())
111-
112-
# declarative mode
113-
paddle.disable_imperative()
114-
prog = paddle.Program()
115-
startup_prog = paddle.Program()
116-
with paddle.program_guard(prog, startup_prog):
117-
x = paddle.nn.data(name='x', shape=[10, 10], dtype='float32')
118-
label = paddle.nn.data(name='label', shape=[10], dtype='int64')
119-
log_out = log_softmax(x)
120-
res = nll_loss(log_out, label)
121-
122-
exe = paddle.Executor(place)
123-
declaritive_result = exe.run(
124-
prog,
125-
feed={"x": x_np,
126-
"label": label_np},
127-
fetch_list=[res])
128-
print(declaritive_result)
103+
.. code-block:: python
104+
105+
import paddle
106+
import numpy as np
107+
from paddle.nn.functional import nll_loss
108+
log_softmax = paddle.nn.LogSoftmax(axis=1)
129109
110+
x_np = np.random.random(size=(10, 10)).astype(np.float32)
111+
label_np = np.random.randint(0, 10, size=(10,)).astype(np.int64)
112+
113+
place = paddle.CPUPlace()
114+
115+
# imperative mode
116+
paddle.enable_imperative(place)
117+
x = paddle.imperative.to_variable(x_np)
118+
log_out = log_softmax(x)
119+
label = paddle.imperative.to_variable(label_np)
120+
imperative_result = nll_loss(log_out, label)
121+
print(imperative_result.numpy())
130122
"""
131123
if reduction not in ['sum', 'mean', 'none']:
132124
raise ValueError(
@@ -152,6 +144,7 @@ def nll_loss(x, label, weight=None, ignore_index=-100, reduction='mean'):
152144
out, _ = core.ops.reshape2(out, 'shape', out_shape)
153145
return out
154146

147+
helper = LayerHelper('nll_loss', **locals())
155148
x_shape = list(x.shape)
156149
x_dims = len(x_shape)
157150
if x_dims < 2:
@@ -165,7 +158,6 @@ def nll_loss(x, label, weight=None, ignore_index=-100, reduction='mean'):
165158
label = paddle.reshape(label, shape=[n, 1, -1])
166159
out_shape = [n] + x_shape[2:]
167160

168-
helper = LayerHelper('nll_loss', **locals())
169161
fluid.data_feeder.check_variable_and_dtype(x, 'x', ['float32', 'float64'],
170162
'nll_loss')
171163
fluid.data_feeder.check_variable_and_dtype(label, 'label', ['int64'],

python/paddle/nn/layer/loss.py

Lines changed: 27 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -506,8 +506,6 @@ class NLLLoss(fluid.dygraph.Layer):
506506
\\end{cases}
507507
508508
Parameters:
509-
x (Tensor): Input tensor, the data type is float32, float64.
510-
label (Tensor): Label tensor, the data type is int64_t.
511509
weight (Tensor, optional): Weight tensor, a manual rescaling weight given
512510
to each class. If given, it has to be a Tensor of size `C`. Otherwise,
513511
it is treated as if having all ones. The data type is
@@ -516,27 +514,34 @@ class NLLLoss(fluid.dygraph.Layer):
516514
and does not contribute to the input gradient.
517515
reduction (str, optional): Indicate how to average the loss,
518516
the candidates are ``'none'`` | ``'mean'`` | ``'sum'``.
519-
If :attr:`reduction` is ``'mean'``, the reduced mean loss is returned;
517+
If :attr:`reduction` is ``'mean'``, the reduced mean loss is returned;
518+
:attr:`reduction` is ``'sum'``, the reduced sum loss is returned;
519+
:attr:`reduction` is ``'none'``, no reduction will be applied.
520520
Default is ``'mean'``.
521+
name (str, optional): Name for the operation (optional, default is None).
522+
For more information, please refer to :ref:`api_guide_Name`.
523+
524+
Shape:
525+
x (Tensor): Input tensor, the data type is float32, float64.
526+
label (Tensor): Label tensor, the data type is int64.
521527
522528
Returns:
523-
The callable object which calculates negative log likelihood loss when
524-
get the input `x` and `label`.
529+
The callable object which calculates negative log likelihood loss.
525530
526531
Examples:
527-
528532
.. code-block:: python
533+
529534
import paddle
530535
import numpy as np
531-
536+
532537
nll_loss = paddle.nn.layer.NLLLoss()
533538
log_softmax = paddle.nn.LogSoftmax(axis=1)
534-
539+
535540
x_np = np.random.random(size=(10, 10)).astype(np.float32)
536541
label_np = np.random.randint(0, 10, size=(10,)).astype(np.int64)
537-
542+
538543
place = paddle.CPUPlace()
539-
544+
540545
# imperative mode
541546
paddle.enable_imperative(place)
542547
x = paddle.imperative.to_variable(x_np)
@@ -564,16 +569,26 @@ class NLLLoss(fluid.dygraph.Layer):
564569
print(declaritive_result)
565570
"""
566571

567-
def __init__(self, weight=None, ignore_index=-100, reduction='mean'):
572+
def __init__(self,
573+
weight=None,
574+
ignore_index=-100,
575+
reduction='mean',
576+
name=None):
577+
if reduction not in ['sum', 'mean', 'none']:
578+
raise ValueError(
579+
"The value of 'reduction' in nll_loss should be 'sum', 'mean' or "
580+
"'none', but received %s, which is not allowed." % reduction)
568581
super(NLLLoss, self).__init__()
569582
self.weight = weight
570583
self.ignore_index = ignore_index
571584
self.reduction = reduction
585+
self.name = name
572586

573587
def forward(self, x, label):
574588
return F.nll_loss(
575589
x,
576590
label,
577591
weight=self.weight,
578592
ignore_index=self.ignore_index,
579-
reduction=self.reduction)
593+
reduction=self.reduction,
594+
name=self.name)

0 commit comments

Comments
 (0)