Add smooth_l1 interface to the v2 documentation.

feature/design_of_v2_layer_converter
dangqingqing 8 years ago
parent 6c654c012a
commit 2838491235

@ -419,6 +419,11 @@ hsigmoid
.. autoclass:: paddle.v2.layer.hsigmoid
:noindex:
smooth_l1
---------
.. autoclass:: paddle.v2.layer.smooth_l1
:noindex:
Check Layer
============

@ -116,7 +116,7 @@ __all__ = [
'spp_layer',
'pad_layer',
'eos_layer',
'smooth_l1_cost',
'smooth_l1',
'layer_support',
]
@ -5283,7 +5283,7 @@ def multi_binary_label_cross_entropy(input,
@wrap_name_default()
@layer_support()
def smooth_l1_cost(input, label, name=None, layer_attr=None):
def smooth_l1(input, label, name=None, layer_attr=None):
"""
This is a L1 loss but more smooth. It requires that the
size of input and label are equal. The formula is as follows,
@ -5307,8 +5307,8 @@ def smooth_l1_cost(input, label, name=None, layer_attr=None):
.. code-block:: python
cost = smooth_l1_cost(input=input_layer,
label=label_layer)
cost = smooth_l1(input=input_layer,
label=label_layer)
:param input: The input layer.
:type input: LayerOutput

Loading…
Cancel
Save