@@ -116,7 +116,7 @@ __all__ = [
     'spp_layer',
     'pad_layer',
     'eos_layer',
-    'smooth_l1_cost',
+    'smooth_l1',
     'layer_support',
 ]

@@ -5283,7 +5283,7 @@ def multi_binary_label_cross_entropy(input,

 @wrap_name_default()
 @layer_support()
-def smooth_l1_cost(input, label, name=None, layer_attr=None):
+def smooth_l1(input, label, name=None, layer_attr=None):
     """
     This is a L1 loss but more smooth. It requires that the
     size of input and label are equal. The formula is as follows,
@@ -5307,8 +5307,8 @@ def smooth_l1_cost(input, label, name=None, layer_attr=None):

     .. code-block:: python

-       cost = smooth_l1_cost(input=input_layer,
-                             label=label_layer)
+       cost = smooth_l1(input=input_layer,
+                        label=label_layer)

     :param input: The input layer.
     :type input: LayerOutput