@@ -5286,10 +5286,7 @@ def multi_binary_label_cross_entropy(input,
 def smooth_l1_cost(input, label, name=None, layer_attr=None):
     """
     This is a L1 loss but more smooth. It requires that the
-    size of input and label are equal.
-
-    More details can be found by referring to `Fast R-CNN
-    <https://arxiv.org/pdf/1504.08083v2.pdf>`_
+    size of input and label are equal. The formula is as follows,

     .. math::

@@ -5305,6 +5302,9 @@ def smooth_l1_cost(input, label, name=None, layer_attr=None):
        |x|-0.5& \text{otherwise}
        \end{cases}

+    More details can be found by referring to `Fast R-CNN
+    <https://arxiv.org/pdf/1504.08083v2.pdf>`_
+
     .. code-block:: python

        cost = smooth_l1_cost(input=input_layer,
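
For anyone reviewing this change, the piecewise definition the docstring now introduces before the math block is easy to check numerically. Below is a minimal NumPy sketch of that cost, assuming the formula as documented; `smooth_l1_ref`, `smooth_l1_cost_ref`, and the sample arrays are illustrative names introduced here, not part of the Paddle API.

.. code-block:: python

   import numpy as np

   def smooth_l1_ref(x):
       # Piecewise term from the docstring: 0.5 * x**2 where |x| < 1,
       # |x| - 0.5 otherwise.
       absx = np.abs(x)
       return np.where(absx < 1, 0.5 * x ** 2, absx - 0.5)

   def smooth_l1_cost_ref(inp, label):
       # L = sum_i smooth_L1(input_i - label_i); input and label must
       # be equal in size, as the docstring requires.
       assert inp.shape == label.shape
       return np.sum(smooth_l1_ref(inp - label))

   # Small residuals fall in the quadratic branch, large ones in the
   # linear branch: 0.5 * 0.5**2 + (2.0 - 0.5) + (3.0 - 0.5) = 4.125
   print(smooth_l1_cost_ref(np.array([0.5, 2.0, -3.0]), np.zeros(3)))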