|
|
|
@ -108,6 +108,7 @@ __all__ = [
|
|
|
|
|
'sum_cost',
|
|
|
|
|
'rank_cost',
|
|
|
|
|
'lambda_cost',
|
|
|
|
|
'huber_regression_cost',
|
|
|
|
|
'huber_classification_cost',
|
|
|
|
|
'block_expand_layer',
|
|
|
|
|
'maxout_layer',
|
|
|
|
@ -216,6 +217,7 @@ class LayerType(object):
|
|
|
|
|
|
|
|
|
|
RANK_COST = 'rank-cost'
|
|
|
|
|
LAMBDA_COST = 'lambda_cost'
|
|
|
|
|
HUBER_REGRESSION = 'huber_regression'
|
|
|
|
|
HUBER_CLASSIFICATION = 'huber_classification'
|
|
|
|
|
CROSS_ENTROPY = 'multi-class-cross-entropy'
|
|
|
|
|
CROSS_ENTROPY_WITH_SELFNORM = 'multi_class_cross_entropy_with_selfnorm'
|
|
|
|
@ -5603,6 +5605,57 @@ def sum_cost(input, name=None, layer_attr=None):
|
|
|
|
|
return LayerOutput(name, LayerType.SUM_COST, parents=[input], size=1)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@wrap_name_default()
@layer_support()
def huber_regression_cost(input,
                          label,
                          name=None,
                          delta=1.0,
                          coeff=1.0,
                          layer_attr=None):
    r"""
    In statistics, the Huber loss is a loss function used in robust regression,
    that is less sensitive to outliers in data than the squared error loss.
    Given a prediction f(x), a label y and :math:`\delta`, the loss function
    is defined as:

    .. math::

        loss = 0.5*\left ( y-f(x) \right )^2, \left | y-f(x) \right |\leq \delta

        loss = \delta \left | y-f(x) \right |-0.5\delta ^2, otherwise

    The example usage is:

    .. code-block:: python

       cost = huber_regression_cost(input=input_layer, label=label_layer)

    :param input: The first input layer.
    :type input: LayerOutput.
    :param label: The input label.
    :type label: LayerOutput.
    :param name: The name of this layer. It is not necessary.
    :type name: None|basestring.
    :param delta: The difference between the observed and predicted values.
    :type delta: float.
    :param coeff: The coefficient affects the gradient in the backward.
    :type coeff: float.
    :param layer_attr: Extra Layer Attribute.
    :type layer_attr: ExtraLayerAttribute
    :return: LayerOutput object.
    :rtype: LayerOutput.
    """
    # Both inputs must be layers; validate label symmetrically with input.
    assert isinstance(input, LayerOutput)
    assert isinstance(label, LayerOutput)
    Layer(
        name=name,
        type=LayerType.HUBER_REGRESSION,
        inputs=[input.name, label.name],
        delta=delta,
        coeff=coeff,
        **ExtraLayerAttribute.to_kwargs(layer_attr))
    # Cost layers produce a single scalar value per sample, hence size=1.
    return LayerOutput(
        name, LayerType.HUBER_REGRESSION, parents=[input, label], size=1)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@wrap_name_default()
|
|
|
|
|
@layer_support()
|
|
|
|
|
def huber_classification_cost(input,
|
|
|
|
|