Rename mse_cost to square_error_cost.

revert-3824-remove_grad_op_type
caoying03 8 years ago
parent c1feb27f75
commit fb49bc2c99

@@ -53,7 +53,7 @@ __all__ = [
     'cos_sim',
     'hsigmoid',
     'conv_projection',
-    'mse_cost',
+    'square_error_cost',
     'regression_cost',
     'classification_cost',
     'LayerOutput',
@@ -4238,13 +4238,18 @@ def __cost_input__(input, label, weight=None):
 
 @wrap_name_default()
 @layer_support()
-def mse_cost(input, label, weight=None, name=None, coeff=1.0, layer_attr=None):
+def square_error_cost(input,
+                      label,
+                      weight=None,
+                      name=None,
+                      coeff=1.0,
+                      layer_attr=None):
     """
-    mean squared error cost:
+    sum of square error cost:
 
     ..  math::
 
-        \\frac{1}{N}\sum_{i=1}^N(t_i-y_i)^2
+        cost = \\sum_{i=1}^N(t_i-y_i)^2
 
     :param name: layer name.
     :type name: basestring
@@ -4273,7 +4278,7 @@ def mse_cost(input, label, weight=None, name=None, coeff=1.0, layer_attr=None):
     return LayerOutput(name, LayerType.COST, parents=parents, size=1)
 
-regression_cost = mse_cost
+regression_cost = square_error_cost
 
 
 @wrap_name_default("cost")

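The docstring change above is substantive, not just cosmetic: the layer computes a sum of squared errors, not a mean. A minimal NumPy sketch of the quantity the updated docstring describes (the arrays t and y and their values are illustrative only, not part of PaddlePaddle's API):

    import numpy as np

    # Illustration of the updated docstring's formula, not PaddlePaddle code.
    t = np.array([1.0, 2.0, 3.0])   # targets t_i (example values)
    y = np.array([0.9, 2.1, 2.7])   # predictions y_i (example values)
    cost = np.sum((t - y) ** 2)     # cost = \sum_{i=1}^N (t_i - y_i)^2
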
@@ -45,7 +45,7 @@ layers {
   coeff: 1.0
 }
 layers {
-  name: "__mse_cost_0__"
+  name: "__square_error_cost_0__"
   type: "square_error"
   size: 1
   active_type: ""
@@ -130,7 +130,7 @@ input_layer_names: "label"
 input_layer_names: "weight"
 input_layer_names: "multi_class_label"
 output_layer_names: "__cost_0__"
-output_layer_names: "__mse_cost_0__"
+output_layer_names: "__square_error_cost_0__"
 output_layer_names: "__nce_layer_0__"
 evaluators {
   name: "classification_error_evaluator"
@@ -146,7 +146,7 @@ sub_models {
   layer_names: "weight"
   layer_names: "__fc_layer_0__"
   layer_names: "__cost_0__"
-  layer_names: "__mse_cost_0__"
+  layer_names: "__square_error_cost_0__"
   layer_names: "multi_class_label"
   layer_names: "__nce_layer_0__"
   input_layer_names: "input"
@@ -154,7 +154,7 @@ sub_models {
   input_layer_names: "weight"
   input_layer_names: "multi_class_label"
   output_layer_names: "__cost_0__"
-  output_layer_names: "__mse_cost_0__"
+  output_layer_names: "__square_error_cost_0__"
   output_layer_names: "__nce_layer_0__"
   evaluator_names: "classification_error_evaluator"
   is_recurrent_layer_group: false

@@ -10,7 +10,7 @@ fc = fc_layer(input=data, size=10, act=SoftmaxActivation())
 outputs(
     classification_cost(
         input=fc, label=lbl, weight=wt),
-    mse_cost(
+    square_error_cost(
        input=fc, label=lbl, weight=wt),
     nce_layer(
         input=fc,

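After this rename, configs call the layer under its new name, while the regression_cost alias above still points to it. A sketch of a call site, assuming data, lbl, and wt are data layers defined as in the test config in the last hunk:

    # Sketch only: data, lbl, and wt come from the surrounding test config.
    fc = fc_layer(input=data, size=10, act=SoftmaxActivation())
    outputs(square_error_cost(input=fc, label=lbl, weight=wt))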