@@ -2957,10 +2957,10 @@ def batch_norm_layer(input,
                      bias_attr=None,
                      param_attr=None,
                      layer_attr=None,
-                     mean_var_names=None,
                      batch_norm_type=None,
                      moving_average_fraction=0.9,
-                     use_global_stats=None):
+                     use_global_stats=None,
+                     mean_var_names=None):
     """
     Batch Normalization Layer. The notation of this layer as follow.

@@ -3015,8 +3015,6 @@ def batch_norm_layer(input,
     :type param_attr: ParameterAttribute
     :param layer_attr: Extra Layer Attribute.
     :type layer_attr: ExtraLayerAttribute
-    :param mean_var_names: [mean name, variance name]
-    :type mean_var_names: string list
     :param use_global_stats: whether use moving mean/variance statistics
                              during testing peroid. If None or True,
                              it will use moving mean/variance statistics during
@@ -3029,6 +3027,8 @@ def batch_norm_layer(input,
                                     :math:`runningMean = newMean*(1-factor)
                                     + runningMean*factor`
     :type moving_average_fraction: float.
+    :param mean_var_names: [mean name, variance name]
+    :type mean_var_names: string list
     :return: LayerOutput object.
     :rtype: LayerOutput
     """
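
The docstring context above gives the moving-statistics update as :math:`runningMean = newMean*(1-factor) + runningMean*factor`. A minimal sketch of that update in plain Python, with function and variable names invented here for illustration (they are not part of the patch):

def update_running_mean(running_mean, new_mean, factor=0.9):
    # Exponential moving average as stated in the docstring:
    # runningMean = newMean * (1 - factor) + runningMean * factor
    return new_mean * (1.0 - factor) + running_mean * factor

# With the default moving_average_fraction of 0.9:
# update_running_mean(0.0, 1.0) returns 0.1
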
@@ -3047,10 +3047,10 @@ def batch_norm_layer(input,
         active_type=act.name,
         type=LayerType.BATCH_NORM_LAYER,
         batch_norm_type=batch_norm_type,
-        mean_var_names=mean_var_names,
         bias=ParamAttr.to_bias(bias_attr),
         moving_average_fraction=moving_average_fraction,
         use_global_stats=use_global_stats,
+        mean_var_names=mean_var_names,
         **ExtraLayerAttribute.to_kwargs(layer_attr))

     return LayerOutput(
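
Because this change moves mean_var_names behind use_global_stats in the signature, existing call sites are safest with keyword arguments. A hedged call-site sketch, assuming the legacy paddle.trainer_config_helpers API is importable; the data_layer feeding it, its name, and its size are made up for illustration, and only the batch_norm_layer arguments shown in this diff are used:

from paddle.trainer_config_helpers import data_layer, batch_norm_layer

# Hypothetical input layer standing in for whatever precedes the batch norm.
feats = data_layer(name="features", size=256)

bn = batch_norm_layer(
    input=feats,
    batch_norm_type=None,                  # default: let the config choose the implementation
    moving_average_fraction=0.9,           # factor in the running mean/variance update
    use_global_stats=None,                 # None/True: use moving statistics at test time
    mean_var_names=["bn_mean", "bn_var"])  # [mean name, variance name]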