Merge pull request #9420 from tonyyang-svail/in_place_bn

In place batch_norm
helinwang-patch-1
Yu Yang 7 years ago committed by GitHub
commit 47e4afbe65
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@@ -1483,6 +1483,7 @@ def batch_norm(input,
param_attr=None,
bias_attr=None,
data_layout='NCHW',
in_place=False,
name=None,
moving_mean_name=None,
moving_variance_name=None):
@@ -1538,7 +1539,7 @@ def batch_norm(input,
saved_mean = helper.create_tmp_variable(dtype=dtype, stop_gradient=True)
saved_variance = helper.create_tmp_variable(dtype=dtype, stop_gradient=True)
batch_norm_out = helper.create_tmp_variable(dtype)
batch_norm_out = input if in_place else helper.create_tmp_variable(dtype)
helper.append_op(
type="batch_norm",

@@ -98,7 +98,7 @@ def img_conv_group(input,
use_mkldnn=use_mkldnn)
if conv_with_batchnorm[i]:
tmp = layers.batch_norm(input=tmp, act=conv_act)
tmp = layers.batch_norm(input=tmp, act=conv_act, in_place=True)
drop_rate = conv_batchnorm_drop_rate[i]
if abs(drop_rate) > 1e-5:
tmp = layers.dropout(x=tmp, dropout_prob=drop_rate)

Loading…
Cancel
Save