Revert "make append activation in place by default (#9417)"

This reverts commit ce16400dae.
Branch: helinwang-patch-1
Author: Yang Yu
Parent: 1a4b0d63e4
Commit: 34a440fa64

@@ -398,6 +398,7 @@ class LayerHelper(object):
             return input_var
         if isinstance(act, basestring):
             act = {'type': act}
+        tmp = self.create_tmp_variable(dtype=input_var.dtype)
 
         if 'use_mkldnn' in self.kwargs:
             act['use_mkldnn'] = self.kwargs.get('use_mkldnn')
@@ -407,9 +408,9 @@ class LayerHelper(object):
         self.append_op(
             type=act_type,
             inputs={"X": [input_var]},
-            outputs={"Out": [input_var]},
+            outputs={"Out": [tmp]},
             attrs=act)
-        return input_var
+        return tmp
 
     def _get_default_initializer(self, dtype):
         if dtype is None or dtype_is_floating(dtype) is True:
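
For reference, below is a minimal sketch of how the helper method (presumably LayerHelper.append_activation, going by the commit title) reads once this revert is applied. The method name and every line not shown in the two hunks above (the 'act' lookup and the act_type = act.pop('type') line) are reconstructed from the hunk context and should be treated as assumptions, not as part of the diff itself.

    # Sketch only: lines not shown in the hunks above are assumptions
    # reconstructed from context, not taken verbatim from the diff.
    def append_activation(self, input_var):
        act = self.kwargs.get('act', None)   # assumed lookup of the 'act' kwarg
        if act is None:
            return input_var                 # no activation requested
        if isinstance(act, basestring):      # Python 2 style, as in the diff
            act = {'type': act}
        # Post-revert: allocate a temporary output instead of reusing input_var.
        tmp = self.create_tmp_variable(dtype=input_var.dtype)

        if 'use_mkldnn' in self.kwargs:
            act['use_mkldnn'] = self.kwargs.get('use_mkldnn')
        act_type = act.pop('type')           # assumed; sits between the two hunks
        self.append_op(
            type=act_type,
            inputs={"X": [input_var]},
            outputs={"Out": [tmp]},          # activation output goes to tmp ...
            attrs=act)
        return tmp                           # ... and tmp is what callers receive

The in-place variant being reverted wrote outputs={"Out": [input_var]} and returned input_var, so the activation overwrote its own input; after the revert the activation result lives in a fresh temporary variable and the pre-activation value in input_var is left untouched.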
