remove _optimized_guard in dygraph_mode (#22143)

* remove _optimized_guard in dygraph_mode test=develop

* remove commented-out code test=develop

* remove list append test=develop

Authored by Aurelius84, committed via GitHub.
commit 60a6d68fb9, parent a2603c5bf4

@@ -441,17 +441,12 @@ class Optimizer(object):
             [p[0] for p in parameters_and_grads if p[0].trainable])
         self._create_global_learning_rate()
 
-        optimize_ops = []
         if framework.in_dygraph_mode():
             for param_and_grad in parameters_and_grads:
                 if param_and_grad[1] is None:
                     continue
-                with param_and_grad[0].block.program._optimized_guard(
-                        param_and_grad):
-                    if param_and_grad[0].trainable is True:
-                        optimize_op = self._append_optimize_op(target_block,
-                                                               param_and_grad)
-                        optimize_ops.append(optimize_op)
+                if param_and_grad[0].trainable is True:
+                    self._append_optimize_op(target_block, param_and_grad)
         else:
             for param_and_grad in parameters_and_grads:
                 if param_and_grad[1] is None:
@@ -459,9 +454,7 @@ class Optimizer(object):
                 with param_and_grad[0].block.program._optimized_guard(
                         param_and_grad), name_scope("optimizer"):
                     if param_and_grad[0].trainable is True:
-                        optimize_op = self._append_optimize_op(target_block,
-                                                               param_and_grad)
-                        optimize_ops.append(optimize_op)
+                        self._append_optimize_op(target_block, param_and_grad)
 
         # Get custom finish ops for subclasses
         # FIXME: Need to fix this once we figure out how to handle dependencies
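For context, the sketch below mirrors the control flow this commit leaves behind. It is a minimal, self-contained mock, not Paddle's actual classes: the names _optimized_guard and _append_optimize_op are stand-ins, and the dict-based parameters are illustrative. The idea it demonstrates: in dygraph (imperative) mode ops execute eagerly as they are created, so a guard that scopes op attribution while building a static program does nothing useful there, and the diff also drops the optimize_ops bookkeeping.

    # Minimal sketch (mock names, not Paddle's real API) of the simplified pass.
    import contextlib

    @contextlib.contextmanager
    def _optimized_guard(param_and_grad):
        # Stand-in for program._optimized_guard: only meaningful while a
        # static program is being built; modeled here as a no-op scope.
        yield

    def _append_optimize_op(target_block, param_and_grad):
        # Stand-in for the per-parameter update op (e.g. an SGD step).
        param, grad = param_and_grad
        param["value"] -= 0.1 * grad

    def create_optimization_pass(parameters_and_grads, in_dygraph_mode,
                                 target_block=None):
        if in_dygraph_mode:
            # Eager mode: apply each update immediately; nothing to guard
            # and no op list to collect.
            for param_and_grad in parameters_and_grads:
                if param_and_grad[1] is None:
                    continue
                if param_and_grad[0]["trainable"]:
                    _append_optimize_op(target_block, param_and_grad)
        else:
            # Graph mode: keep the guard so emitted ops stay associated
            # with this parameter/gradient pair in the built program.
            for param_and_grad in parameters_and_grads:
                if param_and_grad[1] is None:
                    continue
                with _optimized_guard(param_and_grad):
                    if param_and_grad[0]["trainable"]:
                        _append_optimize_op(target_block, param_and_grad)

    # Usage: two parameters, one without a gradient (skipped).
    params = [({"value": 1.0, "trainable": True}, 0.5),
              ({"value": 2.0, "trainable": True}, None)]
    create_optimization_pass(params, in_dygraph_mode=True)
    print(params[0][0]["value"])  # 0.95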
