Add program

revert-11610-move_hooks
yuyang18 7 years ago
parent 958ab99ef8
commit 08995ac94d

@@ -215,7 +215,7 @@ def set_gradient_clip(clip, param_list=None, program=None):
 def append_gradient_clip_ops(param_grad):
     context = dict()
     for p, g in param_grad:
-        with p.block.program.optimized_guard(p):
+        with p.block.program.optimization_guard(p):
             clip_attr = getattr(p, 'gradient_clip_attr', NullGradientClipAttr())
             if clip_attr is None:
                 clip_attr = NullGradientClipAttr()
@@ -228,7 +228,7 @@ def append_gradient_clip_ops(param_grad):
     res = []
     for p, g in param_grad:
-        with p.block.program.optimized_guard(p):
+        with p.block.program.optimization_guard(p):
             res.append(clip_attr.create_operators(param=p, grad=g))
     return res

File diff suppressed because it is too large

@@ -226,7 +226,7 @@ class Optimizer(object):
         optimize_ops = []
         for param_and_grad in parameters_and_grads:
-            with param_and_grad[0].block.program.optimized_guard(
+            with param_and_grad[0].block.program.optimization_guard(
                     param_and_grad[0]):
                 if param_and_grad[0].trainable is True and param_and_grad[
                         1] is not None:

@@ -43,7 +43,7 @@ def append_regularization_ops(parameters_and_grads, regularization=None):
     """
     params_and_grads = []
    for param, grad in parameters_and_grads:
-        with param.block.program.optimized_guard(param):
+        with param.block.program.optimization_guard(param):
            # If no gradient then we don't need to do anything
            if grad is None:
                params_and_grads.append((param, grad))
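
Note: every visible hunk in this commit makes the same one-line change, renaming the per-parameter context manager from optimized_guard to optimization_guard at its call sites in the gradient-clip, optimizer, and regularizer code. The guard's own definition sits in the suppressed file diff above, so it is not shown on this page. As orientation only, the sketch below is an assumption about the general shape of such a guard (a contextmanager that records which parameter the ops being appended belong to, then restores the previous state); the names Program and _current_param are hypothetical, and this is not the actual Paddle implementation.

from contextlib import contextmanager


class Program(object):
    def __init__(self):
        # The parameter whose ops are currently being appended, so later
        # passes can attribute clip/regularization/optimize ops to it.
        self._current_param = None

    @contextmanager
    def optimization_guard(self, param):
        prev = self._current_param
        self._current_param = param
        try:
            yield
        finally:
            # Restore the previous state even if op construction raises,
            # so nested guards cannot mis-attribute subsequent ops.
            self._current_param = prev

With a guard of this shape, the call sites in the hunks read naturally: with p.block.program.optimization_guard(p): appends ops on behalf of p, and the finally block guarantees the guard unwinds cleanly.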
