Do not change API in doc PR

Branch: revert-11610-move_hooks
Author: yuyang18 (7 years ago)
Parent: 7747e01b71
Commit: 1c19f1ab44

@@ -215,7 +215,7 @@ def set_gradient_clip(clip, param_list=None, program=None):
 def append_gradient_clip_ops(param_grad):
     context = dict()
     for p, g in param_grad:
-        with p.block.program.optimization_guard(p):
+        with p.block.program.optimized_guard(p):
             clip_attr = getattr(p, 'gradient_clip_attr', NullGradientClipAttr())
             if clip_attr is None:
                 clip_attr = NullGradientClipAttr()
@@ -228,7 +228,7 @@ def append_gradient_clip_ops(param_grad):
     res = []
     for p, g in param_grad:
-        with p.block.program.optimization_guard(p):
+        with p.block.program.optimized_guard(p):
             res.append(clip_attr.create_operators(param=p, grad=g))
     return res

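For orientation, the pattern these two hunks touch is: iterate over the (param, grad) pairs and create each parameter's clip operators inside the program's per-parameter guard, so the emitted ops can be attributed to that parameter. Below is a minimal, self-contained sketch of the same shape; the optimized_guard function and NullGradientClipAttr class here are simplified stand-ins for illustration, not the real fluid objects.

import contextlib


@contextlib.contextmanager
def optimized_guard(param):
    # Stand-in for Program.optimized_guard: in fluid it tags the ops
    # created inside the block as optimization ops for `param`.
    yield


class NullGradientClipAttr(object):
    # Default clip attribute: leaves the gradient untouched.
    def create_operators(self, param, grad):
        return param, grad


def append_gradient_clip_ops(param_grad):
    res = []
    for p, g in param_grad:
        with optimized_guard(p):
            clip_attr = getattr(p, 'gradient_clip_attr', NullGradientClipAttr())
            if clip_attr is None:
                clip_attr = NullGradientClipAttr()
            res.append(clip_attr.create_operators(param=p, grad=g))
    return res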
@@ -1103,7 +1103,7 @@ class Program(object):
         self._op_role_var = [var_name]
 
     @contextlib.contextmanager
-    def optimization_guard(self, var):
+    def optimized_guard(self, var):
         """
         A with guard to set :code:`Optimization` :code:`OpRole` and
         :code:`OpRoleVar` automatically.
@@ -1116,7 +1116,7 @@ class Program(object):
         Examples:
 
             >>> p, g = backward(...)
-            >>> with program.optimization_guard(p):
+            >>> with program.optimized_guard(p):
             >>>     p = p - 0.001 * g
         """
         OpRole = core.op_proto_and_checker_maker.OpRole

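The docstring above describes optimized_guard as a with-guard that sets the :code:`Optimization` :code:`OpRole` and :code:`OpRoleVar` for the ops created inside it. The following is a minimal sketch of how such a contextlib-based guard can be structured; the role constants and the _current_role / _op_role_var attribute names are assumptions for illustration, not the exact framework internals.

import contextlib


class Program(object):
    # Simplified role constants; the real ones come from
    # core.op_proto_and_checker_maker.OpRole (assumed here).
    FORWARD, OPTIMIZE = 0, 1

    def __init__(self):
        self._current_role = Program.FORWARD
        self._op_role_var = []

    @contextlib.contextmanager
    def optimized_guard(self, var):
        # Ops created inside the `with` block are tagged as optimization
        # ops for `var`; the previous role is restored on exit.
        self._current_role = Program.OPTIMIZE
        self._op_role_var = [var]
        try:
            yield
        finally:
            self._op_role_var = []
            self._current_role = Program.FORWARD

Because the guard's behavior is untouched, renaming it only requires changing the definition and the call sites shown in this commit.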
@@ -226,7 +226,7 @@ class Optimizer(object):
         optimize_ops = []
         for param_and_grad in parameters_and_grads:
-            with param_and_grad[0].block.program.optimization_guard(
+            with param_and_grad[0].block.program.optimized_guard(
                     param_and_grad[0]):
                 if param_and_grad[0].trainable is True and param_and_grad[
                         1] is not None:

@@ -43,7 +43,7 @@ def append_regularization_ops(parameters_and_grads, regularization=None):
     """
     params_and_grads = []
     for param, grad in parameters_and_grads:
-        with param.block.program.optimization_guard(param):
+        with param.block.program.optimized_guard(param):
             # If no gradient then we don't need to do anything
             if grad is None:
                 params_and_grads.append((param, grad))

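The Optimizer and append_regularization_ops hunks are the remaining call sites of the same guard. A brief usage sketch against the simplified Program class sketched above (the parameter here is a plain placeholder object, not a fluid Variable):

program = Program()  # the simplified sketch above, not the real fluid Program
p = object()         # placeholder for a parameter

with program.optimized_guard(p):
    # Any clip / regularization / update ops created here would be tagged
    # as optimization ops for `p` in the real framework.
    assert program._op_role_var == [p]

# Role bookkeeping is restored once the block exits.
assert program._op_role_var == []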