add device attr for regularizer, test=develop (#24981)

lilong12 committed 5 years ago (via GitHub)
parent 0b6145e056
commit ab5a1fb853

python/paddle/fluid/optimizer.py

@@ -715,8 +715,8 @@ class Optimizer(object):
         params_grads = append_gradient_clip_ops(params_grads)
 
         # Add regularization if any
-        params_grads = append_regularization_ops(params_grads,
-                                                 self.regularization)
+        params_grads = append_regularization_ops(
+            params_grads, self.regularization, self._param_device_map)
 
         optimize_ops = self._create_optimization_pass(params_grads)
         if table_optimize_op is not None:
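The new third argument threads the optimizer's parameter-to-device mapping into the regularization pass. A minimal sketch of the shape `_param_device_map` is assumed to have, roughly as a pipeline-style optimizer would populate it; every parameter name and device string below is a hypothetical example, not taken from the commit:

```python
# Assumed shape of the map passed as `self._param_device_map`: a plain
# dict from parameter name to a device string understood by
# `device_guard`. All entries are illustrative.
param_device_map = {
    "fc_0.w_0": "gpu:0",  # parameter owned by the first pipeline stage
    "fc_0.b_0": "gpu:0",
    "fc_1.w_0": "gpu:1",  # parameter owned by the second stage
    "fc_1.b_0": "gpu:1",
}
```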
@@ -1070,7 +1070,7 @@ class MomentumOptimizer(Optimizer):
 
 class DGCMomentumOptimizer(Optimizer):
     """
-    :api_attr: Static Graph
+    :api_attr: Static Graph
 
     DGC (Deep Gradient Compression) Momentum Optimizer. Original paper is https://arxiv.org/abs/1712.01887
@@ -2996,7 +2996,7 @@ Lamb = LambOptimizer
 
 class ModelAverage(Optimizer):
     """
-    :api_attr: Static Graph
+    :api_attr: Static Graph
 
     The ModelAverage optimizer accumulates specific continuous historical parameters
     during training. The accumulated historical range can be controlled by the passed
@@ -3305,7 +3305,7 @@ class ModelAverage(Optimizer):
 
 class ExponentialMovingAverage(object):
     """
-    :api_attr: Static Graph
+    :api_attr: Static Graph
 
     Compute the moving average of parameters with exponential decay.
     Given a parameter :math:`\\theta`, its exponential moving average (EMA)
@@ -3555,7 +3555,7 @@ class ExponentialMovingAverage(object):
 
 class PipelineOptimizer(object):
     """
-    :api_attr: Static Graph
+    :api_attr: Static Graph
 
     Pipeline Optimizer
@@ -3857,7 +3857,7 @@ class PipelineOptimizer(object):
 
 class RecomputeOptimizer(Optimizer):
     """
-    :api_attr: Static Graph
+    :api_attr: Static Graph
 
     Recompute Optimizer Wrapper
@@ -3931,7 +3931,7 @@ class RecomputeOptimizer(Optimizer):
 
     def load(self, stat_dict):
         """
-        :api_attr: Static Graph
+        :api_attr: Static Graph
 
         load function is not supported by Recompute Optimizer for now.
         :return: None
@@ -4149,7 +4149,7 @@ class RecomputeOptimizer(Optimizer):
 
 class LookaheadOptimizer(object):
     """
-    :api_attr: Static Graph
+    :api_attr: Static Graph
 
     This implements the Lookahead optimizer of the
     paper : https://arxiv.org/abs/1907.08610.

python/paddle/fluid/regularizer.py

@@ -16,7 +16,7 @@ from __future__ import print_function
 import logging
 
 from . import framework
-from .framework import in_dygraph_mode, _varbase_creator
+from .framework import in_dygraph_mode, _varbase_creator, device_guard
 from . import core
 
 __all__ = ['L1Decay', 'L2Decay', 'L1DecayRegularizer', 'L2DecayRegularizer']
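`device_guard` is the fluid context manager that pins operators created inside it to a named device; importing it here lets the regularizer place each parameter's decay ops on the device that owns the parameter. A minimal sketch of its behavior in a static-graph program, assuming the 1.8-era `paddle.fluid` API; the toy network is illustrative only:

```python
import paddle.fluid as fluid

main_prog = fluid.Program()
startup_prog = fluid.Program()
with fluid.program_guard(main_prog, startup_prog):
    x = fluid.data(name="x", shape=[None, 10], dtype="float32")
    # Ops created under device_guard("cpu") are assigned to the CPU,
    # even if the program as a whole runs on GPU places.
    with fluid.device_guard("cpu"):
        y = fluid.layers.fc(input=x, size=1)
```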
@@ -62,7 +62,9 @@ def _create_regularization_of_grad(param, grad, regularization=None):
     return new_grad
 
 
-def append_regularization_ops(parameters_and_grads, regularization=None):
+def append_regularization_ops(parameters_and_grads,
+                              regularization=None,
+                              param_device_map=None):
     """Create and add backward regularization Operators
 
     Creates and adds backward regularization operators in the BlockDesc.
@@ -93,16 +95,19 @@ def append_regularization_ops(parameters_and_grads, regularization=None):
 
     repeate_regularizer = False
     with framework.name_scope('regularization'):
         for param, grad in parameters_and_grads:
+            device = param_device_map[
+                param.name] if param_device_map else None
             if not repeate_regularizer and param.regularizer is not None and regularization is not None:
                 repeate_regularizer = True
                 logging.info(
                     "If regularizer of a Parameter has been set by 'fluid.ParamAttr' or 'fluid.WeightNormParamAttr' already. "
                     "The Regularization[%s] in Optimizer will not take effect, and it will only be applied to other Parameters!"
                     % regularization.__str__())
-            with param.block.program._optimized_guard([param, grad]):
-                new_grad = _create_regularization_of_grad(param, grad,
-                                                          regularization)
-                params_and_grads.append((param, new_grad))
+            with device_guard(device):
+                with param.block.program._optimized_guard([param, grad]):
+                    new_grad = _create_regularization_of_grad(
+                        param, grad, regularization)
+                    params_and_grads.append((param, new_grad))
     return params_and_grads
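End to end, the extended function can be driven directly as below. A minimal sketch, assuming the fluid static-graph API; `params_grads` would normally be produced inside `Optimizer.minimize`, and the uniform "cpu" mapping is purely illustrative:

```python
import paddle.fluid as fluid
from paddle.fluid.regularizer import L2Decay, append_regularization_ops

main_prog = fluid.Program()
startup_prog = fluid.Program()
with fluid.program_guard(main_prog, startup_prog):
    x = fluid.data(name="x", shape=[None, 10], dtype="float32")
    loss = fluid.layers.reduce_mean(fluid.layers.fc(input=x, size=1))
    params_grads = fluid.backward.append_backward(loss)

    # Hypothetical mapping: pin every parameter's weight-decay ops to
    # the CPU; a pipeline optimizer would supply per-stage devices here.
    param_device_map = {p.name: "cpu" for p, _ in params_grads}
    params_grads = append_regularization_ops(
        params_grads, L2Decay(regularization_coeff=1e-4), param_device_map)
```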
