Remove optimizer which in fleet, test=develop (#27606)

Author: WangXi (committed via GitHub)
Commit: 5641ea2bf6
Parent: 68df20d2f2

--- a/python/paddle/optimizer/__init__.py
+++ b/python/paddle/optimizer/__init__.py
@@ -14,24 +14,20 @@
 __all__ = [
     'Adadelta', 'AdadeltaOptimizer', 'Adagrad', 'AdagradOptimizer', 'Adam',
-    'Adamax', 'AdamW', 'DecayedAdagrad', 'DecayedAdagradOptimizer',
-    'DGCMomentumOptimizer', 'Dpsgd', 'DpsgdOptimizer',
-    'ExponentialMovingAverage', 'Ftrl', 'FtrlOptimizer', 'LambOptimizer',
-    'LarsMomentum', 'LarsMomentumOptimizer', 'LookaheadOptimizer',
-    'ModelAverage', 'Momentum', 'MomentumOptimizer', 'PipelineOptimizer',
-    'RecomputeOptimizer', 'RMSProp', 'SGD', 'SGDOptimizer', 'Optimizer',
-    '_LRScheduler', 'NoamLR', 'PiecewiseLR', 'NaturalExpLR', 'InverseTimeLR',
-    'PolynomialLR', 'LinearLrWarmup', 'ExponentialLR', 'MultiStepLR', 'StepLR',
-    'LambdaLR', 'ReduceLROnPlateau', 'CosineAnnealingLR'
+    'Adamax', 'AdamW', 'DecayedAdagrad', 'DecayedAdagradOptimizer', 'Dpsgd',
+    'DpsgdOptimizer', 'ExponentialMovingAverage', 'Ftrl', 'FtrlOptimizer',
+    'LookaheadOptimizer', 'ModelAverage', 'Momentum', 'MomentumOptimizer',
+    'RMSProp', 'SGD', 'SGDOptimizer', 'Optimizer', '_LRScheduler', 'NoamLR',
+    'PiecewiseLR', 'NaturalExpLR', 'InverseTimeLR', 'PolynomialLR',
+    'LinearLrWarmup', 'ExponentialLR', 'MultiStepLR', 'StepLR', 'LambdaLR',
+    'ReduceLROnPlateau', 'CosineAnnealingLR'
 ]
 
 from ..fluid.optimizer import Momentum, Adagrad, Dpsgd, DecayedAdagrad, Ftrl,\
-    AdagradOptimizer,DpsgdOptimizer,\
-    DecayedAdagradOptimizer,FtrlOptimizer,AdadeltaOptimizer, \
-    ModelAverage, LarsMomentum, DGCMomentumOptimizer, LambOptimizer,\
-    ExponentialMovingAverage, PipelineOptimizer, LookaheadOptimizer, \
-    RecomputeOptimizer, LarsMomentumOptimizer
+    AdagradOptimizer, DpsgdOptimizer, DecayedAdagradOptimizer, \
+    FtrlOptimizer, AdadeltaOptimizer, ModelAverage, \
+    ExponentialMovingAverage, LookaheadOptimizer
 from .optimizer import Optimizer
 from .adam import Adam
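The optimizers removed above (DGCMomentumOptimizer, LambOptimizer, LarsMomentum, LarsMomentumOptimizer, PipelineOptimizer, RecomputeOptimizer) are the ones fleet already provides as distributed-training strategies, so paddle.optimizer no longer re-exports them. Under the fleet API they are switched on through DistributedStrategy flags, and fleet.distributed_optimizer() wraps a plain optimizer accordingly. Below is a minimal sketch of that pattern, assuming the Paddle >= 2.0 collective fleet API (fleet.init, DistributedStrategy, distributed_optimizer); exact flag names and supported execution modes may differ by version, and the toy model is only there to make the sketch self-contained.

    # Sketch: enable a fleet-managed optimizer (LARS here) via
    # DistributedStrategy instead of importing LarsMomentumOptimizer
    # from paddle.optimizer. Assumes the Paddle >= 2.0 collective fleet API.
    import paddle
    import paddle.distributed.fleet as fleet

    fleet.init(is_collective=True)      # collective (multi-GPU) training mode

    strategy = fleet.DistributedStrategy()
    strategy.lars = True                # stands in for LarsMomentumOptimizer
    # strategy.dgc, strategy.lamb, strategy.recompute and strategy.pipeline
    # similarly cover the other optimizers dropped by this commit.

    linear = paddle.nn.Linear(10, 1)    # toy model so the sketch is runnable
    base_opt = paddle.optimizer.Momentum(
        learning_rate=0.01, momentum=0.9, parameters=linear.parameters())
    # fleet wraps the plain optimizer according to the strategy flags above.
    opt = fleet.distributed_optimizer(base_opt, strategy=strategy)

Such a script is typically started with a distributed launcher (for example python -m paddle.distributed.launch, or the fleetrun tool), which sets up the environment fleet.init expects.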
