From 6d9ae660965d8d60bbb5264a3de9cae9944eb22c Mon Sep 17 00:00:00 2001
From: yaoxuefeng
Date: Tue, 29 Sep 2020 14:14:46 +0800
Subject: [PATCH] delete ExponentialMovingAverage in paddle/optimizer (#27683)

---
 python/paddle/optimizer/__init__.py | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/python/paddle/optimizer/__init__.py b/python/paddle/optimizer/__init__.py
index c9e49ce614..30de88cc29 100644
--- a/python/paddle/optimizer/__init__.py
+++ b/python/paddle/optimizer/__init__.py
@@ -15,19 +15,19 @@
 __all__ = [
     'Adadelta', 'AdadeltaOptimizer', 'Adagrad', 'AdagradOptimizer', 'Adam',
     'Adamax', 'AdamW', 'DecayedAdagrad', 'DecayedAdagradOptimizer', 'Dpsgd',
-    'DpsgdOptimizer', 'ExponentialMovingAverage', 'Ftrl', 'FtrlOptimizer',
-    'LookaheadOptimizer', 'ModelAverage', 'Momentum', 'MomentumOptimizer',
-    'RMSProp', 'SGD', 'SGDOptimizer', 'Optimizer', '_LRScheduler', 'NoamLR',
-    'PiecewiseLR', 'NaturalExpLR', 'InverseTimeLR', 'PolynomialLR',
-    'LinearLrWarmup', 'ExponentialLR', 'MultiStepLR', 'StepLR', 'LambdaLR',
-    'ReduceLROnPlateau', 'CosineAnnealingLR'
+    'DpsgdOptimizer', 'Ftrl', 'FtrlOptimizer', 'LookaheadOptimizer',
+    'ModelAverage', 'Momentum', 'MomentumOptimizer', 'RMSProp', 'SGD',
+    'SGDOptimizer', 'Optimizer', '_LRScheduler', 'NoamLR', 'PiecewiseLR',
+    'NaturalExpLR', 'InverseTimeLR', 'PolynomialLR', 'LinearLrWarmup',
+    'ExponentialLR', 'MultiStepLR', 'StepLR', 'LambdaLR', 'ReduceLROnPlateau',
+    'CosineAnnealingLR'
 ]


 from ..fluid.optimizer import Momentum, Adagrad, Dpsgd, DecayedAdagrad, Ftrl,\
             AdagradOptimizer, DpsgdOptimizer, DecayedAdagradOptimizer, \
             FtrlOptimizer, AdadeltaOptimizer, ModelAverage, \
-            ExponentialMovingAverage, LookaheadOptimizer
+            LookaheadOptimizer

 from .optimizer import Optimizer
 from .adam import Adam
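
Migration note: this patch only drops the re-export from the paddle.optimizer namespace; the class definition in paddle.fluid.optimizer is left untouched, so existing static-graph code can keep importing it from there. Below is a minimal sketch of that usage, assuming a Paddle build of this era where fluid still ships the class; the toy network and variable names are illustrative only, not part of the patch.

    import paddle
    import paddle.fluid as fluid
    from paddle.fluid.optimizer import ExponentialMovingAverage

    paddle.enable_static()  # fluid's EMA helper targets the static graph

    # A toy program so the EMA has trainable parameters to track.
    data = fluid.data(name='x', shape=[None, 5], dtype='float32')
    hidden = fluid.layers.fc(input=data, size=10)
    cost = fluid.layers.mean(hidden)

    optimizer = fluid.optimizer.Adam(learning_rate=0.001)
    optimizer.minimize(cost)

    # Track an exponential moving average of the parameters; ema.update()
    # appends the EMA update ops after the optimizer step. At evaluation time,
    # "with ema.apply(exe): ..." temporarily loads the averaged weights and
    # ema.restore(exe) swaps the original parameters back.
    ema = ExponentialMovingAverage(decay=0.999)
    ema.update()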