@@ -68,16 +68,13 @@ class Optimizer(object):
+                 grad_clip=None,
                  name=None):
         self._parameter_list = parameter_list
         self._name = name
         if framework.in_dygraph_mode():
             if not isinstance(learning_rate, float) and \
                     not isinstance(learning_rate, LearningRateDecay):
                 raise TypeError(
                     "learning rate should be float or LearningRateDecay, got %s here"
                     % type(learning_rate))
-            if name is not None:
-                self._name = unique_name.generate(name)
-            else:
-                self._name = unique_name.generate(self.__class__.__name__)
             if self._parameter_list is None:
                 raise AttributeError(
                     "parameter_list argument given to the Optimizer should not be None in dygraph mode."
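
A minimal usage sketch of the constructor contract this hunk enforces (illustrative only, not part of the diff; the fluid 1.x dygraph names below are assumptions about the surrounding API):

    import paddle.fluid as fluid

    with fluid.dygraph.guard():
        linear = fluid.dygraph.Linear(10, 10)
        # Accepted in dygraph mode: a float learning rate plus an explicit
        # parameter_list.
        sgd = fluid.optimizer.SGD(learning_rate=0.01,
                                  parameter_list=linear.parameters())
        # Rejected: omitting parameter_list raises AttributeError, and a
        # learning rate that is neither float nor LearningRateDecay raises
        # TypeError, per the checks above.
        # fluid.optimizer.SGD(learning_rate=0.01)
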
@@ -96,7 +93,6 @@ class Optimizer(object):
                 raise TypeError(
                     "learning rate should be float or Variable, got %s here" %
                     type(learning_rate))
-            self._name = name
 
+        if grad_clip is not None:
+            if not isinstance(grad_clip, GradientClipBase):
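
A similar sketch for the new grad_clip argument (again an illustration, assuming fluid.clip.GradientClipByGlobalNorm is one of the GradientClipBase-derived classes the check accepts):

    import paddle.fluid as fluid

    clip = fluid.clip.GradientClipByGlobalNorm(clip_norm=1.0)
    with fluid.dygraph.guard():
        linear = fluid.dygraph.Linear(10, 10)
        sgd = fluid.optimizer.SGD(learning_rate=0.01,
                                  parameter_list=linear.parameters(),
                                  grad_clip=clip)
        # Passing anything that is not a GradientClipBase instance, e.g.
        # grad_clip=1.0, fails the isinstance check above.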