@@ -279,7 +279,7 @@ class AdagradOptimizer(Optimizer):
     """
     _moment_acc_str = "moment"
 
-    def __init__(self, learning_rate, epsilon):
+    def __init__(self, learning_rate, epsilon=1.0e-6):
         assert learning_rate is not None
         assert epsilon is not None
         super(AdagradOptimizer, self).__init__()
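
With this change, epsilon defaults to 1.0e-6, so callers only need to supply a learning rate; the assert on epsilon still passes because the default is always set. A minimal usage sketch under that assumption (the import path is hypothetical, only the constructor signature comes from the diff):

    # Hypothetical import; assumes AdagradOptimizer is exposed from this module.
    from optimizer import AdagradOptimizer

    # epsilon now defaults to 1.0e-6, so only learning_rate is required.
    opt = AdagradOptimizer(learning_rate=0.01)

    # Passing epsilon explicitly still works as before.
    opt_explicit = AdagradOptimizer(learning_rate=0.01, epsilon=1.0e-8)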