@@ -62,7 +62,7 @@ def noam_decay(d_model, warmup_steps):
         The decayed learning rate.
     """
     global_step = _decay_step_counter(1)
-    with init_on_cpu():
-        a = global_step**-0.5
-        b = (warmup_steps**-1.5) * global_step
-        lr_value = (d_model**-0.5) * ops.elementwise_min(a, b)
+
+    a = global_step**-0.5
+    b = (warmup_steps**-1.5) * global_step
+    lr_value = (d_model**-0.5) * ops.elementwise_min(a, b)
@@ -108,8 +108,6 @@ def exponential_decay(learning_rate, decay_steps, decay_rate, staircase=False):
     """
     global_step = _decay_step_counter()
 
-    with init_on_cpu():
-        # update learning_rate
-        div_res = global_step / decay_steps
-        if staircase:
-            div_res = ops.floor(div_res)
+    div_res = global_step / decay_steps
+    if staircase:
+        div_res = ops.floor(div_res)
@@ -138,7 +136,6 @@ def natural_exp_decay(learning_rate, decay_steps, decay_rate, staircase=False):
     """
     global_step = _decay_step_counter()
 
-    with init_on_cpu():
-        div_res = global_step / decay_steps
-        if staircase:
-            div_res = ops.floor(div_res)
+    div_res = global_step / decay_steps
+    if staircase:
+        div_res = ops.floor(div_res)
@@ -184,7 +181,6 @@ def inverse_time_decay(learning_rate, decay_steps, decay_rate, staircase=False):
     """
     global_step = _decay_step_counter()
 
-    with init_on_cpu():
-        div_res = global_step / decay_steps
-        if staircase:
-            div_res = ops.floor(div_res)
+    div_res = global_step / decay_steps
+    if staircase:
+        div_res = ops.floor(div_res)
@@ -224,13 +220,10 @@ def polynomial_decay(learning_rate,
     """
     global_step = _decay_step_counter()
 
-    with init_on_cpu():
-        if cycle:
-            div_res = ops.ceil(global_step / decay_steps)
-            zero_var = tensor.fill_constant(
-                shape=[1], dtype='float32', value=0.0)
-            one_var = tensor.fill_constant(
-                shape=[1], dtype='float32', value=1.0)
+    if cycle:
+        div_res = ops.ceil(global_step / decay_steps)
+        zero_var = tensor.fill_constant(shape=[1], dtype='float32', value=0.0)
+        one_var = tensor.fill_constant(shape=[1], dtype='float32', value=1.0)
 
-            with control_flow.Switch() as switch:
-                with switch.case(global_step == zero_var):
+        with control_flow.Switch() as switch:
+            with switch.case(global_step == zero_var):
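Note appended after the patch excerpt (not part of the diff): a minimal plain-Python sketch of the schedule the noam_decay hunk computes, handy for checking the arithmetic outside of fluid tensors. The function name noam_lr and the example values d_model=512, warmup_steps=4000 are illustrative assumptions, not taken from the patch.

def noam_lr(step, d_model, warmup_steps):
    # Same arithmetic as the patched noam_decay, on plain floats:
    # min() picks the linear warmup branch (step * warmup_steps**-1.5)
    # early on, then the step**-0.5 decay branch after warmup.
    a = step ** -0.5
    b = step * warmup_steps ** -1.5
    return (d_model ** -0.5) * min(a, b)

# Illustrative check: the rate peaks at step == warmup_steps.
print(noam_lr(100, 512, 4000))    # still warming up
print(noam_lr(4000, 512, 4000))   # peak: (512 * 4000) ** -0.5
print(noam_lr(40000, 512, 4000))  # decaying as step ** -0.5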