Allow setting both is_static and initial_mean, initial_std at the same time.

Sometimes we simply want to have a fixed initialization for a model.
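
A quick sketch of the intended usage (the import path and the exact contents of attr.attr are assumptions inferred from this diff, not verified against the library):

    # Hypothetical usage; module path assumed from PaddlePaddle's
    # trainer_config_helpers package.
    from paddle.trainer_config_helpers.attrs import ParameterAttribute

    # A parameter that stays fixed during training (is_static=True) but still
    # gets an explicit mean/std initialization instead of the 'initial_smart'
    # default.
    attr = ParameterAttribute(is_static=True, initial_mean=0.0, initial_std=0.01)

    # Before this commit the constructor stopped at {'is_static': True} and the
    # initialization arguments were ignored; now both end up in the same dict,
    # roughly {'is_static': True, 'initial_mean': 0.0, 'initial_std': 0.01}.
    print(attr.attr)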
refactor_docs
xuwei06 8 years ago
parent 21be601be5
commit 35bfb1d5c3

@@ -110,15 +110,16 @@ class ParameterAttribute(object):
                  momentum=None,
                  gradient_clipping_threshold=None,
                  sparse_update=False):
-        # initialize strategy.
+        self.attr = {}
         if is_static:
-            self.attr = {'is_static': True}
-        elif initial_std is None and initial_mean is None and initial_max \
+            self.attr['is_static'] = True
+
+        if initial_std is None and initial_mean is None and initial_max \
                 is None and initial_min is None:
-            self.attr = {'initial_smart': True}
+            self.attr['initial_smart'] = True
         elif is_compatible_with(initial_std, float) or \
              is_compatible_with(initial_mean, float):
-            self.attr = dict()
             if initial_std is not None:
                 self.attr['initial_std'] = initial_std
             if initial_mean is not None:
@@ -131,7 +132,6 @@ class ParameterAttribute(object):
             assert initial_min < initial_max
             initial_mean = (initial_max + initial_min) / 2
             initial_std = initial_mean - initial_min
-            self.attr = dict()
             self.attr['initial_mean'] = initial_mean
             self.attr['initial_std'] = initial_std
             self.attr['initial_strategy'] = 1  # Uniform Random
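
For the uniform-range branch touched by the second hunk, the derived values follow directly from the lines above; continuing the sketch, a hedged example of what the constructor now produces when is_static is combined with a range (exact dict contents inferred from the diff):

    # Uniform-random initialization combined with a static parameter.
    attr = ParameterAttribute(is_static=True, initial_max=0.1, initial_min=-0.1)

    # From the code above: initial_mean = (0.1 + -0.1) / 2 = 0.0
    #                      initial_std  = 0.0 - (-0.1)     = 0.1
    #                      initial_strategy = 1   (uniform random)
    # so attr.attr should be roughly {'is_static': True, 'initial_mean': 0.0,
    # 'initial_std': 0.1, 'initial_strategy': 1}.
    print(attr.attr)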
