Embarrassed, I forgot to do the type check

release/0.11.0
xzl 8 years ago
parent 1906e63f39
commit a0e77692f3

@@ -2048,6 +2048,7 @@ class ParameterReluLayer(LayerBase):
     def __init__(self, name, inputs, partial_sum=1, **args):
         super(ParameterReluLayer, self).__init__(
             name, self.layer_type, 0, inputs=inputs, **args)
         input_layer = self.get_input_layer(0)
+        config_assert(len(self.inputs) == 1, "prelu layer has only one input.")
         config_assert(input_layer.size % partial_sum == 0,
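
Editor's note: config_assert behaves like an assertion over the parsed network config, so the added line rejects a prelu layer wired to more than one input at parse time. A minimal sketch of the effect, with a plain assert standing in for the real helper (an assumption for illustration, not PaddlePaddle's actual implementation):

# Hedged sketch: a plain assert stands in for config_parser.py's config_assert.
def config_assert(condition, msg):
    # Assumption: the real helper aborts config parsing with `msg`;
    # an AssertionError stands in for that here.
    assert condition, msg

inputs = ['in_a', 'in_b']  # hypothetical misconfiguration with two inputs
config_assert(len(inputs) == 1, "prelu layer has only one input.")  # raises AssertionError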

@@ -6442,9 +6442,9 @@ def prelu_layer(input,
     """
     assert isinstance(input, LayerOutput), 'prelu_layer accepts only one input.'
     if not param_attr:
-        param_attr = ParamAttr(initial_mean=0.25,
-                               initial_std=0.0)
+        param_attr = ParamAttr(initial_mean=0.25, initial_std=0.0)
     else:
         assert isinstance(param_attr, ParameterAttribute)
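
Editor's note: this second hunk is the type check the commit message refers to: a caller-supplied param_attr must now be a ParameterAttribute. A minimal usage sketch under the v1 trainer-config API (prelu_layer, ParamAttr, and data_layer are the names this module exports; the 256-unit data layer is made up for illustration):

from paddle.trainer_config_helpers import *

data = data_layer(name='image', size=256)

# Default path: param_attr=None builds ParamAttr(initial_mean=0.25, initial_std=0.0).
prelu_default = prelu_layer(input=data, partial_sum=1)

# Explicit path: anything that is not a ParameterAttribute now trips the new assert.
prelu_custom = prelu_layer(
    input=data,
    param_attr=ParamAttr(initial_mean=0.25, initial_std=0.0))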
