@@ -6393,10 +6393,11 @@ def row_conv_layer(input,
 
 @layer_support()
 @wrap_name_default()
-@wrap_param_attr_default()
 def prelu_layer(input,
                 name=None,
                 partial_sum=1,
+                channel_shared=None,
+                num_channels=None,
                 param_attr=None,
                 layer_attr=None):
     """
@@ -6427,6 +6428,10 @@ def prelu_layer(input,
         - partial_sum = number of outputs, indicates all elements share the same weight.
 
     :type partial_sum: int
+    :param channel_shared: whether or not the parameter is shared across channels.
+        - channel_shared = True, we set the partial_sum to the number of outputs.
+        - channel_shared = False, we set the partial_sum to the number of elements in one channel.
+    :type channel_shared: bool
     :param param_attr: The parameter attribute. See ParameterAttribute for details.
     :type param_attr: ParameterAttribute
     :param layer_attr: The extra layer attribute. See ExtraLayerAttribute for
@@ -6437,7 +6442,22 @@ def prelu_layer(input,
     """
 
     assert isinstance(input, LayerOutput), 'prelu_layer accepts only one input.'
-    assert isinstance(param_attr, ParameterAttribute)
+    if not param_attr:
+        param_attr = ParamAttr(initial_mean=0.25,
+                               initial_std=0.0)
+    else:
+        assert isinstance(param_attr, ParameterAttribute)
+
+    if num_channels is None:
+        assert input.num_filters is not None
+        num_channels = input.num_filters
+
+    if channel_shared is not None:
+        assert isinstance(channel_shared, bool)
+        if channel_shared:
+            partial_sum = input.height * input.width * num_channels
+        else:
+            partial_sum = input.height * input.width
 
     l = Layer(
         name=name,
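
To make the new branch concrete, here is a small worked example of the partial_sum arithmetic (a sketch: the 4-channel 8x8 feature-map shape is hypothetical, chosen only to illustrate what the hunk above computes):

    # Mirrors the channel_shared branch added above, for a hypothetical
    # 4-channel 8x8 feature map (shape chosen for this sketch only).
    height, width, num_channels = 8, 8, 4

    # channel_shared = True: one weight shared by every output element.
    partial_sum = height * width * num_channels
    assert partial_sum == 256

    # channel_shared = False: one weight per channel (channel-wise PReLU).
    partial_sum = height * width
    assert partial_sum == 64
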
@@ -6449,6 +6469,7 @@ def prelu_layer(input,
         name=name,
         layer_type=LayerType.PRELU,
         parents=input,
+        num_filters=num_channels,
         size=l.config.size)
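
For reference, a minimal usage sketch of prelu_layer after this change (the data and conv layers here are illustrative assumptions, not part of the patch; only the prelu_layer keyword arguments come from the diff above):

    from paddle.trainer_config_helpers import *

    # Hypothetical 3-channel 32x32 image feeding a conv layer, so the input
    # to prelu_layer carries height, width, and num_filters.
    img = data_layer(name='image', size=3 * 32 * 32, height=32, width=32)
    conv = img_conv_layer(input=img, num_channels=3, filter_size=3,
                          num_filters=16, stride=1, padding=1,
                          act=LinearActivation())

    # Channel-wise slopes: partial_sum is set to height * width internally.
    prelu_cw = prelu_layer(input=conv, channel_shared=False)

    # A single slope shared by all outputs: partial_sum is set to
    # height * width * num_channels internally.
    prelu_shared = prelu_layer(input=conv, channel_shared=True)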