# Copyright (c) 2016 Baidu, Inc. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
|
||
|
Paddle Supported Activations.
|
||
|
|
||
|
Each activation inherit BaseActivation, which has two attributes.
|
||
|
|
||
|
- name: activation name in paddle config.
|
||
|
- support_hppl: True if supported by hppl. lstm layer can only use activations
|
||
|
supported by hppl (the name hppl will be revised later).
|
||
|
"""
|
||
|
|
||
|
__all__ = ["TanhActivation", "SigmoidActivation",
|
||
|
"SoftmaxActivation", "IdentityActivation", "LinearActivation",
|
||
|
'SequenceSoftmaxActivation',
|
||
|
"ReluActivation", "BReluActivation", "SoftReluActivation", "STanhActivation",
|
||
|
"AbsActivation", "SquareActivation", "BaseActivation"]
|
||
|
|
||
|
|
||
|
class BaseActivation(object):
    """
    Marker base class for all activations.
    """

    def __init__(self, name, support_hppl):
        self.name = name
        self.support_hppl = support_hppl


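# Illustrative sketch (not part of the original Paddle module): every concrete
# activation defined below only fills in the two BaseActivation attributes, so
# a config can look up the activation's config name and whether hppl supports
# it. The helper name below is hypothetical and added purely for illustration,
# e.g. _describe_activation(TanhActivation()) == ('tanh', True).
def _describe_activation(act):
    """Return the (name, support_hppl) pair carried by an activation object."""
    assert isinstance(act, BaseActivation)
    return act.name, act.support_hppl

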
class TanhActivation(BaseActivation):
    """
    Tanh activation.

    .. math::

       f(z) = tanh(z) = \\frac{e^z - e^{-z}}{e^z + e^{-z}}
    """

    def __init__(self):
        BaseActivation.__init__(self, 'tanh', True)


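# A minimal numeric sketch of the tanh formula documented above, using only
# the standard library. This helper is illustrative only and is not part of
# the original Paddle module or used by the config parser.
def _reference_tanh(z):
    """Compute tanh(z) = (e^z - e^{-z}) / (e^z + e^{-z}) for a scalar z."""
    import math
    return (math.exp(z) - math.exp(-z)) / (math.exp(z) + math.exp(-z))

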
class SigmoidActivation(BaseActivation):
    """
    Sigmoid activation.

    .. math::

       f(z) = \\frac{1}{1 + e^{-z}}
    """

    def __init__(self):
        BaseActivation.__init__(self, 'sigmoid', True)


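# A minimal numeric sketch of the sigmoid formula above; illustrative only,
# not part of the original Paddle module.
def _reference_sigmoid(z):
    """Compute 1 / (1 + e^{-z}) for a scalar z."""
    import math
    return 1.0 / (1.0 + math.exp(-z))

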
class SoftmaxActivation(BaseActivation):
    """
    Softmax activation for simple (non-sequence) input.

    .. math::

       P(y = j | x) = \\frac{e^{x_j}}{\\sum^K_{k=1} e^{x_k}}
    """

    def __init__(self):
        BaseActivation.__init__(self, 'softmax', False)


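# A minimal numeric sketch of the softmax formula above for a plain list of
# scores; illustrative only, not part of the original Paddle module. The
# maximum is subtracted first, a common trick to keep exp() from overflowing;
# it does not change the result.
def _reference_softmax(xs):
    """Return [e^{x_j} / sum_k e^{x_k}] for a list of scalars xs."""
    import math
    m = max(xs)
    exps = [math.exp(x - m) for x in xs]
    total = sum(exps)
    return [e / total for e in exps]

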
class SequenceSoftmaxActivation(BaseActivation):
    """
    Softmax activation applied across one sequence. The dimension of the input
    feature must be 1, and the input must be a sequence.

    .. code:: python

        result = softmax([each_feature_vector[0]
                          for each_feature_vector in input_feature])
        for i, each_time_step_output in enumerate(output):
            each_time_step_output = result[i]
    """

    def __init__(self):
        BaseActivation.__init__(self, 'sequence_softmax', False)


class IdentityActivation(BaseActivation):
    """
    Identity Activation.

    The output equals the input; nothing is done in either the forward or the
    backward pass.
    """

    def __init__(self):
        BaseActivation.__init__(self, '', False)


LinearActivation = IdentityActivation


class ReluActivation(BaseActivation):
    """
    Relu activation.

    Forward: :math:`y = max(0, z)`

    Derivative:

    .. math::

       \\begin{cases}
       1 &\\quad \\mathrm{if} \\, z > 0 \\\\
       0 &\\quad \\mathrm{otherwise}
       \\end{cases}
    """

    def __init__(self):
        BaseActivation.__init__(self, 'relu', True)


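# A minimal numeric sketch of the relu forward pass and its derivative as
# documented above; illustrative only, not part of the original Paddle module.
def _reference_relu(z):
    """Return (max(0, z), derivative), the derivative being 1 if z > 0 else 0."""
    return max(0.0, z), (1.0 if z > 0 else 0.0)

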
class BReluActivation(BaseActivation):
    """
    BRelu Activation.

    Forward: :math:`y = min(24, max(0, z))`

    Derivative:

    .. math::

       \\begin{cases}
       1 &\\quad \\mathrm{if} \\, 0 < z < 24 \\\\
       0 &\\quad \\mathrm{otherwise}
       \\end{cases}
    """

    def __init__(self):
        BaseActivation.__init__(self, 'brelu', False)


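# A minimal numeric sketch of the bounded (clipped) relu described above, with
# the documented upper bound of 24; illustrative only, not part of the
# original Paddle module.
def _reference_brelu(z):
    """Return (min(24, max(0, z)), derivative), 1 inside (0, 24) and 0 outside."""
    return min(24.0, max(0.0, z)), (1.0 if 0.0 < z < 24.0 else 0.0)

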
class SoftReluActivation(BaseActivation):
    """
    SoftRelu Activation.
    """

    def __init__(self):
        BaseActivation.__init__(self, 'softrelu', False)


class STanhActivation(BaseActivation):
    """
    Scaled Tanh Activation.

    .. math::

       f(z) = 1.7159 \\cdot tanh(\\frac{2}{3} z)
    """

    def __init__(self):
        BaseActivation.__init__(self, 'stanh', False)


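# A minimal numeric sketch of the scaled tanh formula above; illustrative
# only, not part of the original Paddle module.
def _reference_stanh(z):
    """Compute 1.7159 * tanh(2/3 * z) for a scalar z."""
    import math
    return 1.7159 * math.tanh(2.0 / 3.0 * z)

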
class AbsActivation(BaseActivation):
    """
    Abs Activation.

    Forward: :math:`f(z) = abs(z)`

    Derivative:

    .. math::

       \\begin{cases}
       1  &\\quad \\mathrm{if} \\, z > 0 \\\\
       -1 &\\quad \\mathrm{if} \\, z < 0 \\\\
       0  &\\quad \\mathrm{if} \\, z = 0
       \\end{cases}
    """

    def __init__(self):
        BaseActivation.__init__(self, 'abs', False)


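# A minimal numeric sketch of the abs forward pass and the piecewise
# derivative documented above; illustrative only, not part of the original
# Paddle module.
def _reference_abs(z):
    """Return (abs(z), derivative): 1 for z > 0, -1 for z < 0, 0 at z = 0."""
    if z > 0:
        return z, 1.0
    elif z < 0:
        return -z, -1.0
    return 0.0, 0.0

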
class SquareActivation(BaseActivation):
    """
    Square Activation.

    .. math::

       f(z) = z^2
    """

    def __init__(self):
        BaseActivation.__init__(self, 'square', False)