|
|
|
@ -652,8 +652,8 @@ def selu(x,
|
|
|
|
|
|
|
|
|
|
Parameters:
|
|
|
|
|
x (Tensor): The input Tensor with data type float32, float64.
|
|
|
|
|
scale (float, optional): The value of scale for selu. Default is 1.0507009873554804934193349852946
|
|
|
|
|
alpha (float, optional): The value of alpha for selu. Default is 1.6732632423543772848170429916717
|
|
|
|
|
scale (float, optional): The value of scale (must be greater than 1.0) for selu. Default is 1.0507009873554804934193349852946
|
|
|
|
|
alpha (float, optional): The value of alpha (must be no less than zero) for selu. Default is 1.6732632423543772848170429916717
|
|
|
|
|
name (str, optional): Name for the operation (optional, default is None).
|
|
|
|
|
For more information, please refer to :ref:`api_guide_Name`.
|
|
|
|
|
|
|
|
|
@ -672,6 +672,14 @@ def selu(x,
|
|
|
|
|
x = paddle.to_tensor(np.array([[0.0, 1.0],[2.0, 3.0]]))
|
|
|
|
|
out = F.selu(x) # [[0, 1.050701],[2.101402, 3.152103]]
|
|
|
|
|
"""
|
|
|
|
|
if scale <= 1.0:
|
|
|
|
|
raise ValueError(
|
|
|
|
|
"The scale must be greater than 1.0. Received: {}.".format(scale))
|
|
|
|
|
|
|
|
|
|
if alpha < 0:
|
|
|
|
|
raise ValueError(
|
|
|
|
|
"The alpha must be no less than zero. Received: {}.".format(alpha))
|
|
|
|
|
|
|
|
|
|
if in_dygraph_mode():
|
|
|
|
|
return core.ops.selu(x, 'scale', scale, 'alpha', alpha)
|
|
|
|
|
|
|
|
|
|