Add name in relu and log API. (#12438)

Authored by qingqing01, committed via GitHub. Commit c44fb00371, parent bf3c34960f.

@@ -155,8 +155,8 @@ paddle.fluid.layers.resize_bilinear ArgSpec(args=['input', 'out_shape', 'scale',
 paddle.fluid.layers.gather ArgSpec(args=['input', 'index'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.layers.random_crop ArgSpec(args=['x', 'shape', 'seed'], varargs=None, keywords=None, defaults=(None,))
 paddle.fluid.layers.mean_iou ArgSpec(args=['input', 'label', 'num_classes'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.layers.relu ArgSpec(args=['x'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.layers.log ArgSpec(args=['x'], varargs=None, keywords=None, defaults=None)
+paddle.fluid.layers.relu ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.layers.log ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
 paddle.fluid.layers.crop ArgSpec(args=['x', 'shape', 'offsets', 'name'], varargs=None, keywords=None, defaults=(None, None, None))
 paddle.fluid.layers.rank_loss ArgSpec(args=['label', 'left', 'right', 'name'], varargs=None, keywords=None, defaults=(None,))
 paddle.fluid.layers.prelu ArgSpec(args=['x', 'mode', 'param_attr', 'name'], varargs=None, keywords=None, defaults=(None, None))
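Both layers now take a trailing `name` keyword that defaults to None, as the updated ArgSpec lines above record. A minimal calling sketch (the program setup and shapes here are illustrative assumptions, not part of this commit):

```python
import paddle.fluid as fluid

# Input variable; shape and dtype are illustrative assumptions.
x = fluid.layers.data(name='x', shape=[3], dtype='float32')

# Passing name=None (the default) preserves the old auto-naming behaviour;
# an explicit name labels the layer's output variable in the program.
y_relu = fluid.layers.relu(x, name='my_relu')
y_log = fluid.layers.log(x, name='my_log')
```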

@@ -5090,7 +5090,7 @@ def random_crop(x, shape, seed=None):
     return out


-def log(x):
+def log(x, name=None):
     """
     Calculates the natural log of the given input tensor, element-wise.
@@ -5100,6 +5100,8 @@ def log(x):
     Args:
         x (Variable): Input tensor.
+        name (str|None, default None): A name for this layer. If set to None,
+            the layer will be named automatically.

     Returns:
         Variable: The natural log of the input tensor computed element-wise.
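The hunk above only shows the signature and docstring change. In fluid layers of this era, `name` is typically threaded through via `LayerHelper(**locals())`; the body below is a hedged reconstruction of that common pattern, not text from this diff:

```python
from paddle.fluid.layer_helper import LayerHelper

def log(x, name=None):
    # LayerHelper picks `name` out of locals(); with name=None it
    # falls back to an automatically generated unique name.
    helper = LayerHelper('log', **locals())
    dtype = helper.input_dtype(input_param_name='x')
    out = helper.create_tmp_variable(dtype)
    helper.append_op(type='log', inputs={'X': x}, outputs={'Out': out})
    return out
```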
@@ -5117,7 +5119,7 @@ def log(x):
     return out


-def relu(x):
+def relu(x, name=None):
     """
     Relu takes one input data (Tensor) and produces one output data (Tensor)
     where the rectified linear function, y = max(0, x), is applied to
@@ -5129,6 +5131,8 @@ def relu(x):
     Args:
         x (Variable): The input tensor.
+        name (str|None, default None): A name for this layer. If set to None,
+            the layer will be named automatically.

     Returns:
         Variable: The output tensor with the same shape as input.
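An end-to-end sketch exercising the updated `relu` signature (the CPU place and sample values are assumptions for illustration):

```python
import numpy as np
import paddle.fluid as fluid

x = fluid.layers.data(name='x', shape=[3], dtype='float32')
y = fluid.layers.relu(x, name='relu_out')

exe = fluid.Executor(fluid.CPUPlace())
exe.run(fluid.default_startup_program())

data = np.array([[-1.0, 0.0, 2.5]], dtype='float32')
res, = exe.run(fluid.default_main_program(),
               feed={'x': data},
               fetch_list=[y])
print(res)  # expected: [[0.  0.  2.5]]
```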
