Op(leaky_relu) error message enhancement (#23627)

Branch: revert-23830-2.0-beta
Author: zhupengyang, committed 6 years ago via GitHub
Parent: 06d4aa4e73
Commit: 5d970b586b

@@ -9238,6 +9238,9 @@ def leaky_relu(x, alpha=0.02, name=None):
    if in_dygraph_mode():
        return core.ops.leaky_relu(x, 'alpha', alpha)

    check_variable_and_dtype(x, 'x', ['float16', 'float32', 'float64'],
                             'leaky_relu')

    inputs = {'X': [x]}
    attrs = {'alpha': alpha}
    helper = LayerHelper('leaky_relu', **locals())
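The added check_variable_and_dtype call rejects non-Variable inputs and unsupported dtypes before the static-graph op is built, so users get a clear TypeError instead of a low-level enforce failure. A minimal sketch of the resulting behavior (assumes a fluid-era PaddlePaddle install; not part of this diff):

    import paddle.fluid as fluid

    with fluid.program_guard(fluid.Program()):
        # A plain Python int is not a Variable, so the check raises TypeError.
        try:
            fluid.layers.leaky_relu(1)
        except TypeError as e:
            print('non-Variable input:', e)

        # An int32 Variable is rejected: only float16/float32/float64 pass.
        x_int32 = fluid.data(name='x_int32', shape=[12, 10], dtype='int32')
        try:
            fluid.layers.leaky_relu(x_int32)
        except TypeError as e:
            print('unsupported dtype:', e)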

@@ -450,6 +450,20 @@ class TestLeakyRelu(TestActivation):
        self.check_grad(['X'], 'Out')


class TestLeakyReluOpError(unittest.TestCase):
    def test_errors(self):
        with program_guard(Program()):
            # The input type must be Variable.
            self.assertRaises(TypeError, fluid.layers.leaky_relu, 1)
            # The input dtype must be float16, float32, float64.
            x_int32 = fluid.data(name='x_int32', shape=[12, 10], dtype='int32')
            self.assertRaises(TypeError, fluid.layers.leaky_relu, x_int32)
            # float32 input is supported, so no error is raised.
            x_fp16 = fluid.layers.data(
                name='x_fp16', shape=[12, 10], dtype='float32')
            fluid.layers.leaky_relu(x_fp16)
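For completeness, a minimal sketch (not part of this diff) of the happy path once the new checks pass: a float32 input builds and runs the op normally. The program and feed names below are illustrative.

    import numpy as np
    import paddle.fluid as fluid

    main_prog = fluid.Program()
    with fluid.program_guard(main_prog):
        x = fluid.data(name='x', shape=[1, 3], dtype='float32')
        out = fluid.layers.leaky_relu(x, alpha=0.02)

    exe = fluid.Executor(fluid.CPUPlace())
    res, = exe.run(main_prog,
                   feed={'x': np.array([[-1.0, 0.0, 2.0]], dtype='float32')},
                   fetch_list=[out])
    print(res)  # negative entries are scaled by alpha: [[-0.02, 0., 2.]]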
def gelu(x, approximate):
    if approximate:
        y_ref = 0.5 * x * (1.0 + np.tanh(
            np.sqrt(2 / np.pi) * (x + 0.044715 * np.power(x, 3))))
