@@ -73,8 +73,7 @@ class TestXPUSigmoid(TestXPUActivation):
     def test_check_grad(self):
         if paddle.is_compiled_with_xpu():
             place = paddle.XPUPlace(0)
-            self.check_grad_with_place(
-                place, ['X'], 'Out', max_relative_error=0.01)
+            self.check_grad_with_place(place, ['X'], 'Out')


 @unittest.skipIf(not paddle.is_compiled_with_xpu(),
@@ -90,6 +89,11 @@ class TestXPUTanh(TestXPUActivation):
         self.inputs = {'X': OpTest.np_dtype_to_fluid_dtype(x)}
         self.outputs = {'Out': out}

+    def test_check_grad(self):
+        if paddle.is_compiled_with_xpu():
+            place = paddle.XPUPlace(0)
+            self.check_grad_with_place(place, ['X'], 'Out')
+

 @unittest.skipIf(not paddle.is_compiled_with_xpu(),
                  "core is not compiled with XPU")
@@ -105,6 +109,11 @@ class TestXPUSqrt(TestXPUActivation):
         self.inputs = {'X': OpTest.np_dtype_to_fluid_dtype(x)}
         self.outputs = {'Out': out}

+    def test_check_grad(self):
+        if paddle.is_compiled_with_xpu():
+            place = paddle.XPUPlace(0)
+            self.check_grad_with_place(place, ['X'], 'Out')
+

 @unittest.skipIf(not paddle.is_compiled_with_xpu(),
                  "core is not compiled with XPU")
@@ -142,6 +151,11 @@ class TestXPURelu(TestXPUActivation):
         self.inputs = {'X': x}
         self.outputs = {'Out': out}

+    def test_check_grad(self):
+        if paddle.is_compiled_with_xpu():
+            place = paddle.XPUPlace(0)
+            self.check_grad_with_place(place, ['X'], 'Out')
+

 @unittest.skipIf(not paddle.is_compiled_with_xpu(),
                  "core is not compiled with XPU")
@@ -157,6 +171,11 @@ class TestXPUGelu(TestXPUActivation):
         self.outputs = {'Out': out}
         self.attrs = {"approximate": approximate, 'use_xpu': True}

+    def test_check_grad(self):
+        if paddle.is_compiled_with_xpu():
+            place = paddle.XPUPlace(0)
+            self.check_grad_with_place(place, ['X'], 'Out')
+

 def gelu(x, approximate):
     if approximate:
@@ -223,6 +242,11 @@ class TestXPUSquare(TestXPUActivation):
         self.inputs = {'X': OpTest.np_dtype_to_fluid_dtype(x)}
         self.outputs = {'Out': out}

+    def test_check_grad(self):
+        if paddle.is_compiled_with_xpu():
+            place = paddle.XPUPlace(0)
+            self.check_grad_with_place(place, ['X'], 'Out')
+

 @unittest.skipIf(not paddle.is_compiled_with_xpu(),
                  "core is not compiled with XPU")
@@ -239,5 +263,36 @@ class TestXPUPow(TestXPUActivation):
         self.outputs = {'Out': out}

+
+@unittest.skipIf(not paddle.is_compiled_with_xpu(),
+                 "core is not compiled with XPU")
+class TestXPULeakyRelu(TestXPUActivation):
+    def setUp(self):
+        self.op_type = "leaky_relu"
+        self.init_dtype()
+        x = np.random.uniform(-1, 1, [11, 17]).astype(self.dtype)
+        alpha = np.random.uniform(
+            0,
+            1, )
+        out = leaky_relu(x, alpha)
+
+        self.inputs = {'X': x}
+        self.outputs = {'Out': out}
+        self.attrs = {'use_xpu': True, 'alpha': alpha}
+
+    def test_check_grad(self):
+        if paddle.is_compiled_with_xpu():
+            place = paddle.XPUPlace(0)
+            self.check_grad_with_place(place, ['X'], 'Out')
+
+
+def leaky_relu(x, alpha):
+    if (alpha < 1):
+        y_ref = np.maximum(x, alpha * x)
+    else:
+        y_ref = np.minimum(x, alpha * x)
+    return y_ref.astype(x.dtype)
+

 if __name__ == "__main__":
     paddle.enable_static()
     unittest.main()