@@ -19,7 +19,8 @@ import numpy as np
 from scipy.special import expit
 import paddle.fluid.core as core
 from paddle.fluid.tests.unittests.op_test import OpTest
-from paddle.fluid.tests.unittests.test_activation_op import TestRelu, TestTanh, TestSqrt, TestAbs, TestLeakyRelu, TestSwish
+from paddle.fluid.tests.unittests.test_activation_op import TestActivation, TestRelu, TestTanh, TestSqrt, TestAbs, TestLeakyRelu, TestSwish
+from paddle.fluid.tests.unittests.test_gelu_op import gelu
 from mkldnn_op_test import check_if_mkldnn_primitives_exist_in_bwd
 
 
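The new GeLU test classes below build their expected outputs with the gelu helper imported here from test_gelu_op, selecting the exact or tanh-approximate form via its second argument. A minimal sketch of what that helper is assumed to compute (the standard GELU formulas; the gelu_reference name and the casting details are assumptions, not the framework's own code):

import numpy as np
from scipy.special import erf


def gelu_reference(x, approximate):
    # Sketch of the assumed test_gelu_op.gelu reference; not verbatim framework code.
    if approximate:
        # tanh approximation: 0.5 * x * (1 + tanh(sqrt(2/pi) * (x + 0.044715 * x^3)))
        return (0.5 * x * (1.0 + np.tanh(
            np.sqrt(2.0 / np.pi) * (x + 0.044715 * np.power(x, 3))))).astype(x.dtype)
    # exact form via the error function: 0.5 * x * (1 + erf(x / sqrt(2)))
    return (0.5 * x * (1.0 + erf(x / np.sqrt(2.0)))).astype(x.dtype)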
@@ -59,6 +60,32 @@ class TestMKLDNNLeakyReluDim2(TestLeakyRelu):
             ['X'], 'Out', max_relative_error=0.007, check_dygraph=False)
 
 
+class TestMKLDNNGeluDim2(TestActivation):
+    def setUp(self):
+        self.op_type = "gelu"
+        self.dtype = np.float32
+
+        x = np.random.uniform(-1, 1, [11, 17]).astype(self.dtype)
+        out = gelu(x, False)
+
+        self.inputs = {'X': OpTest.np_dtype_to_fluid_dtype(x)}
+        self.outputs = {'Out': out}
+        self.attrs = {"use_mkldnn": True}
+
+
+class TestMKLDNNGeluDim2Approx(TestActivation):
+    def setUp(self):
+        self.op_type = "gelu"
+        self.dtype = np.float32
+
+        x = np.random.uniform(-1, 1, [11, 17]).astype(self.dtype)
+        out = gelu(x, True)
+
+        self.inputs = {'X': OpTest.np_dtype_to_fluid_dtype(x)}
+        self.outputs = {'Out': out}
+        self.attrs = {"use_mkldnn": True, "approximate": True}
+
+
 class TestMKLDNNTanhDim2(TestTanh):
     def setUp(self):
         super(TestMKLDNNTanhDim2, self).setUp()
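The GeLU classes added in this patch derive from TestActivation, so output and gradient verification comes from the base class's inherited test methods. Should MKL-DNN-specific tolerances ever become necessary, explicit overrides along the lines below could be added to the new classes (a sketch only, following the check_dygraph=False pattern already used in this file; the base-class behaviour it mirrors is an assumption about TestActivation, not something stated in the diff):

    # Sketch: explicit checks roughly equivalent to what the inherited
    # TestActivation methods are assumed to run for these MKL-DNN cases.
    def test_check_output(self):
        self.check_output(check_dygraph=False)

    def test_check_grad(self):
        if self.dtype == np.float16:
            return
        self.check_grad(['X'], 'Out', check_dygraph=False)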
@@ -185,6 +212,32 @@ class TestMKLDNNLeakyReluDim4(TestLeakyRelu):
             ['X'], 'Out', max_relative_error=0.007, check_dygraph=False)
 
 
+class TestMKLDNNGeluDim4(TestActivation):
+    def setUp(self):
+        self.op_type = "gelu"
+        self.dtype = np.float32
+
+        x = np.random.uniform(-1, 1, [2, 4, 3, 5]).astype(self.dtype)
+        out = gelu(x, False)
+
+        self.inputs = {'X': OpTest.np_dtype_to_fluid_dtype(x)}
+        self.outputs = {'Out': out}
+        self.attrs = {"use_mkldnn": True}
+
+
+class TestMKLDNNGeluDim4Approx(TestActivation):
+    def setUp(self):
+        self.op_type = "gelu"
+        self.dtype = np.float32
+
+        x = np.random.uniform(-1, 1, [2, 4, 3, 5]).astype(self.dtype)
+        out = gelu(x, True)
+
+        self.inputs = {'X': OpTest.np_dtype_to_fluid_dtype(x)}
+        self.outputs = {'Out': out}
+        self.attrs = {"use_mkldnn": True, "approximate": True}
+
+
 class TestMKLDNNTanhDim4(TestTanh):
     def setUp(self):
         super(TestMKLDNNTanhDim4, self).setUp()
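As a quick standalone illustration of what the Dim4 pair asserts (a usage example of the gelu_reference sketch above; not part of the patch), the exact and approximate expected outputs can be compared directly in NumPy:

import numpy as np

# Same shape and range as the Dim4 test cases above.
x = np.random.uniform(-1, 1, [2, 4, 3, 5]).astype(np.float32)

exact = gelu_reference(x, False)   # expected 'Out' of TestMKLDNNGeluDim4
approx = gelu_reference(x, True)   # expected 'Out' of TestMKLDNNGeluDim4Approx

# On this input range the tanh approximation tracks the erf-based form very closely.
print(np.abs(exact - approx).max())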