Fix bf16 activations test for softmax and gelu (#29502)

* Fix bf16 activations test for softmax and gelu

* Resolve conflict
joanna.wozna.intel committed 0ce6d7fa77 (parent 60bfd308ab)

@@ -447,7 +447,6 @@ test_imperative_static_runner_while^|^
 test_optimizer_in_control_flow^|^
 test_fuse_bn_act_pass^|^
 test_fuse_bn_add_act_pass^|^
-test_activation_mkldnn_op^|^
 test_tsm^|^
 test_gru_rnn_op^|^
 test_rnn_op^|^
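Dropping test_activation_mkldnn_op from this list (which, given the batch-style ^|^ separators, appears to be the Windows build script's excluded-test roster) re-enables the suite there, since the bf16 cases below now skip themselves on unsupported hardware.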

@@ -79,6 +79,8 @@ class TestMKLDNNGeluDim2Approx(TestActivation):
         self.attrs = {"use_mkldnn": True, "approximate": True}
 
 
+@unittest.skipIf(not core.supports_bfloat16(),
+                 "place does not support BF16 evaluation")
 class TestMKLDNNGeluBf16Dim2(TestActivation):
     def setUp(self):
         self.op_type = "gelu"
@@ -98,6 +100,8 @@ class TestMKLDNNGeluBf16Dim2(TestActivation):
         pass
 
 
+@unittest.skipIf(not core.supports_bfloat16(),
+                 "place does not support BF16 evaluation")
 class TestMKLDNNGeluBf16Dim2Approx(TestActivation):
     def setUp(self):
         self.op_type = "gelu"
@@ -225,6 +229,8 @@ class TestMKLDNNGeluDim4Approx(TestActivation):
         self.attrs = {"use_mkldnn": True, "approximate": True}
 
 
+@unittest.skipIf(not core.supports_bfloat16(),
+                 "place does not support BF16 evaluation")
 class TestMKLDNNGeluBf16Dim4(TestActivation):
     def setUp(self):
         self.op_type = "gelu"
@@ -244,6 +250,8 @@ class TestMKLDNNGeluBf16Dim4(TestActivation):
         pass
 
 
+@unittest.skipIf(not core.supports_bfloat16(),
+                 "place does not support BF16 evaluation")
 class TestMKLDNNGeluBf16Dim4Approx(TestActivation):
     def setUp(self):
         self.op_type = "gelu"
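For context, here is a minimal, self-contained sketch of the guard-plus-tolerance pattern these hunks apply. It runs outside Paddle: supports_bfloat16() is a hypothetical stand-in for the core.supports_bfloat16() probe used in the diff, and to_bf16() emulates bf16 storage by truncating fp32 mantissas, loosely mirroring the uint16 representation Paddle's bf16 tests work with.

import math
import unittest

import numpy as np


def supports_bfloat16():
    """Hypothetical stand-in for Paddle's core.supports_bfloat16()."""
    return True


def to_bf16(x):
    # Emulate bf16 by truncating fp32 values to their top 16 bits
    # (sign + 8 exponent bits + 7 explicit mantissa bits).
    u = np.ascontiguousarray(x, dtype=np.float32).view(np.uint32)
    return (u & np.uint32(0xFFFF0000)).view(np.float32)


def gelu(x, approximate=False):
    # Reference gelu; the tanh branch is the "approximate" variant
    # toggled by the tests' {"approximate": True} attribute.
    if approximate:
        return 0.5 * x * (1.0 + np.tanh(
            math.sqrt(2.0 / math.pi) * (x + 0.044715 * x ** 3)))
    erf = np.vectorize(math.erf)
    return 0.5 * x * (1.0 + erf(x / math.sqrt(2.0)))


@unittest.skipIf(not supports_bfloat16(),
                 "place does not support BF16 evaluation")
class TestGeluBf16Sketch(unittest.TestCase):
    def test_bf16_matches_fp32(self):
        x = np.random.uniform(-1.0, 1.0, (11, 17)).astype(np.float32)
        ref = gelu(x, approximate=True)
        out = to_bf16(gelu(to_bf16(x), approximate=True))
        # bf16 keeps only ~8 mantissa bits, so tolerances stay loose.
        np.testing.assert_allclose(out, ref, rtol=1e-2, atol=1e-2)


if __name__ == "__main__":
    unittest.main()

The point of the decorator is that machines whose CPUs lack oneDNN bf16 support report the cases as skipped rather than failed, which is what the hunks above add to each bf16 gelu test class.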
