From 9cc9f6b4705c4047c9fb511531ae11c9679eabf8 Mon Sep 17 00:00:00 2001
From: chujinjin
Date: Wed, 12 Aug 2020 20:11:58 +0800
Subject: [PATCH] fix layernorm grad error in pynative

---
 .../backend/optimizer/ascend/ascend_backend_optimization.cc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mindspore/ccsrc/backend/optimizer/ascend/ascend_backend_optimization.cc b/mindspore/ccsrc/backend/optimizer/ascend/ascend_backend_optimization.cc
index dcca95fbc0..f0101e143d 100644
--- a/mindspore/ccsrc/backend/optimizer/ascend/ascend_backend_optimization.cc
+++ b/mindspore/ccsrc/backend/optimizer/ascend/ascend_backend_optimization.cc
@@ -269,11 +269,11 @@ void AscendBackendIRFusionOptimization(const std::shared_ptr
     ir_fusion_pm->AddPass(std::make_shared());
   } else {
     ir_fusion_pm->AddPass(std::make_shared());
-    ir_fusion_pm->AddPass(std::make_shared());
     ir_fusion_pm->AddPass(std::make_shared());
     ir_fusion_pm->AddPass(std::make_shared());
     ir_fusion_pm->AddPass(std::make_shared());
   }
+  ir_fusion_pm->AddPass(std::make_shared());
   ir_fusion_pm->AddPass(std::make_shared());
   AddAscendIRFusionRulesPass(ir_fusion_pm.get());
   AddAscendIRFusionPass(ir_fusion_pm.get());

[NOTE (review): this patch text was damaged in extraction — all angle-bracketed spans were stripped, so the template arguments of every `std::make_shared<...>()` call (the concrete optimizer-pass class names), the `std::shared_ptr<...>` parameter type in the hunk header, and the author's email address are lost and cannot be recovered from this text alone. The net change is: one pass is moved out of the else-branch (graph mode only) so it also runs in PyNative mode; line structure above is reconstructed to match the hunk counts (-269,11 +269,11; 1 insertion, 1 deletion). Confirm the exact pass name against the upstream commit 9cc9f6b4.]