add source trace when a MindSpore error occurs

pull/8903/head
jjfeing 4 years ago
parent 364c01d677
commit 27257b9901
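
This commit applies one mechanical change across the Ascend backend optimizer passes shown below: each MS_LOG(EXCEPTION) (and one MS_LOG(INFO)) that reports a problem with a graph node now appends trace::DumpSourceLines(node), declared in utils/trace_base.h, so the log carries the source lines that produced the failing node. A minimal sketch of the pattern, assuming DumpSourceLines returns a printable string; CheckInputSize and kExpectedInputNum are hypothetical names for illustration, not part of the diff:

#include "utils/trace_base.h"

// Hypothetical pass helper showing the before/after of this commit.
void CheckInputSize(const CNodePtr &cnode) {
  // kExpectedInputNum is a placeholder constant, not taken from the diff.
  if (cnode->inputs().size() < kExpectedInputNum) {
    // Before: the message named the node but not where it came from.
    //   MS_LOG(EXCEPTION) << "node has wrong inputs size";
    // After: the appended trace maps the node back to the user's script,
    // which is what every hunk below does.
    MS_LOG(EXCEPTION) << "node " << cnode->DebugString() << " has wrong inputs size"
                      << " trace: " << trace::DumpSourceLines(cnode);
  }
}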

@@ -27,7 +27,7 @@
 #include "backend/session/anf_runtime_algorithm.h"
 #include "backend/session/kernel_graph.h"
 #include "utils/ms_context.h"
+#include "utils/trace_base.h"
 namespace mindspore {
 namespace opt {
 using KernelBuildInfoBuilder = kernel::KernelBuildInfo::KernelBuildInfoBuilder;
@@ -133,7 +133,7 @@ AnfNodePtr InsertTransOpForSingleOutput(const FuncGraphPtr &func_graph, const An
   std::vector<size_t> origin_shape = AnfAlgo::GetOutputInferShape(node, 0);
   if (output_format == kOpFormat_NC1KHKWHWC0) {
     MS_LOG(EXCEPTION) << "got the hw format " << output_format << "when insert the transdata node "
-                      << node->DebugString();
+                      << node->DebugString() << " trace: " << trace::DumpSourceLines(node);
   }
   if (kCommonFormatSet.find(output_format) == kCommonFormatSet.end() && origin_shape.size() > 1) {
     MS_LOG(DEBUG) << "Inserted Transdata " << output_format << " To default , index :0";
@@ -166,7 +166,7 @@ AnfNodePtr InsertTransOpForMultipleOutput(const FuncGraphPtr &func_graph, const
   std::string output_format = AnfAlgo::GetOutputFormat(node, output_idx);
   if (output_format == kOpFormat_NC1KHKWHWC0) {
     MS_LOG(EXCEPTION) << "Got the special format" << output_format << " when insert the transdata node "
-                      << node->DebugString();
+                      << node->DebugString() << " trace: " << trace::DumpSourceLines(node);
   }
   auto tuple_getitem = CreatTupleGetItemNode(func_graph, node, output_idx);
   std::vector<size_t> origin_shape = AnfAlgo::GetOutputInferShape(node, output_idx);

@@ -19,6 +19,7 @@
 #include "backend/session/anf_runtime_algorithm.h"
 #include "frontend/optimizer/opt.h"
 #include "backend/optimizer/ascend/ascend_helper.h"
+#include "utils/trace_base.h"
 namespace mindspore {
 namespace opt {
@@ -45,7 +46,8 @@ bool IsPartOutputsOfHcclOp(const AnfNodePtr &node, const CNodePtr &cur_hccl, con
   auto &node_users = manager->node_users();
   auto iter = node_users.find(prev_hccl_op);
   if (iter == node_users.end()) {
-    MS_LOG(EXCEPTION) << "node has no output in manager";
+    MS_LOG(EXCEPTION) << "node has no output in manager"
+                      << " trace: " << trace::DumpSourceLines(cur_hccl);
   }
   for (const auto &node_index : iter->second) {
     AnfNodePtr output = node_index.first;
@@ -79,7 +81,8 @@ AnfNodePtr InsertMemcpyAsyncForCascade::InsertMemcpyAsync(const FuncGraphPtr &gr
     if (IsPartOutputsOfHcclOp(input, hccl_node, graph)) {
       auto memcpy_async = CreateMemcpyAsyncOp(graph, input);
       if (memcpy_async == nullptr) {
-        MS_LOG(EXCEPTION) << "Create memcpy_async op failed.";
+        MS_LOG(EXCEPTION) << "Create memcpy_async op failed."
+                          << " trace: " << trace::DumpSourceLines(hccl_node);
      }
      if (AnfAlgo::IsNodeDynamicShape(input)) {
        AnfAlgo::SetNodeAttr(kAttrIsDynamicShape, MakeValue(true), memcpy_async);

@@ -21,7 +21,7 @@
 #include "backend/session/anf_runtime_algorithm.h"
 #include "frontend/optimizer/opt.h"
 #include "backend/optimizer/ascend/ascend_helper.h"
+#include "utils/trace_base.h"
 namespace mindspore {
 namespace opt {
 namespace {
@@ -61,7 +61,8 @@ void DealControlForGetitem(const CNodePtr &tuple_getitem, const FuncGraphPtr &gr
   auto &node_users = manager->node_users();
   auto iter = node_users.find(tuple_getitem);
   if (iter == node_users.end()) {
-    MS_LOG(EXCEPTION) << "node has no output in manager";
+    MS_LOG(EXCEPTION) << "node has no output in manager"
+                      << " trace: " << trace::DumpSourceLines(hccl_node);
   }
   for (const auto &node_index : iter->second) {
     AnfNodePtr output = node_index.first;
@@ -81,7 +82,8 @@ void TransferControl(const CNodePtr &hccl_node, const std::vector<AnfNodePtr> &m
   auto &node_users = manager->node_users();
   auto iter = node_users.find(hccl_node);
   if (iter == node_users.end()) {
-    MS_LOG(EXCEPTION) << "node has no output in manager";
+    MS_LOG(EXCEPTION) << "node has no output in manager"
+                      << " trace: " << trace::DumpSourceLines(hccl_node);
   }
   // find hccl_node's output which is a control depend
   for (const auto &node_index : iter->second) {
@@ -140,7 +142,8 @@ bool InsertMemcpyAsyncForHcclOp::NeedInsertMemcpy(const FuncGraphPtr &graph, con
   // when input is used by others
   auto iter = node_users.find(input);
   if (iter == node_users.end()) {
-    MS_LOG(EXCEPTION) << "node has no output in manager";
+    MS_LOG(EXCEPTION) << "node has no output in manager"
+                      << " trace: " << trace::DumpSourceLines(input);
   }
   if (IsNodeOutPutUsedByOtherRealKernel(iter->second)) {
     return true;

@@ -18,6 +18,7 @@
 #include <vector>
 #include <memory>
 #include "backend/session/anf_runtime_algorithm.h"
+#include "utils/trace_base.h"
 namespace mindspore {
 namespace opt {
@@ -39,7 +40,7 @@ const AnfNodePtr SplitUnsupportedTransData::Process(const FuncGraphPtr &func_gra
   MS_EXCEPTION_IF_NULL(kernel_info);
   if (kernel_info->GetInputNum() != 1 || kernel_info->GetOutputNum() != 1) {
     MS_LOG(EXCEPTION) << "Transdata node's kernel info's input and output format size is not 1"
-                      << ori_trans_data->DebugString();
+                      << ori_trans_data->DebugString() << trace::DumpSourceLines(node);
   }
   return SplitTransData(func_graph, ori_trans_data);
 }

@@ -19,6 +19,7 @@
 #include <algorithm>
 #include "backend/session/anf_runtime_algorithm.h"
 #include "backend/optimizer/common/helper.h"
+#include "utils/trace_base.h"
 namespace mindspore {
 namespace opt {
@@ -65,7 +66,7 @@ AnfNodePtr CreateBNTrainingReduce(const FuncGraphPtr &func_graph, const AnfNodeP
   MS_EXCEPTION_IF_NULL(bn_cnode);
   if (bn_cnode->inputs().size() < kBatchNormRealInputNum + 1) {
     MS_LOG(EXCEPTION) << "The input size of node " + bn_cnode->DebugString() + " is less than "
-                      << kBatchNormRealInputNum + 1;
+                      << kBatchNormRealInputNum + 1 << " trace: " << trace::DumpSourceLines(bn);
   }
   std::vector<AnfNodePtr> bn_training_reduce_inputs = {
     NewValueNode(std::make_shared<Primitive>(kBNTrainingReduceOpName)), bn_cnode->input(1)};
@@ -91,11 +92,12 @@ AnfNodePtr CreateBNTrainingUpdateV2(const FuncGraphPtr &func_graph, const AnfNod
   MS_EXCEPTION_IF_NULL(bn_cnode);
   if (bn_cnode->inputs().size() < kBatchNormRealInputNum + 1) {
     MS_LOG(EXCEPTION) << "The input size of node " + bn_cnode->DebugString() + " is less than "
-                      << kBatchNormRealInputNum + 1;
+                      << kBatchNormRealInputNum + 1 << " trace: " << trace::DumpSourceLines(bn);
   }
   if (bn_training_reduce_outputs.size() != kBNTrainingReduceOutputNum) {
     MS_LOG(EXCEPTION) << "The output size of node bn_training_reduce must be " << kBNTrainingReduceOutputNum
-                      << ", but it is " << bn_training_reduce_outputs.size();
+                      << ", but it is " << bn_training_reduce_outputs.size()
+                      << " trace: " << trace::DumpSourceLines(bn);
   }
   std::vector<AnfNodePtr> bn_training_update_v2_inputs = {
     NewValueNode(std::make_shared<Primitive>(kBNTrainingUpdateV2OpName)),
@@ -111,7 +113,7 @@ AnfNodePtr CreateBNTrainingUpdateV2(const FuncGraphPtr &func_graph, const AnfNod
   MS_EXCEPTION_IF_NULL(bn_abstract_tuple);
   if (bn_abstract_tuple->elements().size() != kBatchNormOutputNum) {
     MS_LOG(EXCEPTION) << "The abstract size of node bn must be " << kBatchNormOutputNum << ", but it is "
-                      << bn_abstract_tuple->elements().size();
+                      << bn_abstract_tuple->elements().size() << " trace: " << trace::DumpSourceLines(bn);
   }
   std::vector<AbstractBasePtr> abstract_list{bn_abstract_tuple->elements()[0], bn_abstract_tuple->elements()[3],
                                              bn_abstract_tuple->elements()[4]};
@@ -155,7 +157,8 @@ const AnfNodePtr BatchNormBertFission::Process(const FuncGraphPtr &func_graph, c
                                  &bn_training_update_v2_outputs);
   if (bn_training_update_v2_outputs.size() != kBNTrainingUpdateV2OutputNum) {
     MS_LOG(EXCEPTION) << "The output size of node bn_training_reduce must be " << kBNTrainingUpdateV2OutputNum
-                      << ", but it is " << bn_training_update_v2_outputs.size();
+                      << ", but it is " << bn_training_update_v2_outputs.size()
+                      << " trace: " << trace::DumpSourceLines(node);
   }
   auto manager = func_graph->manager();
   MS_EXCEPTION_IF_NULL(manager);

@@ -17,6 +17,7 @@
 #include <vector>
 #include "backend/optimizer/common/helper.h"
 #include "backend/session/anf_runtime_algorithm.h"
+#include "utils/trace_base.h"
 namespace mindspore {
 namespace opt {
@@ -61,15 +62,18 @@ AnfNodePtr BatchNormGradInferFission::CreateBNInferGrad(const FuncGraphPtr &func
   // Set inputs
   auto iter_input0 = (*equiv).find(input0_var_);
   if (iter_input0 == (*equiv).end()) {
-    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the input0 var after matched.";
+    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the input0 var after matched."
+                      << " trace: " << trace::DumpSourceLines(bn_grad);
   }
   auto iter_input2 = (*equiv).find(input2_var_);
   if (iter_input2 == (*equiv).end()) {
-    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the input2 var after matched.";
+    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the input2 var after matched."
+                      << " trace: " << trace::DumpSourceLines(bn_grad);
   }
   auto iter_input4 = (*equiv).find(input4_var_);
   if (iter_input4 == (*equiv).end()) {
-    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the input4 var after matched.";
+    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the input4 var after matched."
+                      << " trace: " << trace::DumpSourceLines(bn_grad);
   }
   std::vector<AnfNodePtr> bn_infer_grad_inputs = {
     NewValueNode(std::make_shared<Primitive>(kBNInferGradOpName)), utils::cast<AnfNodePtr>(iter_input0->second),
@@ -80,7 +84,8 @@ AnfNodePtr BatchNormGradInferFission::CreateBNInferGrad(const FuncGraphPtr &func
   auto bn_grad_abstract_tuple = dyn_cast<abstract::AbstractTuple>(bn_grad->abstract());
   MS_EXCEPTION_IF_NULL(bn_grad_abstract_tuple);
   if (bn_grad_abstract_tuple->elements().empty()) {
-    MS_LOG(EXCEPTION) << "The abstract tuple of node " << bn_grad->DebugString() << "should not be empty";
+    MS_LOG(EXCEPTION) << "The abstract tuple of node " << bn_grad->DebugString() << "should not be empty"
+                      << " trace: " << trace::DumpSourceLines(bn_grad);
   }
   bn_infer_grad->set_abstract(bn_grad_abstract_tuple->elements()[0]);
   AnfAlgo::CopyNodeAttr(kAttrEpsilon, bn_grad, bn_infer_grad);
@@ -97,19 +102,23 @@ AnfNodePtr BatchNormGradInferFission::CreateBNTrainingUpdateGrad(const FuncGraph
   // Set inputs
   auto iter_input0 = (*equiv).find(input0_var_);
   if (iter_input0 == (*equiv).end()) {
-    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the input0 var after matched.";
+    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the input0 var after matched."
+                      << " trace: " << trace::DumpSourceLines(bn_grad);
   }
   auto iter_input1 = (*equiv).find(input1_var_);
   if (iter_input1 == (*equiv).end()) {
-    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the input1 var after matched.";
+    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the input1 var after matched."
+                      << " trace: " << trace::DumpSourceLines(bn_grad);
   }
   auto iter_input3 = (*equiv).find(input3_var_);
   if (iter_input3 == (*equiv).end()) {
-    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the input3 var after matched.";
+    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the input3 var after matched."
+                      << " trace: " << trace::DumpSourceLines(bn_grad);
   }
   auto iter_input4 = (*equiv).find(input4_var_);
   if (iter_input4 == (*equiv).end()) {
-    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the input4 var after matched.";
+    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the input4 var after matched."
+                      << " trace: " << trace::DumpSourceLines(bn_grad);
   }
   std::vector<AnfNodePtr> bn_training_update_grad_inputs = {
     NewValueNode(std::make_shared<Primitive>(kBNTrainingUpdateGradOpName)),
@@ -121,7 +130,8 @@ AnfNodePtr BatchNormGradInferFission::CreateBNTrainingUpdateGrad(const FuncGraph
   auto bn_grad_abstract_tuple = dyn_cast<abstract::AbstractTuple>(bn_grad->abstract());
   MS_EXCEPTION_IF_NULL(bn_grad_abstract_tuple);
   if (bn_grad_abstract_tuple->elements().size() < kBatchNormGradInferOutputNum) {
-    MS_LOG(EXCEPTION) << "The abstract tuple of node " << bn_grad->DebugString() << "should not be less than 3";
+    MS_LOG(EXCEPTION) << "The abstract tuple of node " << bn_grad->DebugString() << "should not be less than 3"
+                      << trace::DumpSourceLines(bn_grad);
   }
   std::vector<AbstractBasePtr> abstract_list{bn_grad_abstract_tuple->elements()[1],
                                              bn_grad_abstract_tuple->elements()[2]};
@@ -160,7 +170,8 @@ const AnfNodePtr BatchNormGradInferFission::Process(const FuncGraphPtr &func_gra
                                &bn_training_update_grad_outputs);
   if (bn_training_update_grad_outputs.size() != kBNTrainingUpdateGradOutputNum) {
     MS_LOG(EXCEPTION) << "The output size of " << bn_training_update_grad << " should be "
-                      << kBNTrainingUpdateGradOutputNum << ", but it is " << bn_training_update_grad_outputs.size();
+                      << kBNTrainingUpdateGradOutputNum << ", but it is " << bn_training_update_grad_outputs.size()
+                      << trace::DumpSourceLines(node);
   }
   std::vector<AnfNodePtr> make_tuple_inputs = {NewValueNode(prim::kPrimMakeTuple), bn_infer_grad,
                                                bn_training_update_grad_outputs[0], bn_training_update_grad_outputs[1]};

@@ -23,6 +23,7 @@
 #include "backend/optimizer/common/helper.h"
 #include "runtime/device/kernel_info.h"
 #include "backend/session/anf_runtime_algorithm.h"
+#include "utils/trace_base.h"
 namespace mindspore {
 namespace opt {
@@ -33,7 +34,8 @@ void CreateOutputsOfUpdateGrad(const FuncGraphPtr &graph, const CNodePtr &bn_gra
   MS_EXCEPTION_IF_NULL(bn_grad_node);
   const auto &bn_grad_inputs = bn_grad_node->inputs();
   if (bn_grad_inputs.size() < kBNGradInputNum) {
-    MS_LOG(EXCEPTION) << "BNGrad has wrong inputs size";
+    MS_LOG(EXCEPTION) << "BNGrad has wrong inputs size."
+                      << " trace: " << trace::DumpSourceLines(bn_grad_node);
   }
   std::vector<AnfNodePtr> bn_update_grad_inputs = {
     NewValueNode(std::make_shared<Primitive>(kBNTrainingUpdateGradOpName)), bn_grad_inputs[1], bn_grad_inputs[2],
@@ -59,10 +61,12 @@ void CreateOutputsOfReduceGrad(const FuncGraphPtr &graph, const CNodePtr &bn_gra
   MS_EXCEPTION_IF_NULL(bn_reduce_grad_outputs);
   const auto &bn_grad_inputs = bn_grad_node->inputs();
   if (bn_grad_inputs.size() < kBNGradInputNum) {
-    MS_LOG(EXCEPTION) << "BNGrad has wrong inputs size";
+    MS_LOG(EXCEPTION) << "BNGrad has wrong inputs size"
+                      << " trace: " << trace::DumpSourceLines(bn_grad_node);
   }
   if (bn_update_grad_outputs.size() != kBNTrainingUpdateGradOutputNum) {
-    MS_LOG(EXCEPTION) << "BNTrainingReduceGrad_outputs has wrong size";
+    MS_LOG(EXCEPTION) << "BNTrainingReduceGrad_outputs has wrong size"
+                      << " trace: " << trace::DumpSourceLines(bn_grad_node);
   }
   std::vector<AnfNodePtr> bn_reduce_grad_inputs = {
     NewValueNode(std::make_shared<Primitive>(kBNTrainingReduceGradOpName)),
@@ -101,7 +105,8 @@ const AnfNodePtr BatchNormGradSplit::Process(const FuncGraphPtr &func_graph, con
   auto primitive = AnfAlgo::GetCNodePrimitive(cnode);
   MS_EXCEPTION_IF_NULL(primitive);
   if (!primitive->HasAttr(kAttrIsTraining)) {
-    MS_LOG(INFO) << "Op BatchNormGrad must have attrs of is_training";
+    MS_LOG(INFO) << "Op BatchNormGrad must have attrs of is_training"
+                 << " trace: " << trace::DumpSourceLines(node);
     return nullptr;
   }
   if (!AnfAlgo::GetNodeAttr<bool>(cnode, kAttrIsTraining)) {
@@ -112,13 +117,15 @@ const AnfNodePtr BatchNormGradSplit::Process(const FuncGraphPtr &func_graph, con
   std::vector<AnfNodePtr> bn_update_grad_outputs;
   CreateOutputsOfUpdateGrad(func_graph, cnode, &bn_update_grad_outputs);
   if (bn_update_grad_outputs.size() != kBNTrainingUpdateGradOutputNum) {
-    MS_LOG(EXCEPTION) << "bn_update_grad_outputs has wrong size";
+    MS_LOG(EXCEPTION) << "bn_update_grad_outputs has wrong size"
+                      << " trace: " << trace::DumpSourceLines(node);
   }
   std::vector<AnfNodePtr> bn_reduce_grad_outputs;
   CreateOutputsOfReduceGrad(func_graph, cnode, bn_update_grad_outputs, &bn_reduce_grad_outputs);
   if (bn_reduce_grad_outputs.size() != kSingleOutputNum) {
-    MS_LOG(EXCEPTION) << "bn_reduce_grad_outputs has wrong size";
+    MS_LOG(EXCEPTION) << "bn_reduce_grad_outputs has wrong size"
+                      << " trace: " << trace::DumpSourceLines(node);
   }
   std::vector<AnfNodePtr> make_tuple_inputs = {NewValueNode(prim::kPrimMakeTuple), bn_reduce_grad_outputs[0],

@@ -23,6 +23,7 @@
 #include "backend/optimizer/common/helper.h"
 #include "runtime/device/kernel_info.h"
 #include "backend/session/anf_runtime_algorithm.h"
+#include "utils/trace_base.h"
 namespace mindspore {
 namespace opt {
@@ -33,7 +34,8 @@ void CreateOutputsOfUpdateGrad(const FuncGraphPtr &graph, const CNodePtr &bn_gra
   MS_EXCEPTION_IF_NULL(bn_grad_node);
   auto bn_grad_inputs = bn_grad_node->inputs();
   if (bn_grad_inputs.size() != kBNGradInputNum) {
-    MS_LOG(EXCEPTION) << "BNGrad has wrong inputs size";
+    MS_LOG(EXCEPTION) << "BNGrad has wrong inputs size"
+                      << " trace: " << trace::DumpSourceLines(bn_grad_node);
   }
   std::vector<AnfNodePtr> bn_update_grad_inputs = {
     NewValueNode(std::make_shared<Primitive>(kBNTrainingUpdateGradOpName)), bn_grad_inputs[1], bn_grad_inputs[2],
@@ -58,7 +60,8 @@ void CreateOutputsOfReduceGrad(const FuncGraphPtr &graph, const CNodePtr &bn_gra
   MS_EXCEPTION_IF_NULL(bn_grad_node);
   auto bn_grad_inputs = bn_grad_node->inputs();
   if (bn_grad_inputs.size() != kBNGradInputNum) {
-    MS_LOG(EXCEPTION) << "BNGrad has wrong inputs size";
+    MS_LOG(EXCEPTION) << "BNGrad has wrong inputs size"
+                      << " trace: " << trace::DumpSourceLines(bn_grad_node);
   }
   if (bn_update_grad_outputs.size() != kBNTrainingUpdateGradOutputNum) {
     MS_LOG(EXCEPTION) << "bn_update_grad_outputs has wrong size";
@@ -90,13 +93,15 @@ CNodePtr BNGradSplitForTBE(const FuncGraphPtr &func_graph, const CNodePtr &cnode
   std::vector<AnfNodePtr> bn_update_grad_outputs;
   CreateOutputsOfUpdateGrad(func_graph, cnode, &bn_update_grad_outputs);
   if (bn_update_grad_outputs.size() != kBNTrainingUpdateGradOutputNum) {
-    MS_LOG(EXCEPTION) << "bn_update_grad_outputs has wrong size";
+    MS_LOG(EXCEPTION) << "bn_update_grad_outputs has wrong size"
+                      << " trace: " << trace::DumpSourceLines(cnode);
   }
   std::vector<AnfNodePtr> bn_reduce_grad_outputs;
   CreateOutputsOfReduceGrad(func_graph, cnode, bn_update_grad_outputs, &bn_reduce_grad_outputs);
   if (bn_reduce_grad_outputs.size() != 1) {
-    MS_LOG(EXCEPTION) << "bn_reduce_grad_outputs has wrong size";
+    MS_LOG(EXCEPTION) << "bn_reduce_grad_outputs has wrong size"
+                      << " trace: " << trace::DumpSourceLines(cnode);
   }
   std::vector<AnfNodePtr> make_tuple_inputs = {NewValueNode(prim::kPrimMakeTuple), bn_reduce_grad_outputs[0],

@@ -23,6 +23,7 @@
 #include "backend/optimizer/common/helper.h"
 #include "runtime/device/kernel_info.h"
 #include "backend/session/anf_runtime_algorithm.h"
+#include "utils/trace_base.h"
 namespace mindspore {
 namespace opt {
@@ -64,10 +65,12 @@ AnfNodePtr CreateOutputsOfBNTrainingUpdate(const FuncGraphPtr &graph, const CNod
   MS_EXCEPTION_IF_NULL(graph);
   MS_EXCEPTION_IF_NULL(bn_cnode);
   if (bn_cnode->inputs().size() != kBnInputNum) {
-    MS_LOG(EXCEPTION) << "BN node has wrong input size";
+    MS_LOG(EXCEPTION) << "BN node has wrong input size"
+                      << " trace: " << trace::DumpSourceLines(bn_cnode);
   }
   if (bn_training_reduce_outputs.size() != kBNTrainingReduceOutputNum) {
-    MS_LOG(EXCEPTION) << "BN1 outputs has wrong input size";
+    MS_LOG(EXCEPTION) << "BN1 outputs has wrong input size"
+                      << " trace: " << trace::DumpSourceLines(bn_cnode);
   }
   // the inputs of BNTrainingUpdate are from the outputs of BNTrainingReduce and the inputs of BN
   std::vector<AnfNodePtr> bn_training_update_inputs = {
@@ -110,7 +113,8 @@ AnfNodePtr SplitFusedBatchNormForTBE(const FuncGraphPtr &func_graph, const AnfNo
     return nullptr;
   }
   if (bn_training_reduce_outputs.size() != kBN1OutputNum) {
-    MS_LOG(EXCEPTION) << "make outputs of op BNTrainingReduce fail";
+    MS_LOG(EXCEPTION) << "make outputs of op BNTrainingReduce fail"
+                      << " trace: " << trace::DumpSourceLines(node);
   }
   // Create BNTrainingUpdate node

@@ -17,6 +17,7 @@
 #include <memory>
 #include <vector>
 #include "backend/session/anf_runtime_algorithm.h"
+#include "utils/trace_base.h"
 namespace mindspore {
 namespace opt {
@@ -47,7 +48,8 @@ AnfNodePtr CreateNewConcat(const FuncGraphPtr &func_graph, const CNodePtr &origi
   }
   auto output_shape = AnfAlgo::GetOutputInferShape(origin_concat_cnode, 0);
   if (axis < 0 || axis >= SizeToLong(output_shape.size()) || axis >= SizeToLong(input_shape.size())) {
-    MS_LOG(EXCEPTION) << "The concat_dim value " << axis << "is out of range";
+    MS_LOG(EXCEPTION) << "The concat_dim value " << axis << "is out of range"
+                      << " trace: " << trace::DumpSourceLines(origin_concat_cnode);
   }
   output_shape[axis] = input_shape[axis] * offset;
   AnfAlgo::SetOutputInferTypeAndShape({AnfAlgo::GetOutputInferDataType(origin_concat_cnode, 0)}, {output_shape},

@@ -17,6 +17,7 @@
 #include <vector>
 #include <memory>
 #include "backend/session/anf_runtime_algorithm.h"
+#include "utils/trace_base.h"
 namespace mindspore {
 namespace opt {
@@ -290,7 +291,8 @@ AnfNodePtr CreateHConcat(const FuncGraphPtr &func_graph, const CNodePtr &dynamic
   std::vector<AnfNodePtr> splitv_outputs;
   CreateMultipleOutputsOfAnfNode(func_graph, splitv, kSplitVOutputNum, &splitv_outputs);
   if (splitv_outputs.size() != kSplitVOutputNum) {
-    MS_LOG(EXCEPTION) << "Create outputs of node " << splitv->DebugString() << " failed";
+    MS_LOG(EXCEPTION) << "Create outputs of node " << splitv->DebugString() << " failed"
+                      << " trace: " << trace::DumpSourceLines(dynamic_rnn_grad_cnode);
   }
   auto origin_input4 = dynamic_rnn_grad_cnode->input(5);
   auto origin_input4_shape = AnfAlgo::GetOutputInferShape(origin_input4, 0);

@@ -19,6 +19,7 @@
 #include "backend/session/anf_runtime_algorithm.h"
 #include "backend/optimizer/common/helper.h"
 #include "utils/utils.h"
+#include "utils/trace_base.h"
 namespace mindspore {
 namespace opt {
@@ -28,7 +29,8 @@ void CreateOutputsOfSquareSumAll(const FuncGraphPtr &graph, const CNodePtr &lars
   MS_EXCEPTION_IF_NULL(graph);
   MS_EXCEPTION_IF_NULL(lars_v2);
   if (lars_v2->size() != kLarsV2InputNum) {
-    MS_LOG(EXCEPTION) << "Op lars_v2's input not equal " << kLarsV2InputNum;
+    MS_LOG(EXCEPTION) << "Op lars_v2's input not equal " << kLarsV2InputNum
+                      << " trace: " << trace::DumpSourceLines(lars_v2);
   }
   std::vector<AnfNodePtr> inputs = {NewValueNode(std::make_shared<Primitive>(kSquareSumAllOpName)), lars_v2->input(1),
@@ -50,10 +52,12 @@ CNodePtr CreateLarsV2Update(const FuncGraphPtr &graph, const CNodePtr &lars_v2,
   MS_EXCEPTION_IF_NULL(graph);
   MS_EXCEPTION_IF_NULL(lars_v2);
   if (square_sum_all_outputs.size() != 2) {
-    MS_LOG(EXCEPTION) << "square_sum_all_outputs' size not equal 2";
+    MS_LOG(EXCEPTION) << "square_sum_all_outputs' size not equal 2"
+                      << " trace: " << trace::DumpSourceLines(lars_v2);
   }
   if (lars_v2->size() != kLarsV2InputNum) {
-    MS_LOG(EXCEPTION) << "Op lars_v2's input not equal " << kLarsV2InputNum;
+    MS_LOG(EXCEPTION) << "Op lars_v2's input not equal " << kLarsV2InputNum
+                      << " trace: " << trace::DumpSourceLines(lars_v2);
   }
   std::vector<AnfNodePtr> inputs = {NewValueNode(std::make_shared<Primitive>(kLarsV2UpdateOpName)),
                                     lars_v2->input(1),

@@ -22,6 +22,7 @@
 #include "runtime/device/kernel_info.h"
 #include "ir/primitive.h"
 #include "utils/utils.h"
+#include "utils/trace_base.h"
 namespace mindspore {
 namespace opt {
@@ -98,14 +99,16 @@ const AnfNodePtr LayerNormGradSplit::Process(const FuncGraphPtr &graph, const An
   std::vector<AnfNodePtr> layer_norm_x_backprop_outputs;
   CreateOutputsOfLayerNormXBackprop(graph, cnode, &layer_norm_x_backprop_outputs);
   if (layer_norm_x_backprop_outputs.size() != kSingleOutputNum) {
-    MS_LOG(EXCEPTION) << "layer_norm_grad_outputs has wrong size";
+    MS_LOG(EXCEPTION) << "layer_norm_grad_outputs has wrong size"
+                      << " trace: " << trace::DumpSourceLines(node);
   }
   // create layer_norm_beta_gamma_backprop
   std::vector<AnfNodePtr> layer_norm_beta_gamma_backprop_outputs;
   CreateOutputsOfLayerNormBetaGammaBackprop(graph, cnode, &layer_norm_beta_gamma_backprop_outputs);
   if (layer_norm_beta_gamma_backprop_outputs.size() != kLayerNormBetaGammaBackpropOutputNum) {
-    MS_LOG(EXCEPTION) << "layer_norm_beta_gamma_outputs has wrong size";
+    MS_LOG(EXCEPTION) << "layer_norm_beta_gamma_outputs has wrong size"
+                      << " trace: " << trace::DumpSourceLines(node);
   }
   std::vector<AnfNodePtr> make_tuple_inputs = {NewValueNode(prim::kPrimMakeTuple), layer_norm_x_backprop_outputs[0],

@@ -17,6 +17,7 @@
 #include <memory>
 #include <vector>
 #include "backend/session/anf_runtime_algorithm.h"
+#include "utils/trace_base.h"
 namespace mindspore {
 namespace opt {
@@ -45,7 +46,8 @@ AnfNodePtr CreateNewPack(const FuncGraphPtr &func_graph, const CNodePtr &origin_
     axis += output_shape.size();
   }
   if (axis < 0) {
-    MS_LOG(EXCEPTION) << "The concat_dim value " << axis << "is out of range";
+    MS_LOG(EXCEPTION) << "The concat_dim value " << axis << "is out of range"
+                      << " trace: " << trace::DumpSourceLines(origin_pack_cnode);
   }
   std::vector<size_t> new_shape;
   for (size_t i = 0; i < output_shape.size() + 1; ++i) {

@@ -18,6 +18,7 @@
 #include <memory>
 #include "backend/session/anf_runtime_algorithm.h"
 #include "backend/optimizer/common/helper.h"
+#include "utils/trace_base.h"
 namespace mindspore {
 namespace opt {
@@ -31,7 +32,7 @@ AnfNodePtr CreateBNTrainingReduce(const FuncGraphPtr &func_graph, const AnfNodeP
   MS_EXCEPTION_IF_NULL(bn_cnode);
   if (bn_cnode->inputs().size() < kBatchNormRealInputNum + 1) {
     MS_LOG(EXCEPTION) << "The input size of node " + bn_cnode->DebugString() + " is less than "
-                      << kBatchNormRealInputNum + 1;
+                      << kBatchNormRealInputNum + 1 << " trace: " << trace::DumpSourceLines(bn);
   }
   std::vector<AnfNodePtr> bn_training_reduce_inputs = {
     NewValueNode(std::make_shared<Primitive>(kBNTrainingReduceOpName)), bn_cnode->input(1)};
@@ -56,11 +57,12 @@ AnfNodePtr CreateBNTrainingUpdateV3(const FuncGraphPtr &func_graph, const AnfNod
   MS_EXCEPTION_IF_NULL(bn_cnode);
   if (bn_cnode->inputs().size() < kBatchNormRealInputNum + 1) {
     MS_LOG(EXCEPTION) << "The input size of node " + bn_cnode->DebugString() + " is less than "
-                      << kBatchNormRealInputNum + 1;
+                      << kBatchNormRealInputNum + 1 << " trace: " << trace::DumpSourceLines(bn);
   }
   if (bn_training_reduce_outputs.size() != kBNTrainingReduceOutputNum) {
     MS_LOG(EXCEPTION) << "The output size of node bn_training_reduce must be " << kBNTrainingReduceOutputNum
-                      << ", but it is " << bn_training_reduce_outputs.size();
+                      << ", but it is " << bn_training_reduce_outputs.size()
+                      << " trace: " << trace::DumpSourceLines(bn);
   }
   std::vector<AnfNodePtr> bn_training_update_v3_inputs = {
     NewValueNode(std::make_shared<Primitive>(kBNTrainingUpdateV3OpName)),
@@ -76,7 +78,7 @@ AnfNodePtr CreateBNTrainingUpdateV3(const FuncGraphPtr &func_graph, const AnfNod
   MS_EXCEPTION_IF_NULL(bn_abstract_tuple);
   if (bn_abstract_tuple->elements().size() != kBatchNormOutputNum) {
     MS_LOG(EXCEPTION) << "The abstract size of node bn must be " << kBatchNormOutputNum << ", but it is "
-                      << bn_abstract_tuple->elements().size();
+                      << bn_abstract_tuple->elements().size() << " trace: " << trace::DumpSourceLines(bn);
   }
   bn_training_update_v3->set_abstract(bn->abstract());
   bn_training_update_v3->set_scope(bn->scope());

@@ -17,6 +17,7 @@
 #include <memory>
 #include <vector>
 #include "backend/session/anf_runtime_algorithm.h"
+#include "utils/trace_base.h"
 namespace mindspore {
 namespace opt {
@@ -35,7 +36,7 @@ CNodePtr CreateBaseSplitVNode(const FuncGraphPtr &func_graph, const CNodePtr &or
   MS_EXCEPTION_IF_NULL(origin_cnode);
   if (origin_cnode->inputs().size() < kSplitInputNum) {
     MS_LOG(EXCEPTION) << "The input number of split: " << origin_cnode->DebugString() << " should be "
-                      << kSplitInputNum - 1;
+                      << kSplitInputNum - 1 << " trace: " << trace::DumpSourceLines(origin_cnode);
   }
   return CreateSplitVNode(func_graph, origin_cnode->input(1));
 }

@@ -18,6 +18,7 @@
 #include "backend/optimizer/ascend/ascend_helper.h"
 #include "backend/session/anf_runtime_algorithm.h"
 #include "debug/anf_ir_dump.h"
+#include "utils/trace_base.h"
 namespace mindspore {
 namespace opt {
@@ -99,7 +100,8 @@ bool TransDataSplit::DoSplit(const FuncGraphPtr &func_graph, const AnfNodePtr &n
   manager->AddFuncGraph(func_graph);
   if (!manager->Replace(node, new_replace_node)) {
-    MS_LOG(EXCEPTION) << "Manager replace node failed";
+    MS_LOG(EXCEPTION) << "Manager replace node failed"
+                      << " trace: " << trace::DumpSourceLines(node);
   }
   MS_LOG(INFO) << "Transdata node:" << cnode->DebugString() << "split success.";
   return true;

@@ -16,6 +16,7 @@
 #include "backend/optimizer/ascend/ir_fusion/adam_apply_one_fusion.h"
 #include "backend/optimizer/common/helper.h"
 #include "backend/session/anf_runtime_algorithm.h"
+#include "utils/trace_base.h"
 namespace mindspore {
 namespace opt {
 const BaseRef AdamApplyOneFusion::DefinePattern() const {
@@ -218,7 +219,8 @@ const AnfNodePtr AdamApplyOneFusion::Process(const FuncGraphPtr &func_graph, con
   if (AnfAlgo::CheckPrimitiveType(node, prim::kPrimDepend)) {
     auto iter_sub0 = (*equiv).find(sub0_var_);
     if (iter_sub0 == (*equiv).end()) {
-      MS_LOG(EXCEPTION) << "The equiv map is expected to contains the sub0 var after matched.";
+      MS_LOG(EXCEPTION) << "The equiv map is expected to contains the sub0 var after matched."
+                        << " trace: " << trace::DumpSourceLines(node);
    }
     sub0 = utils::cast<AnfNodePtr>(iter_sub0->second);
   }
@@ -233,11 +235,13 @@ const AnfNodePtr AdamApplyOneFusion::Process(const FuncGraphPtr &func_graph, con
   AbstractBasePtrList new_node_abstract_list;
   auto iter_add0 = (*equiv).find(add0_var_);
   if (iter_add0 == (*equiv).end()) {
-    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the add0 var after matched.";
+    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the add0 var after matched."
+                      << " trace: " << trace::DumpSourceLines(node);
   }
   auto iter_add1 = (*equiv).find(add1_var_);
   if (iter_add1 == (*equiv).end()) {
-    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the add1 var after matched.";
+    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the add1 var after matched."
+                      << " trace: " << trace::DumpSourceLines(node);
   }
   auto add0 = utils::cast<AnfNodePtr>(iter_add0->second);
   MS_EXCEPTION_IF_NULL(add0);
@@ -253,7 +257,7 @@ const AnfNodePtr AdamApplyOneFusion::Process(const FuncGraphPtr &func_graph, con
   CreateMultipleOutputsOfAnfNode(func_graph, new_node, kAdamApplyOneOutputNum, &new_node_outputs);
   if (new_node_outputs.size() != kAdamApplyOneOutputNum) {
     MS_LOG(EXCEPTION) << "The output size of node " << new_node->DebugString() << " should be "
-                      << kAdamApplyOneOutputNum;
+                      << kAdamApplyOneOutputNum << " trace: " << trace::DumpSourceLines(node);
   }
   auto manager = func_graph->manager();
   MS_EXCEPTION_IF_NULL(manager);

@@ -21,6 +21,7 @@
 #include "backend/session/anf_runtime_algorithm.h"
 #include "ir/primitive.h"
 #include "backend/optimizer/common/helper.h"
+#include "utils/trace_base.h"
 namespace mindspore {
 namespace opt {
@@ -281,7 +282,8 @@ const AnfNodePtr AdamApplyOneWithDecayRule::Process(const FuncGraphPtr &graph, c
   if (AnfAlgo::CheckPrimitiveType(node, prim::kPrimDepend)) {
     auto iter_sub0 = (*equiv).find(sub0_var_);
     if (iter_sub0 == (*equiv).end()) {
-      MS_LOG(EXCEPTION) << "The equiv map is expected to contains the sub0 var after matched.";
+      MS_LOG(EXCEPTION) << "The equiv map is expected to contains the sub0 var after matched."
+                        << " trace: " << trace::DumpSourceLines(node);
    }
     sub0 = utils::cast<AnfNodePtr>(iter_sub0->second);
   }
@@ -296,11 +298,13 @@ const AnfNodePtr AdamApplyOneWithDecayRule::Process(const FuncGraphPtr &graph, c
   auto iter_add0 = (*equiv).find(add0_var_);
   if (iter_add0 == (*equiv).end()) {
-    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the add0 var after matched.";
+    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the add0 var after matched."
+                      << " trace: " << trace::DumpSourceLines(node);
   }
   auto iter_add1 = (*equiv).find(add1_var_);
   if (iter_add1 == (*equiv).end()) {
-    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the add1 var after matched.";
+    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the add1 var after matched."
+                      << " trace: " << trace::DumpSourceLines(node);
   }
   auto add0 = utils::cast<AnfNodePtr>(iter_add0->second);
   MS_EXCEPTION_IF_NULL(add0);

@@ -19,6 +19,7 @@
 #include "backend/optimizer/ascend/ir_fusion/input_to_output_registry.h"
 #include "backend/session/anf_runtime_algorithm.h"
 #include "backend/kernel_compiler/oplib/oplib.h"
+#include "utils/trace_base.h"
 namespace mindspore {
 namespace opt {
@@ -107,7 +108,8 @@ const AnfNodePtr AddInputToOutput::Process(const FuncGraphPtr &func_graph, const
   MS_EXCEPTION_IF_NULL(new_abstract_tuple);
   CreateMultipleOutputsOfAnfNode(func_graph, cnode, new_abstract_tuple->size(), &new_outputs);
   if (new_outputs.size() != new_abstract_tuple->size()) {
-    MS_LOG(EXCEPTION) << "Failed to create outputs of " << cnode->DebugString();
+    MS_LOG(EXCEPTION) << "Failed to create outputs of " << cnode->DebugString()
+                      << " trace: " << trace::DumpSourceLines(node);
   }
   return new_outputs[0];
 }

@@ -24,7 +24,7 @@
 #include "utils/utils.h"
 #include "abstract/abstract_value.h"
 #include "backend/optimizer/common/helper.h"
+#include "utils/trace_base.h"
 namespace mindspore {
 namespace opt {
 namespace {
@@ -58,7 +58,8 @@ AnfNodePtr GetMul0(const FuncGraphPtr &graph, const AnfNodePtr &input2, const An
   auto manager = graph->manager();
   MS_EXCEPTION_IF_NULL(manager);
   if (manager->node_users().find(input2) == manager->node_users().end()) {
-    MS_LOG(EXCEPTION) << "node has no output in manager";
+    MS_LOG(EXCEPTION) << "node has no output in manager"
+                      << " trace: " << trace::DumpSourceLines(input2);
   }
   AnfNodePtr mul0 = nullptr;

@@ -19,6 +19,7 @@
 #include "backend/optimizer/common/helper.h"
 #include "backend/session/anf_runtime_algorithm.h"
 #include "utils/utils.h"
+#include "utils/trace_base.h"
 namespace mindspore {
 namespace opt {
@@ -41,7 +42,8 @@ void GetBNOutput(const FuncGraphPtr &func_graph, const AnfNodePtr &bn, std::vect
   auto manager = func_graph->manager();
   MS_EXCEPTION_IF_NULL(manager);
   if (manager->node_users().find(bn) == manager->node_users().end()) {
-    MS_LOG(EXCEPTION) << "The bn node " << bn->DebugString() << " should has some outputs";
+    MS_LOG(EXCEPTION) << "The bn node " << bn->DebugString() << " should has some outputs"
+                      << " trace: " << trace::DumpSourceLines(bn);
   }
   for (const auto &node_index : manager->node_users()[bn]) {
     const AnfNodePtr &output = node_index.first;
@@ -113,7 +115,8 @@ AnfNodePtr FusedBatchNormFusion::CreateBNTrainingReduce(const FuncGraphPtr &func
   // Set input to create node
   auto iter_data_input0 = (*equiv).find(data_input0_var_);
   if (iter_data_input0 == (*equiv).end()) {
-    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the data_input0 var after matched.";
+    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the data_input0 var after matched."
+                      << " trace: " << trace::DumpSourceLines(node);
   }
   std::vector<AnfNodePtr> bn_training_reduce_inputs = {
     NewValueNode(std::make_shared<Primitive>(kBNTrainingReduceOpName)),
@@ -124,13 +127,15 @@ AnfNodePtr FusedBatchNormFusion::CreateBNTrainingReduce(const FuncGraphPtr &func
   // Set abstract
   auto iter_data_input1 = (*equiv).find(data_input1_var_);
   if (iter_data_input1 == (*equiv).end()) {
-    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the data_input1 var after matched.";
+    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the data_input1 var after matched."
+                      << " trace: " << trace::DumpSourceLines(node);
   }
   auto data_input1 = utils::cast<AnfNodePtr>(iter_data_input1->second);
   MS_EXCEPTION_IF_NULL(data_input1);
   auto iter_data_input2 = (*equiv).find(data_input2_var_);
   if (iter_data_input2 == (*equiv).end()) {
-    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the data_input2 var after matched.";
+    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the data_input2 var after matched."
+                      << " trace: " << trace::DumpSourceLines(node);
   }
   auto data_input2 = utils::cast<AnfNodePtr>(iter_data_input2->second);
   MS_EXCEPTION_IF_NULL(data_input2);
@@ -190,17 +195,19 @@ void FusedBatchNormFusion::GetBNTrainingUpdateAbstractList(const EquivPtr &equiv
   MS_EXCEPTION_IF_NULL(bn_abstract_tuple);
   if (bn_abstract_tuple->elements().size() < kBnOutputNum) {
     MS_LOG(EXCEPTION) << "The abstract size of node bn must not be less than " << kBnOutputNum << ", but it is "
-                      << bn_abstract_tuple->elements().size();
+                      << bn_abstract_tuple->elements().size() << " trace: " << trace::DumpSourceLines(bn);
   }
   auto iter_variable_input0 = (*equiv).find(variable_input0_var_);
   if (iter_variable_input0 == (*equiv).end()) {
-    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the variable_input0 var after matched.";
+    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the variable_input0 var after matched."
+                      << " trace: " << trace::DumpSourceLines(bn);
   }
   auto variable_input0 = utils::cast<AnfNodePtr>(iter_variable_input0->second);
   MS_EXCEPTION_IF_NULL(variable_input0);
   auto iter_variable_input1 = (*equiv).find(variable_input1_var_);
   if (iter_variable_input1 == (*equiv).end()) {
-    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the variable_input1 var after matched.";
+    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the variable_input1 var after matched."
+                      << " trace: " << trace::DumpSourceLines(bn);
   }
   auto variable_input1 = utils::cast<AnfNodePtr>(iter_variable_input1->second);
   MS_EXCEPTION_IF_NULL(variable_input1);
@@ -222,7 +229,8 @@ AnfNodePtr FusedBatchNormFusion::CreateBNTrainingUpdate(
   // Set abstract
   auto iter_batch_norm = (*equiv).find(batch_norm_var_);
   if (iter_batch_norm == (*equiv).end()) {
-    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the batch_norm var after matched.";
+    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the batch_norm var after matched."
+                      << " trace: " << trace::DumpSourceLines(node);
   }
   AnfNodePtr bn = utils::cast<AnfNodePtr>(iter_batch_norm->second);
   MS_EXCEPTION_IF_NULL(bn);
@@ -260,12 +268,13 @@ const AnfNodePtr FusedBatchNormFusion::Process(const FuncGraphPtr &func_graph, c
                                &bn_training_update_outputs);
   if (bn_training_update_outputs.size() < kBNTrainingUpdateOutputNum) {
     MS_LOG(EXCEPTION) << "The output size of node bn must be " << kBNTrainingUpdateOutputNum << ", but it is "
-                      << bn_training_update_outputs.size();
+                      << bn_training_update_outputs.size() << " trace: " << trace::DumpSourceLines(node);
   }
   // Replace old bn outputs with new outputs
   auto iter_batch_norm = (*equiv).find(batch_norm_var_);
   if (iter_batch_norm == (*equiv).end()) {
-    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the batch_norm var after matched.";
+    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the batch_norm var after matched."
+                      << " trace: " << trace::DumpSourceLines(node);
   }
   AnfNodePtr bn = utils::cast<AnfNodePtr>(iter_batch_norm->second);
   std::vector<AnfNodePtr> bn_outputs;

@@ -17,7 +17,7 @@
 #include <utility>
 #include "backend/session/anf_runtime_algorithm.h"
 #include "frontend/optimizer/opt.h"
+#include "utils/trace_base.h"
 namespace mindspore {
 namespace opt {
 AnfNodePtr LambNextMVWithDecayRule::GetLambNextMVWithDecayOutput(const FuncGraphPtr &func_graph,
@@ -97,7 +97,8 @@ const AnfNodePtr LambNextMVWithDecayRule::Process(const FuncGraphPtr &func_graph
   auto manager = func_graph->manager();
   MS_EXCEPTION_IF_NULL(manager);
   if (manager->node_users().find(mul4) == manager->node_users().end()) {
-    MS_LOG(EXCEPTION) << "The Mul4 should be used by at least another node input";
+    MS_LOG(EXCEPTION) << "The Mul4 should be used by at least another node input"
+                      << " trace: " << trace::DumpSourceLines(node);
   }
   AnfNodeIndexSet mul4_outputs = manager->node_users()[mul4];
   auto iter = std::find_if(mul4_outputs.begin(), mul4_outputs.end(),

@@ -22,7 +22,7 @@
 #include "backend/session/anf_runtime_algorithm.h"
 #include "frontend/optimizer/opt.h"
+#include "utils/trace_base.h"
 namespace mindspore {
 namespace opt {
 namespace {
@@ -31,27 +31,31 @@ std::tuple<AnfNodePtr, AnfNodePtr, AnfNodePtr, AnfNodePtr> GetSharedNodes(const
   auto add3 = node->cast<CNodePtr>();
   MS_EXCEPTION_IF_NULL(add3);
   if (add3->inputs().size() < kAddInputNum) {
-    MS_LOG(EXCEPTION) << "The input size of Add3 is less than " << kAddInputNum;
+    MS_LOG(EXCEPTION) << "The input size of Add3 is less than " << kAddInputNum
+                      << " trace: " << trace::DumpSourceLines(node);
   }
   auto real_div2_anf = add3->input(1);
   MS_EXCEPTION_IF_NULL(real_div2_anf);
   auto real_div2 = real_div2_anf->cast<CNodePtr>();
   MS_EXCEPTION_IF_NULL(real_div2);
   if (real_div2->inputs().size() < kRealDivInputNum) {
-    MS_LOG(EXCEPTION) << "The input size of RealDiv2 is less than " << kRealDivInputNum;
+    MS_LOG(EXCEPTION) << "The input size of RealDiv2 is less than " << kRealDivInputNum
+                      << " trace: " << trace::DumpSourceLines(node);
   }
   auto sqrt0_anf = real_div2->input(2);
   MS_EXCEPTION_IF_NULL(sqrt0_anf);
   auto sqrt0 = sqrt0_anf->cast<CNodePtr>();
   MS_EXCEPTION_IF_NULL(sqrt0);
   if (sqrt0->inputs().size() < kRsqrtInputNum) {
-    MS_LOG(EXCEPTION) << "The input size of Sqrt0 is less than " << kSqrtInputNum;
+    MS_LOG(EXCEPTION) << "The input size of Sqrt0 is less than " << kSqrtInputNum
+                      << " trace: " << trace::DumpSourceLines(node);
   }
   auto add2_anf = sqrt0->input(1);
   MS_EXCEPTION_IF_NULL(add2_anf);
   auto add2 = add2_anf->cast<CNodePtr>();
   if (add2->inputs().size() < kAddInputNum) {
-    MS_LOG(EXCEPTION) << "The input size of Add2 is less than " << kAddInputNum;
+    MS_LOG(EXCEPTION) << "The input size of Add2 is less than " << kAddInputNum
+                      << " trace: " << trace::DumpSourceLines(node);
   }
   return std::make_tuple(add3->input(2), real_div2->input(1), add2->input(1), add2->input(2));
 }
@@ -101,10 +105,12 @@ std::tuple<AnfNodePtr, AnfNodePtr> GetAdd0Add1Nodes(const AnfNodePtr &real_div0_
   MS_EXCEPTION_IF_NULL(real_div0);
   MS_EXCEPTION_IF_NULL(real_div1);
   if (real_div0->inputs().size() != kRealDivInputNum) {
-    MS_LOG(EXCEPTION) << "RealDiv0 has wrong input size";
+    MS_LOG(EXCEPTION) << "RealDiv0 has wrong input size"
+                      << " trace: " << trace::DumpSourceLines(real_div0_anf);
   }
   if (real_div1->inputs().size() != kRealDivInputNum) {
-    MS_LOG(EXCEPTION) << "RealDiv1 has wrong input size";
+    MS_LOG(EXCEPTION) << "RealDiv1 has wrong input size"
+                      << " trace: " << trace::DumpSourceLines(real_div1_anf);
   }
   return std::make_tuple(real_div0->input(1), real_div1->input(1));
 }
@@ -165,7 +171,8 @@ const AnfNodePtr LambNextMVWithDecayV1Rule::Process(const FuncGraphPtr &func_gra
   auto manager = func_graph->manager();
   MS_EXCEPTION_IF_NULL(manager);
   if (manager->node_users().find(mul4) == manager->node_users().end()) {
-    MS_LOG(EXCEPTION) << "The Mul4 should be used by at least another node input";
+    MS_LOG(EXCEPTION) << "The Mul4 should be used by at least another node input"
+                      << " trace: " << trace::DumpSourceLines(node);
   }
   AnfNodeIndexSet mul4_output_node_index_set = manager->node_users()[mul4];
   auto iter = std::find_if(
@@ -195,7 +202,8 @@ const AnfNodePtr LambNextMVWithDecayV1Rule::Process(const FuncGraphPtr &func_gra
   std::vector<AnfNodePtr> fusion_node_outputs;
   CreateMultipleOutputsOfAnfNode(func_graph, fusion_node, kLambNextMVWithDecayV1OutputNum, &fusion_node_outputs);
   if (fusion_node_outputs.size() != kLambNextMVWithDecayV1OutputNum) {
-    MS_LOG(EXCEPTION) << "create multiple outputs for fusion node fail!";
+    MS_LOG(EXCEPTION) << "create multiple outputs for fusion node fail!"
+                      << " trace: " << trace::DumpSourceLines(node);
   }
   (void)manager->Replace(add0, fusion_node_outputs[1]);

@@ -16,7 +16,7 @@
 #include "backend/optimizer/ascend/ir_fusion/lamb_next_right_rule.h"
 #include <vector>
 #include "backend/optimizer/common/helper.h"
+#include "utils/trace_base.h"
 namespace mindspore {
 namespace opt {
 AnfNodePtr LambNextRightRule::CreateLambNextRightNode(const FuncGraphPtr &func_graph, const EquivPtr &equiv) const {
@@ -69,7 +69,8 @@ const AnfNodePtr LambNextRightRule::Process(const FuncGraphPtr &func_graph, cons
   // Set abstract of new node
   auto iter_add1 = (*equiv).find(add1_var_);
   if (iter_add1 == (*equiv).end()) {
-    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the add1 var after matched.";
+    MS_LOG(EXCEPTION) << "The equiv map is expected to contains the add1 var after matched."
+                      << " trace: " << trace::DumpSourceLines(node);
   }
   auto add1 = utils::cast<AnfNodePtr>(iter_add1->second);
   MS_EXCEPTION_IF_NULL(add1);

Some files were not shown because too many files have changed in this diff.