Modify ReduceMinD and ReduceMaxD IR

pull/130/head^2
Authored by yanghaoran 5 years ago; committed by mxm
parent c24252b2cc
commit b53c974513

2
.gitmodules vendored

@ -12,4 +12,4 @@
url = https://github.com/protocolbuffers/protobuf.git
[submodule "graphengine"]
path = graphengine
url = https://gitee.com/mindspore/graphengine.git
url = https://gitee.com/ms-incubator/graphengine.git

@ -1 +1 @@
Subproject commit 5f763679fa33de1608d07f7651c6f16012b953ea
Subproject commit 092c7a1f6548cac7d40e677af3498c3c49ea2bfd

@ -189,7 +189,7 @@ std::unordered_map<std::string, OpAdapterDescPtr> &DfGraphConvertor::get_adpt_ma
{string(kNameApplyMomentum), ADPT_DESC(ApplyMomentum)},
{string(kNameMaxPool), ADPT_DESC(MaxPool)},
{string(kNameAvgPool), ADPT_DESC(AvgPool)},
{string(kNameTopK), ADPT_DESC(TopKV2)},
{string(kNameTopK), ADPT_DESC(TopK)},
{string(kNamePack), ADPT_DESC(Pack)},
{string(kNameSplitD), ADPT_DESC(SplitD)},
{string(kNameAllReduce), ADPT_DESC(HcomAllReduce)},
@ -310,7 +310,7 @@ std::unordered_map<std::string, OpAdapterDescPtr> &DfGraphConvertor::get_adpt_ma
{prim::kPrimMinimum->name(), ADPT_DESC(Minimum)},
{prim::kPrimSelect->name(), ADPT_DESC(Select)},
{string(kNameLessEqual), ADPT_DESC(LessEqual)},
{prim::kPrimLogSoftmax->name(), ADPT_DESC(LogSoftmax)},
{prim::kPrimLogSoftmax->name(), ADPT_DESC(LogSoftmaxV2)},
{string(kNameTruncatedNormal), ADPT_DESC(TruncatedNormal)},
{string(kNameStridedSliceGrad), ADPT_DESC(StridedSliceGrad)},
{prim::kPrimGelu->name(), ADPT_DESC(Gelu)},
@ -343,7 +343,7 @@ std::unordered_map<std::string, OpAdapterDescPtr> &DfGraphConvertor::get_adpt_ma
{prim::kPrimMatMul->name(), ADPT_DESC(MatMul)},
{string(kNameConst), ADPT_DESC(Constant, Const)},
{string(kNameSoftmax), ADPT_DESC(Softmax)},
{string(kNameSoftmax), ADPT_DESC(SoftmaxV2)},
{string(kNameSoftmaxGrad), ADPT_DESC(SoftmaxGrad)},
{string(kNameParam), ADPT_DESC(Data)},
{string(kNameROIAlign), ADPT_DESC(ROIAlign)},
@ -1017,8 +1017,8 @@ DfGraphConvertor &DfGraphConvertor::BuildGraph() {
}
}
// set up dependices
MS_LOG(DEBUG) << "set up dependices";
// set up dependencies
MS_LOG(DEBUG) << "set up dependencies";
std::vector<AnfNodePtr> nodes = ::mindspore::TopoSort(anf_graph_->get_return());
for (auto &it : nodes) {
SetNodeInput(it);
@ -1115,8 +1115,8 @@ void DfGraphConvertor::UpdateDataOpDesc(const AnfNodePtr &it, const OperatorPtr
if (desc == nullptr) {
MS_LOG(ERROR) << "Update data op descriptor failed! TensorDesc is null.";
} else {
(void)std::static_pointer_cast<Data>(op)->update_input_desc_data(*desc);
(void)std::static_pointer_cast<Data>(op)->update_output_desc_out(*desc);
(void)std::static_pointer_cast<Data>(op)->update_input_desc_x(*desc);
(void)std::static_pointer_cast<Data>(op)->update_output_desc_y(*desc);
}
}

File diff suppressed because it is too large Load Diff

@ -209,8 +209,8 @@ DECLARE_OP_USE_OUTPUT(Merge)
DECLARE_OP_ADAPTER(Switch)
DECLARE_OP_USE_OUTPUT(Switch)
DECLARE_OP_ADAPTER(TopKV2)
DECLARE_OP_USE_OUTPUT(TopKV2)
DECLARE_OP_ADAPTER(TopK)
DECLARE_OP_USE_OUTPUT(TopK)
DECLARE_OP_ADAPTER(RealDiv)
DECLARE_OP_USE_OUTPUT(RealDiv)
@ -260,8 +260,8 @@ DECLARE_OP_ADAPTER(Select)
DECLARE_OP_USE_OUTPUT(Select)
DECLARE_OP_ADAPTER(LessEqual)
DECLARE_OP_USE_OUTPUT(LessEqual)
DECLARE_OP_ADAPTER(LogSoftmax)
DECLARE_OP_USE_OUTPUT(LogSoftmax)
DECLARE_OP_ADAPTER(LogSoftmaxV2)
DECLARE_OP_USE_OUTPUT(LogSoftmaxV2)
DECLARE_OP_ADAPTER(TruncatedNormal)
DECLARE_OP_USE_OUTPUT(TruncatedNormal)
DECLARE_OP_ADAPTER(StridedSliceGrad)
@ -391,8 +391,8 @@ DECLARE_OP_ADAPTER(Sigmoid)
DECLARE_OP_USE_OUTPUT(Sigmoid)
DECLARE_OP_ADAPTER(SigmoidGrad)
DECLARE_OP_USE_OUTPUT(SigmoidGrad)
DECLARE_OP_ADAPTER(Softmax)
DECLARE_OP_USE_OUTPUT(Softmax)
DECLARE_OP_ADAPTER(SoftmaxV2)
DECLARE_OP_USE_OUTPUT(SoftmaxV2)
DECLARE_OP_ADAPTER(SoftmaxGrad)
DECLARE_OP_USE_OUTPUT(SoftmaxGrad)
DECLARE_OP_ADAPTER(Greater)

Loading…
Cancel
Save