diff --git a/mindspore/lite/src/ops/floor.cc b/mindspore/lite/src/ops/floor.cc
index 12148c0883..80e4bc1122 100644
--- a/mindspore/lite/src/ops/floor.cc
+++ b/mindspore/lite/src/ops/floor.cc
@@ -22,8 +22,35 @@ namespace mindspore {
 namespace lite {
-#ifndef PRIMITIVE_WRITEABLE
-
+#ifdef PRIMITIVE_WRITEABLE
+int Floor::UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &inputs) {
+  if (this->primitive_ == nullptr) {
+    this->primitive_ = new (std::nothrow) schema::PrimitiveT;
+    if (this->primitive_ == nullptr) {
+      MS_LOG(ERROR) << "new primitiveT failed";
+      return RET_ERROR;
+    }
+    this->primitive_->value.type = schema::PrimitiveType_Floor;
+  }
+  if (this->primitive_->value.type != schema::PrimitiveType_Floor) {
+    MS_LOG(ERROR) << "Primitive type is error :" << this->primitive_->value.type;
+    delete this->primitive_;
+    this->primitive_ = nullptr;
+    return RET_ERROR;
+  }
+  if (this->primitive_->value.value == nullptr) {
+    auto attr = new (std::nothrow) schema::FloorT();
+    if (attr == nullptr) {
+      delete this->primitive_;
+      this->primitive_ = nullptr;
+      MS_LOG(ERROR) << "new primitiveT value failed";
+      return RET_ERROR;
+    }
+    this->primitive_->value.value = attr;
+  }
+  return RET_OK;
+}
+#else
 int Floor::UnPackToFlatBuilder(const schema::Primitive *primitive, flatbuffers::FlatBufferBuilder *fbb) {
   MS_ASSERT(nullptr != primitive);
   MS_ASSERT(nullptr != fbb);
diff --git a/mindspore/lite/src/ops/floor.h b/mindspore/lite/src/ops/floor.h
index 285fa61ca9..54a1ad566f 100644
--- a/mindspore/lite/src/ops/floor.h
+++ b/mindspore/lite/src/ops/floor.h
@@ -31,6 +31,7 @@ class Floor : public ArithmeticSelf {
 #ifdef PRIMITIVE_WRITEABLE
   MS_DECLARE_PARENT(Floor, ArithmeticSelf);
   explicit Floor(schema::PrimitiveT *primitive) : ArithmeticSelf(primitive) {}
+  int UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &inputs);
 #else
   int UnPackToFlatBuilder(const schema::Primitive *primitive, flatbuffers::FlatBufferBuilder *fbb) override;
 #endif
diff --git a/mindspore/lite/src/ops/primitive_c.cc b/mindspore/lite/src/ops/primitive_c.cc
index 1a0b076404..08ed5d49bd 100644
--- a/mindspore/lite/src/ops/primitive_c.cc
+++ b/mindspore/lite/src/ops/primitive_c.cc
@@ -219,12 +219,6 @@ void PrimitiveC::CalFloatScopeByMeanAndStddev(const double &mean, const double &
 void PrimitiveC::FillDefaultInputQuantParamIfNeed(const size_t &inputSize) {
   std::vector<schema::QuantParamT> quants;
   schema::QuantParamT quantParam;
-  // fill input_quant_param_ by not inited quant_parm
-  if (input_quant_param_.size() < inputSize) {
-    schema::QuantParamT tmpQuantParam;
-    quants.emplace_back(tmpQuantParam);
-    input_quant_param_.insert(input_quant_param_.end(), inputSize - input_quant_param_.size(), quants);
-  }
 
   if (input_quant_param_.size() == kDoubleNum) {
     quants.clear();
@@ -235,6 +229,12 @@ void PrimitiveC::FillDefaultInputQuantParamIfNeed(const size_t &inputSize) {
     quants.emplace_back(quantParam);
     input_quant_param_.emplace_back(quants);
   }
+  // fill input_quant_param_ by not inited quant_parm
+  if (input_quant_param_.size() < inputSize) {
+    schema::QuantParamT tmpQuantParam;
+    quants.emplace_back(tmpQuantParam);
+    input_quant_param_.insert(input_quant_param_.end(), inputSize - input_quant_param_.size(), quants);
+  }
 }
 
 void PrimitiveC::PopulaterInputQuantParam(const Primitive &prim, const std::vector<AnfNodePtr> &inputs,
@@ -574,6 +574,10 @@ std::shared_ptr<PrimitiveC> PrimitiveC::Create(const Primitive &prim, const std:
     return NewPrimitiveC(prim, inputs, quantType);
   } else if (op_type == "ResizeBilinear") {
     return NewPrimitiveC<Resize>(prim, inputs, quantType);
+  } else if (op_type == "Floor") {
+    return NewPrimitiveC<Floor>(prim, inputs, quantType);
+  } else if (op_type == "Minimum") {
+    return NewPrimitiveC<Minimum>(prim, inputs, quantType);
 #ifdef SUPPORT_TRAIN
   } else if (op_type == "SoftmaxCrossEntropyWithLogits") {
diff --git a/mindspore/lite/test/models_mindspore.cfg b/mindspore/lite/test/models_mindspore.cfg
index 229fb05d7b..3b8a8a12b1 100644
--- a/mindspore/lite/test/models_mindspore.cfg
+++ b/mindspore/lite/test/models_mindspore.cfg
@@ -9,3 +9,4 @@ mobilefacenet_iod.mindir
 effnet_iod.mindir
 resnext50.mindir
 ocr_mobilenetV2.mindir
+mindspore_ghostnet_ssd_13x.mindir
diff --git a/mindspore/lite/tools/anf_importer/import_from_protobuf.cc b/mindspore/lite/tools/anf_importer/import_from_protobuf.cc
index a647a35a59..23783252e1 100644
--- a/mindspore/lite/tools/anf_importer/import_from_protobuf.cc
+++ b/mindspore/lite/tools/anf_importer/import_from_protobuf.cc
@@ -791,13 +791,13 @@ int AnfImporterFromProtobuf::ImportNodesForGraph(const FuncGraphPtr &outputFuncG
     cnode_ptr = BuildCNodeForFuncGraph(outputFuncGraph, node_proto, quantType);
     if (cnode_ptr == nullptr) {
       MS_LOG(ERROR) << "Build CNode for funcgraph fail at index: : " << i;
-      status = (status == RET_OK ? RET_NULL_PTR : status);
+      return RET_ERROR;
     }
 
     auto primitive_c = GetValueNode<std::shared_ptr<PrimitiveC>>(cnode_ptr->input(0));
     if (primitive_c == nullptr) {
       MS_LOG(ERROR) << "primitive_c is nullptr";
-      status = RET_ERROR;
+      return RET_ERROR;
     }
 #ifdef SUPPORT_TRAIN
diff --git a/mindspore/lite/tools/converter/anf_transform.cc b/mindspore/lite/tools/converter/anf_transform.cc
index bccd5bea2b..8eb0283fc2 100644
--- a/mindspore/lite/tools/converter/anf_transform.cc
+++ b/mindspore/lite/tools/converter/anf_transform.cc
@@ -55,12 +55,15 @@ FuncGraphPtr AnfTransform::Transform(const FuncGraphPtr &old_graph, const conver
     MS_LOG(ERROR) << "config shoud be specified";
     return nullptr;
   }
-  // fusion const_fold
   auto optimizer = std::make_shared<opt::GraphOptimizer>();
   auto pm = std::make_shared<opt::PassManager>("anf fusion pass manager", false);
   auto graph_pm = std::make_shared<opt::PassManager>("anf graph pass manager", true);
   auto convert_pm = std::make_shared<opt::PassManager>("anf graph convert pass manager", true);
+  // fusion const_fold
+  auto cf_pm = std::make_shared<opt::PassManager>("constant folding pass manager", false);
+  cf_pm->AddPass(std::make_shared<opt::ConstFoldPass>());
+
   // for now - trainning is not supporting fuse operations
   if (config != nullptr && !config->trainModel) {
     // remove quantdtype when awaretraining
@@ -114,9 +117,9 @@ FuncGraphPtr AnfTransform::Transform(const FuncGraphPtr &old_graph, const conver
     remove_unused_transpose_pass->SetFmkType(config->fmk);
     pm->AddPass(remove_unused_transpose_pass);
   }
-  pm->AddPass(std::make_shared<opt::ConstFoldPass>());
   pm->AddPass(std::make_shared());
   convert_pm->AddPass(std::make_shared());
+  optimizer->AddPassManager(cf_pm);
   optimizer->AddPassManager(convert_pm);
   optimizer->AddPassManager(pm);
   optimizer->AddPassManager(graph_pm);
diff --git a/mindspore/lite/tools/converter/quantizer/post_training_quantizer.cc b/mindspore/lite/tools/converter/quantizer/post_training_quantizer.cc
index 45980e29d1..36a058a76b 100644
--- a/mindspore/lite/tools/converter/quantizer/post_training_quantizer.cc
+++ b/mindspore/lite/tools/converter/quantizer/post_training_quantizer.cc
@@ -833,7 +833,7 @@ STATUS PostTrainingQuantizer::QuantNode() {
         MS_LOG(WARNING) << "index value node is null";
         continue;
       }
-      size_t index = GetValue<int>(index_value_node->value());
+      size_t index = CastToInt(index_value_node->value(), false).front();
       auto input_node = cnode->input(1);
       MS_ASSERT(input_node != nullptr);
       auto input_cnode = std::dynamic_pointer_cast<mindspore::CNode>(input_node);
diff --git a/mindspore/lite/tools/optimizer/common/gllo_utils.cc b/mindspore/lite/tools/optimizer/common/gllo_utils.cc
index f19679dd60..baadfc4c28 100644
--- a/mindspore/lite/tools/optimizer/common/gllo_utils.cc
+++ b/mindspore/lite/tools/optimizer/common/gllo_utils.cc
@@ -510,7 +510,7 @@ bool CheckIsAllInputsParam(const AnfNodePtr &node) {
   if (utils::isa<CNode>(node)) {
     auto cnode = node->cast<CNodePtr>();
     for (size_t i = 1; i < cnode->inputs().size(); i++) {
-      if (!utils::isa<Parameter>(cnode->input(i))) {
+      if (!utils::isa<Parameter>(cnode->input(i)) && !utils::isa<ValueNode>(cnode->input(i))) {
         return false;
       }
     }
@@ -589,7 +589,7 @@ size_t GetTupleGetItemOutIndex(const CNodePtr &tuple_get_item) {
   MS_ASSERT(output_index_value_node != nullptr);
   auto value_node = output_index_value_node->cast<ValueNodePtr>();
   MS_ASSERT(value_node != nullptr);
-  return IntToSize(GetValue<int>(value_node->value()));
+  return IntToSize(lite::CastToInt(value_node->value(), false).front());
 }
 
 std::shared_ptr<std::vector<std::pair<AnfNodePtr, int>>> GetRealNodeUsedListByOutputIdx(const FuncGraphPtr &graph,
                                                                                         const AnfNodePtr &node,
diff --git a/mindspore/lite/tools/optimizer/graph/weight_format_hardcode_pass.cc b/mindspore/lite/tools/optimizer/graph/weight_format_hardcode_pass.cc
index e161e49ddc..86fe3e6345 100644
--- a/mindspore/lite/tools/optimizer/graph/weight_format_hardcode_pass.cc
+++ b/mindspore/lite/tools/optimizer/graph/weight_format_hardcode_pass.cc
@@ -121,7 +121,12 @@ lite::STATUS WeightFormatHardCodePass::HardCodeMS(const AnfNodePtr &conv_node,
   if (op_type == schema::PrimitiveType_Conv2D) {
     param_value->set_format(schema::Format::Format_KCHW);
   } else if (op_type == schema::PrimitiveType_DepthwiseConv2D) {
-    param_value->set_format(schema::Format::Format_CKHW);
+    // the format is initialized to NUM_OF_FORMAT, and set to NHWC in const folding.
+    if (param_value->format() == schema::Format::Format_NHWC) {
+      param_value->set_format(schema::Format::Format_KCHW);
+    } else {
+      param_value->set_format(schema::Format::Format_CKHW);
+    }
   } else if (op_type == schema::PrimitiveType_DeDepthwiseConv2D) {
     param_value->set_format(schema::Format::Format_CKHW);
   } else if (op_type == schema::PrimitiveType_DeConv2D) {