!12836 Change return in core_ops

From: @liangzhibo
Reviewed-by: @kingxian, @jpc_chenjianping
Signed-off-by: @kingxian
pull/12836/MERGE
Committed by: mindspore-ci-bot (via Gitee)
commit 423dcfc917
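
This merge renames the graph-level return primitive from "return" to "Return" everywhere the name appears as a string literal: the IR parser, the shared operator-name constants, the VM instruction table, kPrimReturn, the parallel black list, the Lite model importers (Caffe, ONNX, TF, TFLite), and the unit tests. The spelling has to change in every file at once because, as the name()-string comparisons in the hunks below show, primitives are matched by their name string. The standalone sketch here (a hypothetical stand-in, not MindSpore's actual Primitive class) illustrates how one stale lowercase call site would silently stop matching:

// sketch.cc -- hypothetical stand-in for the renamed primitive, mirroring
// the name()-string comparisons used in the tests touched by this patch.
#include <iostream>
#include <memory>
#include <string>

class Primitive {
 public:
  explicit Primitive(std::string name) : name_(std::move(name)) {}
  const std::string &name() const { return name_; }

 private:
  std::string name_;  // the primitive is identified by this string
};

int main() {
  auto prim = std::make_shared<Primitive>("Return");  // new spelling
  // A call site missed by the rename still compares against "return":
  std::cout << (prim->name() == "return") << '\n';    // prints 0: no match
  std::cout << (prim->name() == "Return") << '\n';    // prints 1: matches
  return 0;
}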

@@ -1312,7 +1312,7 @@ class IrParser {
return nullptr;
}
-PrimitivePtr prim = std::make_shared<Primitive>("return");
+PrimitivePtr prim = std::make_shared<Primitive>("Return");
ValueNodePtr input0 = std::make_shared<ValueNode>(prim);
std::vector<AnfNodePtr> inputs;
inputs.push_back(input0);

@@ -345,7 +345,7 @@ constexpr char REDUCED_SHAPE[] = "reduced_shape";
constexpr char TUPLE_DIV[] = "tuple_div";
constexpr char TUPLE_TO_ARRAY[] = "tuple_to_array";
constexpr char VIRTUALLOSS[] = "VirtualLoss";
-constexpr char RETURN[] = "return";
+constexpr char RETURN[] = "Return";
constexpr char ENV_GETITEM[] = "env_getitem";
constexpr char IDENTITY[] = "identity";
constexpr char PARTIAL[] = "partial";

@@ -184,7 +184,7 @@ constexpr auto kBNInferGradOpName = "BNInferGrad";
constexpr auto kCallOpName = "call";
constexpr auto kPartialOpName = "partial";
constexpr auto kSwitchOpName = "switch";
-constexpr auto kReturnOpName = "return";
+constexpr auto kReturnOpName = "Return";
constexpr auto kLarsV2OpName = "LarsV2";
constexpr auto kLarsV2UpdateOpName = "LarsV2Update";
constexpr auto kSquareSumAllOpName = "SquareSumAll";

@@ -63,7 +63,7 @@ using InstType = std::pair<Instruction, VectorRef>;
using InstSet = std::vector<InstType>;
using InstFunctionMap = std::map<Instruction, std::function<void(const VectorRef &)>>;
-const std::vector<std::string> inst_str{"call", "tail_call", "return", "partial", "switch",
+const std::vector<std::string> inst_str{"call", "tail_call", "Return", "partial", "switch",
"switch_return", "tuple", "input", "external", "push",
"primitive", "graph", "pad_stack", "switch_layer"};
class StructPartial : public Base {

@@ -401,7 +401,7 @@ inline const PrimitivePtr kPrimFloorMod = std::make_shared<Primitive>("FloorMod"
inline const PrimitivePtr kPrimWhere = std::make_shared<Primitive>("Where");
// Statements
-inline const PrimitivePtr kPrimReturn = std::make_shared<Primitive>("return");
+inline const PrimitivePtr kPrimReturn = std::make_shared<Primitive>("Return");
inline const PrimitivePtr kPrimSwitch = std::make_shared<Primitive>("switch");
inline const PrimitivePtr kPrimSwitchLayer = std::make_shared<Primitive>("switch_layer");
inline const PrimitivePtr kPrimAssign = std::make_shared<Primitive>("Assign");

@@ -26,7 +26,7 @@ namespace mindspore {
static const std::set<std::string> PARALLEL_BLACK_LIST_ = {prim::kTupleGetItem, "J", "list_getitem",
"array_getitem", "tuple_setitem", "Depend", "list_setitem", "array_setitem", "dict_getitem",
"list_append", "list_map", "list_reduce", "tuple_reversed", "tile_shape", "tuple_div", "tuple_to_array",
"make_dict", "make_slice", "make_record", "string_equal", "VirtualLoss", "return", "env_getitem",
"make_dict", "make_slice", "make_record", "string_equal", "VirtualLoss", "Return", "env_getitem",
"identity", "partial", "env_setitem", "env_getitem", "env_add", "MakeRefKey", "make_ref", "get_ref_key",
"get_ref_value", "get_ref_origin", "dot", "im2col", "col2im", "im2col_v1", "state_setitem", "ScalarSummary",
"ImageSummary", "TensorSummary", "Debug", "HistogramSummary", "col2im_v1", "resolve", "BroadcastGradientArgs",

@@ -249,7 +249,7 @@ int AnfImporterFromMetaGraphT::AddReturnCNode() {
op_inputs.emplace_back(make_tuple_cnode);
auto cnode = func_graph_->NewCNode(op_inputs);
MS_ASSERT(nullptr != cnode);
-cnode->set_fullname_with_scope("return");
+cnode->set_fullname_with_scope("Return");
func_graph_->set_return(cnode);
} else {
auto return_prim_ptr = GetReturnPrim();
@@ -270,7 +270,7 @@ int AnfImporterFromMetaGraphT::AddReturnCNode() {
MS_LOG(ERROR) << "NewCNode is nullptr";
return RET_NULL_PTR;
}
-return_cnode->set_fullname_with_scope("return");
+return_cnode->set_fullname_with_scope("Return");
func_graph_->set_return(return_cnode);
}
return RET_OK;

@@ -272,7 +272,7 @@ STATUS CaffeModelParser::ConvertGraphOutputs() {
op_inputs.emplace_back(value_node);
op_inputs.emplace_back(make_tuple_cnode);
auto cnode = func_graph_ptr_->NewCNode(op_inputs);
-cnode->set_fullname_with_scope("return");
+cnode->set_fullname_with_scope("Return");
func_graph_ptr_->set_return(cnode);
} else {
auto returnPrim = GetReturnPrim();
@@ -293,7 +293,7 @@ STATUS CaffeModelParser::ConvertGraphOutputs() {
}
opInputs.emplace_back(cnode);
auto returnCnode = func_graph_ptr_->NewCNode(opInputs);
-returnCnode->set_fullname_with_scope("return");
+returnCnode->set_fullname_with_scope("Return");
func_graph_ptr_->set_return(returnCnode);
}
return RET_OK;

@@ -392,7 +392,7 @@ STATUS OnnxModelParser::BuildReturnNode(const FuncGraphPtr &anf_graph, const std
MS_LOG(ERROR) << "new cnode error";
return RET_ERROR;
}
-returnCnode->set_fullname_with_scope("return");
+returnCnode->set_fullname_with_scope("Return");
anf_graph->set_return(returnCnode);
return RET_OK;
}

@@ -908,7 +908,7 @@ STATUS TFModelParser::MakeAnfGraphOutputs(std::vector<AnfNodePtr> *output_nodes,
auto value_node = NewValueNode(return_prim_ptr);
std::vector<AnfNodePtr> op_inputs = {value_node, make_tuple_cnode};
auto cnode = anf_graph->NewCNode(op_inputs);
-cnode->set_fullname_with_scope("return");
+cnode->set_fullname_with_scope("Return");
anf_graph->set_return(cnode);
} else {
auto return_prim_ptr = GetReturnPrim();
@@ -919,7 +919,7 @@ STATUS TFModelParser::MakeAnfGraphOutputs(std::vector<AnfNodePtr> *output_nodes,
auto value_node = NewValueNode(return_prim_ptr);
std::vector<AnfNodePtr> op_inputs{value_node, output_nodes->front()};
auto return_cnode = anf_graph->NewCNode(op_inputs);
-return_cnode->set_fullname_with_scope("return");
+return_cnode->set_fullname_with_scope("Return");
anf_graph->set_return(return_cnode);
}
return RET_OK;

@@ -319,7 +319,7 @@ STATUS TfliteModelParser::ConvertGraphOutputs() {
op_inputs.emplace_back(value_node);
op_inputs.emplace_back(make_tuple_cnode);
auto cnode = func_graph_->NewCNode(op_inputs);
-cnode->set_fullname_with_scope("return");
+cnode->set_fullname_with_scope("Return");
func_graph_->set_return(cnode);
} else {
auto returnPrim = GetReturnPrim();
@@ -339,7 +339,7 @@ STATUS TfliteModelParser::ConvertGraphOutputs() {
}
op_inputs.emplace_back(cnode);
auto returnCnode = func_graph_->NewCNode(op_inputs);
-returnCnode->set_fullname_with_scope("return");
+returnCnode->set_fullname_with_scope("Return");
func_graph_->set_return(returnCnode);
}
return RET_OK;

@@ -90,7 +90,7 @@ TEST_F(TestCloner, test_clone_simple) {
Cloner cl2(gs);
auto g3 = cl2[g];
-std::vector<Primitive> results = {Primitive(prim::kScalarAdd), Primitive(prim::kScalarMul), Primitive("return")};
+std::vector<Primitive> results = {Primitive(prim::kScalarAdd), Primitive(prim::kScalarMul), Primitive("Return")};
AnfNodeSet d3 = AnfNodeSet(DeepScopedGraphSearch(g3->get_return()));
common = d1 & d3;
for (auto& x : common) {

@@ -315,7 +315,7 @@ TEST_F(TestOps, SwitchTest) {
}
TEST_F(TestOps, ReturnTest) {
-auto prim = std::make_shared<Primitive>("return");
+auto prim = std::make_shared<Primitive>("Return");
ASSERT_EQ(prim->name(), kPrimReturn->name());
}

@@ -348,17 +348,17 @@ TEST_F(TestStepParallel, CreatOpInstance1) {
}
TEST_F(TestStepParallel, OperatorInstance) {
-// creat attrs and prim
+// create attrs and prim
PrimitivePtr prim = NewValueNode(prim::kPrimMatMul)->value()->cast<PrimitivePtr>();
ValuePtr transpose_a = MakeValue(false);
ValuePtr transpose_b = MakeValue(false);
prim->set_attr("transpose_a", transpose_a);
prim->set_attr("transpose_b", transpose_b);
auto attrs = prim->attrs();
-// creat strategy
+// create strategy
Strategys strategy = {{2, 2}, {2, 4}};
StrategyPtr strategyPtr = parallel::NewStrategy(0, strategy);
-// creat shape
+// create shape
Shapes inputs_shape = std::vector<Shape>{{64, 32}, {32, 64}};
Shapes outputs_shape = std::vector<Shape>{{64, 64}};
std::vector<Shapes> shape = {inputs_shape, outputs_shape};
@@ -433,7 +433,7 @@ TEST_F(TestStepParallel, ForwardCommunication1) {
}
auto &inputs = node->cast<CNodePtr>()->inputs();
PrimitivePtr prim = inputs[0]->cast<ValueNodePtr>()->value()->cast<PrimitivePtr>();
-if (prim->name() == "return" || prim->name() == "MatMul") {
+if (prim->name() == "Return" || prim->name() == "MatMul") {
if (!inputs[1]->isa<Parameter>()) {
CNodePtr pre_node = inputs[1]->cast<CNodePtr>();
PrimitivePtr pre_prim = pre_node->input(0)->cast<ValueNodePtr>()->value()->cast<PrimitivePtr>();
@@ -497,7 +497,7 @@ TEST_F(TestStepParallel, ForwardCommunication3) {
}
TEST_F(TestStepParallel, GetTensorInLayout) {
-// creat attrs and prim
+// create attrs and prim
FuncGraphPtr func_graph = std::make_shared<FuncGraph>();
Shape inputs_x_dims = {64, 32};
Shape inputs_y_dims = {32, 64};
@@ -511,10 +511,10 @@ prim->set_attr("transpose_a", transpose_a);
prim->set_attr("transpose_a", transpose_a);
prim->set_attr("transpose_b", transpose_b);
auto attrs = prim->attrs();
-// creat strategy
+// create strategy
Strategys strategy = {{2, 2}, {2, 4}};
StrategyPtr strategyPtr = parallel::NewStrategy(0, strategy);
-// creat shape
+// create shape
Shapes inputs_shape = std::vector<Shape>{{64, 32}, {32, 64}};
Shapes outputs_shape = std::vector<Shape>{{64, 64}};
std::vector<Shapes> shape = {inputs_shape, outputs_shape};

@@ -58,7 +58,7 @@ void TestSpecializeGraph::SetUp() {
*/
graph_g_ = std::make_shared<FuncGraph>();
ParameterPtr y = graph_g_->add_parameter();
-auto prim_return = std::make_shared<Primitive>("return");
+auto prim_return = std::make_shared<Primitive>("Return");
std::vector<AnfNodePtr> inputs;
inputs.push_back(NewValueNode(prim_return));
inputs.push_back(y);
@@ -101,7 +101,7 @@ void TestSpecializeGraph::SetUp() {
inputs.push_back(y);
CNodePtr cnode_add = graph_beta_->NewCNode(inputs);
inputs.clear();
-inputs.push_back(NewValueNode(std::make_shared<Primitive>("return")));
+inputs.push_back(NewValueNode(std::make_shared<Primitive>("Return")));
inputs.push_back(cnode_add);
CNodePtr cnode_return = graph_beta_->NewCNode(inputs);
graph_beta_->set_return(cnode_return);
@@ -173,7 +173,7 @@ class MetaScalarAdd : public MetaFuncGraph {
inputs.push_back(x);
inputs.push_back(y);
CNodePtr cnode_add = graph_g->NewCNode(inputs);
-auto prim_return = std::make_shared<Primitive>("return");
+auto prim_return = std::make_shared<Primitive>("Return");
inputs.clear();
inputs.push_back(NewValueNode(prim_return));
inputs.push_back(cnode_add);
@@ -202,7 +202,7 @@ void TestSpecializeMetaFuncGraph::SetUp() {
inputs.push_back(x);
inputs.push_back(y);
CNodePtr cnode_add = graph_->NewCNode(inputs);
-auto prim_return = std::make_shared<Primitive>("return");
+auto prim_return = std::make_shared<Primitive>("Return");
inputs.clear();
inputs.push_back(NewValueNode(prim_return));
inputs.push_back(cnode_add);

@@ -133,7 +133,7 @@ static KernelGraphPtr CreateKernelGraph() {
next_cnode_ptr = kernelptr_floor;
// return res
-auto p_return = std::make_shared<Primitive>("return");
+auto p_return = std::make_shared<Primitive>("Return");
inputs.clear();
inputs.push_back(NewValueNode(p_return));
inputs.push_back(next_cnode_ptr);

@@ -171,7 +171,7 @@ TEST_F(TestConvert, TestConvertBatchNorm) {
CNodePtr cnode_relu = anf_graph->NewCNode(inputs);
inputs.clear();
-inputs.push_back(NewValueNode(std::make_shared<Primitive>("return")));
+inputs.push_back(NewValueNode(std::make_shared<Primitive>("Return")));
inputs.push_back(cnode_relu);
CNodePtr cnode_return = anf_graph->NewCNode(inputs);
anf_graph->set_return(cnode_return);
@@ -811,7 +811,7 @@ TEST_F(TestConvert, TestConvertMakeTuple) {
}
CNodePtr cnode_prim = func_graph->NewCNode(inputs);
inputs.clear();
-inputs.push_back(NewValueNode(std::make_shared<Primitive>("return")));
+inputs.push_back(NewValueNode(std::make_shared<Primitive>("Return")));
inputs.push_back(cnode_prim);
CNodePtr cnode_return = func_graph->NewCNode(inputs);
func_graph->set_return(cnode_return);

@@ -90,7 +90,7 @@ FuncGraphPtr MakeFuncGraph(const PrimitivePtr prim, unsigned int nparam) {
}
CNodePtr cnode_prim = func_graph->NewCNode(inputs);
inputs.clear();
-inputs.push_back(NewValueNode(std::make_shared<Primitive>("return")));
+inputs.push_back(NewValueNode(std::make_shared<Primitive>("Return")));
inputs.push_back(cnode_prim);
CNodePtr cnode_return = func_graph->NewCNode(inputs);
func_graph->set_return(cnode_return);
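
A rename like this is easy to leave half-finished, so it is worth checking that no lowercase spelling of the literal survives. A minimal check, assuming a POSIX shell at the repository root (the directory names are a guess, not part of the patch):

grep -rn '"return"' mindspore/ tests/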
