!12852 Change maketuple in coreops

From: @liangzhibo
Reviewed-by: 
Signed-off-by:
pull/12852/MERGE
Committed by mindspore-ci-bot via Gitee, 4 years ago
commit d285692217
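
This commit renames the graph primitive string "make_tuple" to "MakeTuple" across the Python frontend, the C++ core, and the tests; it also renames the "Lrn" primitive to "LRN" for the same CamelCase consistency, and fixes a "faied" -> "failed" typo in a test message.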

@@ -176,7 +176,7 @@ class PrimLib:
 'ReduceSum': Prim(REDUCE),
 'ReduceMax': Prim(REDUCE),
 'ReduceMin': Prim(REDUCE),
-'make_tuple': Prim(CONTROL),
+'MakeTuple': Prim(CONTROL),
 'ControlDepend': Prim(CONTROL),
 'Assign': Prim(ELEMWISE),
 'Tanh': Prim(ELEMWISE),

@@ -121,7 +121,7 @@ convert_object_map = {
 T.next: M.ms_next,
 T.hasnext: M.hasnext,
-T.make_tuple: F.make_tuple,
+T.MakeTuple: F.make_tuple,
 T.make_dict: F.make_dict,
 T.make_list: F.make_list,
 T.make_slice: F.make_slice,

@@ -48,7 +48,7 @@ __all__ = ['add', 'sub', 'mul', 'truediv', 'floordiv', 'mod', 'eq', 'ne', 'lt',
 'exp', 'log', 'sin', 'cos', 'tan']
-def make_tuple(*elts): # pragma: no cover
+def MakeTuple(*elts): # pragma: no cover
 """Tuple builder."""
 raise RuntimeError('This operation is not meant to be called directly.')

@@ -2153,7 +2153,7 @@ class IrParser {
 std::vector<ValuePtr> elems;
 std::vector<AnfNodePtr> nodes;
-nodes.push_back(std::make_shared<ValueNode>(std::make_shared<Primitive>("make_tuple")));
+nodes.push_back(std::make_shared<ValueNode>(std::make_shared<Primitive>("MakeTuple")));
 ValuePtr elem = nullptr;
 AnfNodePtr node = nullptr;
 bool node_is_valid = false;

@@ -327,7 +327,7 @@ constexpr char UNIQUE[] = "Unique";
 // Parallel don't care
 constexpr char STRING_EQUAL[] = "string_equal";
-constexpr char MAKE_TUPLE[] = "make_tuple";
+constexpr char MAKE_TUPLE[] = "MakeTuple";
 constexpr char MAKE_LIST[] = "make_list";
 constexpr char MAKE_DICT[] = "make_dict";
 constexpr char MAKE_SLICE[] = "make_slice";

@@ -118,7 +118,7 @@ const char NAMED_PRIMITIVE_GETITEM[] = "getitem";
 const char NAMED_PRIMITIVE_SETITEM[] = "setitem";
 const char NAMED_PRIMITIVE_HASNEXT[] = "hasnext";
 const char NAMED_PRIMITIVE_BOOL[] = "bool_"; // bool: P.identity
-const char NAMED_PRIMITIVE_MAKETUPLE[] = "make_tuple";
+const char NAMED_PRIMITIVE_MAKETUPLE[] = "MakeTuple";
 const char NAMED_PRIMITIVE_MAKELIST[] = "make_list";
 const char NAMED_PRIMITIVE_MAKESLICE[] = "make_slice";
 const char NAMED_PRIMITIVE_MAKEDICT[] = "make_dict";

@@ -47,7 +47,7 @@ namespace abstract {
 using mindspore::parse::PyObjectWrapper;
 std::unordered_set<std::string> prims_to_skip_undetermined_infer{
-"make_tuple", "make_list", "switch", "env_setitem", "env_getitem", "Load", "UpdateState"};
+"MakeTuple", "make_list", "switch", "env_setitem", "env_getitem", "Load", "UpdateState"};
 EvalResultPtr DoSignatureEvaluator::Run(AnalysisEnginePtr engine, const ConfigPtrList &args_conf_list,
 AnfNodeConfigPtr out_conf) {

@@ -625,7 +625,7 @@ void DfGraphConvertor::TraceOutput(const AnfNodePtr node) {
 name = GetCNodeTargetFuncName(c);
 }
-if (name == "make_tuple") {
+if (name == "MakeTuple") {
 for (unsigned int i = 1; i < c->inputs().size(); i++) {
 TraceOutput(c->input(i));
 }

@@ -264,7 +264,7 @@ inline const PrimitivePtr kPrimMomentum = std::make_shared<Primitive>("Momentum"
 inline const PrimitivePtr kPrimApplyMomentum = std::make_shared<Primitive>("ApplyMomentum");
 inline const PrimitivePtr kPrimApplyFtrl = std::make_shared<Primitive>("ApplyFtrl");
 inline const PrimitivePtr kPrimLayerNorm = std::make_shared<Primitive>("LayerNorm");
-inline const PrimitivePtr kPrimLrn = std::make_shared<Primitive>("Lrn");
+inline const PrimitivePtr kPrimLrn = std::make_shared<Primitive>("LRN");
 inline const PrimitivePtr kPrimLayerNormGrad = std::make_shared<Primitive>("LayerNormGrad");
 inline const PrimitivePtr kPrimLayerNormXBackprop = std::make_shared<Primitive>("LayerNormXBackprop");
 inline const PrimitivePtr kPrimLayerNormBetaGammaBackprop = std::make_shared<Primitive>("LayerNormBetaGammaBackprop");
@@ -411,7 +411,7 @@ inline const PrimitivePtr kPrimAssignSub = std::make_shared<Primitive>("AssignSu
 inline const PrimitivePtr kPrimSelect = std::make_shared<Primitive>("Select");
 inline const PrimitivePtr kPrimCall = std::make_shared<Primitive>("call");
-inline const PrimitivePtr kPrimMakeTuple = std::make_shared<Primitive>("make_tuple");
+inline const PrimitivePtr kPrimMakeTuple = std::make_shared<Primitive>("MakeTuple");
 inline const PrimitivePtr kPrimMakeSlice = std::make_shared<Primitive>("make_slice");
 inline const PrimitivePtr kPrimTupleGetItem = std::make_shared<Primitive>(kTupleGetItem);
 inline const PrimitivePtr kPrimArrayGetItem = std::make_shared<Primitive>("array_getitem");

@@ -26,7 +26,7 @@
 namespace mindspore {
 namespace ops {
-constexpr auto kNameLRN = "Lrn";
+constexpr auto kNameLRN = "LRN";
 class LRN : public PrimitiveC {
 public:
 LRN() : PrimitiveC(kNameLRN) { InitIOName({"x"}, {"y"}); }

@@ -132,7 +132,7 @@ tuple_len = Primitive("tuple_len")
 list_len = Primitive("list_len")
 tuple_reversed = Primitive("tuple_reversed")
 make_range = Primitive("make_range")
-make_tuple = Primitive('make_tuple')
+make_tuple = Primitive('MakeTuple')
 make_dict = Primitive('make_dict')
 make_list = Primitive('make_list')
 make_slice = Primitive('make_slice')

@@ -38,7 +38,7 @@ def first_connected(sp, space):
 nodes = [sp.nodes[i] for i in cand[0]]
 graphs = sp.resolve_connnected_graphs(nodes)
 if len(graphs) != 1:
-print("connect check faied: ", nodes)
+print("connect check failed: ", nodes)
 return False
 return True
@@ -245,7 +245,7 @@ def graph_pat_7():
 a1 = gb.tensor([1024, 1024], "float32", name="a1")
 a = gb.emit("Abs", a0, 'a')
 b = gb.emit("Abs", a1, 'b')
-c = gb.emit("make_tuple", [a, b], 'c')
+c = gb.emit("MakeTuple", [a, b], 'c')
 d = gb.tensor([1024, 1024], "float32", name="d")
 gb.op("AddN", d, [c])
 gb.emit("Abs", d, 'f')

@@ -173,7 +173,7 @@ TEST_F(TestOps, HasTypeTest) {
 // Data structures
 TEST_F(TestOps, MakeTupleTest) {
-auto prim = std::make_shared<Primitive>("make_tuple");
+auto prim = std::make_shared<Primitive>("MakeTuple");
 ASSERT_EQ(prim->name(), kPrimMakeTuple->name());
 }

@@ -579,7 +579,7 @@ def test_elim_sum_shape_one(tag):
 def test_tuple_getitem(tag):
 """ test_tuple_getitem """
 fns = FnDict()
-make_tuple = Primitive('make_tuple')
+make_tuple = Primitive('MakeTuple')
 @fns
 def make_get_0(x, y):
@@ -603,7 +603,7 @@ def test_tuple_getitem(tag):
 def test_tuple_setitem(tag):
 """ test_tuple_setitem """
 fns = FnDict()
-make_tuple = Primitive('make_tuple')
+make_tuple = Primitive('MakeTuple')
 tuple_setitem = Primitive('tuple_setitem')
 @fns
@@ -924,7 +924,7 @@ def test_convert_switch_ops(tag):
 add = Primitive(Constants.kScalarAdd)
 neg = Primitive('Neg')
 tuple_getitem = Primitive(Constants.kTupleGetItem)
-make_tuple = Primitive('make_tuple')
+make_tuple = Primitive('MakeTuple')
 @fns
 def before(cond, x, y):
@@ -1032,7 +1032,7 @@ def test_reducesum_one(tag):
 def test_print_tuple_wrapper(tag):
 fns = FnDict()
 print_ = Primitive('Print')
-make_tuple = Primitive('make_tuple')
+make_tuple = Primitive('MakeTuple')
 @fns
 def before1(x, y):

@@ -25,7 +25,7 @@ RealDiv = P.RealDiv()
 Sqrt = P.Sqrt()
 Square = P.Square()
 Assign = P.Assign()
-make_tuple = Primitive('make_tuple')
+make_tuple = Primitive('MakeTuple')
 tuple_getitem = Primitive(Constants.kTupleGetItem)
 AdamApplyOne = Primitive('AdamApplyOne')
 AdamApplyOneAssign = Primitive('AdamApplyOneAssign')

@@ -25,7 +25,7 @@ sqrt = P.Sqrt()
 real_div = P.RealDiv()
 sub = P.Sub()
 Assign = P.Assign()
-make_tuple = Primitive('make_tuple')
+make_tuple = Primitive('MakeTuple')
 tuple_getitem = Primitive(Constants.kTupleGetItem)
 adam_apply_one_with_decay = Primitive('AdamApplyOneWithDecay')
 adam_apply_one_with_decay_assign = Primitive('AdamApplyOneWithDecayAssign')

@@ -17,7 +17,7 @@ from mindspore.ops import Primitive
 from mindspore.ops import operations as P
 addn = P.AddN()
-make_tuple = Primitive('make_tuple')
+make_tuple = Primitive('MakeTuple')
 class FnDict:
class FnDict:

@@ -16,7 +16,7 @@ from mindspore.ops import Primitive
 from mindspore.ops import operations as P
 from mindspore.ops import _constants as Constants
-make_tuple = Primitive('make_tuple')
+make_tuple = Primitive('MakeTuple')
 tuple_getitem = Primitive(Constants.kTupleGetItem)
 BatchNorm = P.BatchNorm()
 BNTrainingReduce = Primitive('BNTrainingReduce')

@@ -16,7 +16,7 @@ from mindspore.ops import Primitive
 from mindspore.ops.operations import _grad_ops as G
 from mindspore.ops import _constants as Constants
-make_tuple = Primitive('make_tuple')
+make_tuple = Primitive('MakeTuple')
 tuple_getitem = Primitive(Constants.kTupleGetItem)
 BatchNormGradTraining = G.BatchNormGrad(is_training=True)
 BatchNormGradInfer = G.BatchNormGrad(is_training=False)

@@ -20,7 +20,7 @@ from mindspore.ops import _constants as Constants
 batch_norm_grad = G.BatchNormGrad(is_training=True)
 bn_training_update_grad = Primitive('BNTrainingUpdateGrad')
 bn_training_reduce_grad = Primitive('BNTrainingReduceGrad')
-make_tuple = Primitive('make_tuple')
+make_tuple = Primitive('MakeTuple')
 tuple_getitem = Primitive(Constants.kTupleGetItem)

@@ -19,7 +19,7 @@ from mindspore.ops import _constants as Constants
 batch_norm = P.BatchNorm(is_training=False)
 bn_infer = Primitive('BNInfer')
-make_tuple = Primitive('make_tuple')
+make_tuple = Primitive('MakeTuple')
 tuple_getitem = Primitive(Constants.kTupleGetItem)

@@ -19,7 +19,7 @@ from mindspore.ops import _constants as Constants
 batch_norm_grad = G.BatchNormGrad(is_training=False)
 bn_infer_grad = Primitive('BNInferGrad')
-make_tuple = Primitive('make_tuple')
+make_tuple = Primitive('MakeTuple')
 tuple_getitem = Primitive(Constants.kTupleGetItem)

@@ -19,7 +19,7 @@ from mindspore.ops import _constants as Constants
 from mindspore.common.tensor import Tensor
 import mindspore.common.dtype as mstype
-make_tuple = Primitive('make_tuple')
+make_tuple = Primitive('MakeTuple')
 tuple_getitem = Primitive(Constants.kTupleGetItem)
 bn_grad = G.BatchNormGrad(is_training=True)
 sync_bn_grad = G.SyncBatchNormGrad()

@@ -20,7 +20,7 @@ from mindspore.ops import _constants as Constants
 from mindspore.common.tensor import Tensor
 import mindspore.common.dtype as mstype
-make_tuple = Primitive('make_tuple')
+make_tuple = Primitive('MakeTuple')
 tuple_getitem = Primitive(Constants.kTupleGetItem)
 bn = P.BatchNorm(is_training=True)
 sync_bn = inner.SyncBatchNorm()

@@ -33,7 +33,7 @@ Fusion_matmul_relu = Primitive('FusionOp_MatMul_ReLU')
 Add = P.Add()
 Sub = P.Sub()
-make_tuple = Primitive('make_tuple')
+make_tuple = Primitive('MakeTuple')
 class FnDict:

Some files were not shown because too many files have changed in this diff.
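
A minimal sketch (not part of this commit) of why the rename has to land in every file at once: MindSpore graph primitives are matched by their name string, so any stale lowercase literal silently stops matching. The import mirrors the test files above; the assertions are illustrative only.

from mindspore.ops import Primitive

# After this commit the canonical name of the tuple-building primitive
# is CamelCase, matching the other operator names.
make_tuple = Primitive('MakeTuple')

# A check against the old lowercase name would no longer match:
assert make_tuple.name == 'MakeTuple'
assert make_tuple.name != 'make_tuple'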
