diff --git a/mindspore/ccsrc/frontend/operator/composite/do_signature.cc b/mindspore/ccsrc/frontend/operator/composite/do_signature.cc
index 4ca8eaa152..29e4d2f4cf 100644
--- a/mindspore/ccsrc/frontend/operator/composite/do_signature.cc
+++ b/mindspore/ccsrc/frontend/operator/composite/do_signature.cc
@@ -297,8 +297,7 @@ AnfNodePtr BuildNewCNode(const FuncGraphPtr &func_graph, const std::string &func
       // If sig is SignatureEnumRW::kRWRef, not do anything.
     } else if (sig == SignatureEnumRW::kRWWrite &&
                !((type->type_id() == kObjectTypeRef) || (type->type_id() == kObjectTypeRefKey))) {
-      MS_EXCEPTION(TypeError) << "Function " << func_name << "'s input " << i << " should be a Parameter, but "
-                              << type->ToString();
+      RaiseExceptionForCheckParameter(func_name, i, type->ToString());
     }
     MS_LOG(DEBUG) << "Function " << func_name << "'s input " << i << " " << param->DebugString(2) << " abs "
                   << args_spec_list[i]->ToString() << " type " << type->ToString();
@@ -338,5 +337,9 @@ void RaiseExceptionForConvertRefDtype(const std::string &func_name, const std::s
                           << "'. The writable arg type is not equal to the largest type, "
                           << "so can not cast automatically.";
 }
+void RaiseExceptionForCheckParameter(const std::string &func_name, int i, const std::string &source_type) {
+  MS_EXCEPTION(TypeError) << "Function " << func_name << "'s input " << i << " should be a Parameter, but "
+                          << source_type << ".";
+}
 }  // namespace prim
 }  // namespace mindspore
diff --git a/mindspore/ccsrc/frontend/operator/composite/do_signature.h b/mindspore/ccsrc/frontend/operator/composite/do_signature.h
index 33ef78e27c..9148c8da27 100644
--- a/mindspore/ccsrc/frontend/operator/composite/do_signature.h
+++ b/mindspore/ccsrc/frontend/operator/composite/do_signature.h
@@ -58,8 +58,10 @@ using RWSignaturePtr = std::shared_ptr;
 
 extern const std::map<TypeId, size_t> type_map;
 
+// shared with pynative
 void RaiseExceptionForConvertRefDtype(const std::string &func_name, const std::string &ref_type,
                                       const std::string &target_type);
+void RaiseExceptionForCheckParameter(const std::string &func_name, int i, const std::string &source_type);
 
 AnfNodePtr GenerateCNode(const FuncGraphPtr &func_graph, const std::string &func_name, const ValuePtr &function,
                          const AbstractBasePtrList &args_spec_list, const AnfNodePtrList &old_node_inputs);
diff --git a/mindspore/ccsrc/pipeline/pynative/pynative_execute.cc b/mindspore/ccsrc/pipeline/pynative/pynative_execute.cc
index 1ea1c059a8..09b2b2b662 100644
--- a/mindspore/ccsrc/pipeline/pynative/pynative_execute.cc
+++ b/mindspore/ccsrc/pipeline/pynative/pynative_execute.cc
@@ -279,16 +279,34 @@ void ConvertInputs(const PrimitivePyPtr &prim, const py::list &args, const OpExe
     }
     auto obj = out_args[i];
-    if (py::isinstance<tensor::Tensor>(obj)) {
-      auto arg = py::cast<tensor::TensorPtr>(obj);
-      TypeId arg_type_id = arg->data_type();
-      if (prim::type_map.find(arg_type_id) == prim::type_map.end() || arg_type_id == it->second) {
-        continue;
+    auto sig = signature[i].rw;
+    bool is_parameter = false;
+    bool is_same_type = false;
+    TypeId arg_type_id = kTypeUnknown;
+    bool is_sig_write = (sig == SignatureEnumRW::kRWWrite);
+    if (py::isinstance<tensor::Tensor>(obj)) {
+      auto arg = py::cast<tensor::TensorPtr>(obj);
+      if (arg->is_parameter()) {
+        is_parameter = true;
       }
-      if (signature[i].rw == SignatureEnumRW::kRWWrite) {
-        prim::RaiseExceptionForConvertRefDtype(prim->name(), TypeIdToMsTypeStr(arg_type_id),
-                                               TypeIdToMsTypeStr(it->second));
+      arg_type_id = arg->data_type();
+    }
+    if (arg_type_id != 0) {
+      is_same_type = (prim::type_map.find(arg_type_id) == prim::type_map.end() || arg_type_id == it->second);
+    }
+    if (is_sig_write) {
+      if (!is_parameter) {
+        prim::RaiseExceptionForCheckParameter(prim->name(), i, "not");
       }
+      if (arg_type_id != 0) {
+        if (!is_same_type) {
+          prim::RaiseExceptionForConvertRefDtype(prim->name(), TypeIdToMsTypeStr(arg_type_id),
+                                                 TypeIdToMsTypeStr(it->second));
+        }
+      }
+    }
+    if (is_same_type) {
+      continue;
     }
     if (!py::isinstance<tensor::Tensor>(obj) && !py::isinstance<py::int_>(obj) && !py::isinstance<py::float_>(obj)) {
diff --git a/mindspore/core/ir/meta_tensor.cc b/mindspore/core/ir/meta_tensor.cc
index bd7c839e30..035962d4d7 100644
--- a/mindspore/core/ir/meta_tensor.cc
+++ b/mindspore/core/ir/meta_tensor.cc
@@ -91,7 +91,11 @@ void MetaTensor::SetDeviceInfo(const std::string &format, const TypePtr &data_ty
 
 std::string MetaTensor::ToString() const {
   std::ostringstream buf;
-  buf << "MetaTensor shape:[" << shape() << "]";
+  buf << "MetaTensor(shape=[" << shape() << "]";
+  if (is_parameter_) {
+    buf << ", name=" << param_info_->name();
+  }
+  buf << ")";
   return buf.str();
 }
 
diff --git a/mindspore/core/ir/tensor.cc b/mindspore/core/ir/tensor.cc
index 7b4414df13..4a2559c2b8 100644
--- a/mindspore/core/ir/tensor.cc
+++ b/mindspore/core/ir/tensor.cc
@@ -525,10 +525,14 @@ std::string Tensor::ToStringInternal(int limit_size) const {
   buf << "Tensor(shape=" << ShapeToString(shape_) << ", dtype=" << dtype->ToString() << ',';
   if (limit_size <= 0 || DataSize() < limit_size) {
     // Only print data for small tensor.
-    buf << ((data().ndim() > 1) ? '\n' : ' ') << data().ToString(data_type_, shape_, false) << ')';
+    buf << ((data().ndim() > 1) ? '\n' : ' ') << data().ToString(data_type_, shape_, false);
   } else {
-    buf << " [...])";
+    buf << " [...]";
   }
+  if (is_parameter_) {
+    buf << ", name=" << param_info_->name();
+  }
+  buf << ")";
   return buf.str();
 }
 
diff --git a/mindspore/ops/__init__.py b/mindspore/ops/__init__.py
index 1a21f90a07..68a209f6a9 100644
--- a/mindspore/ops/__init__.py
+++ b/mindspore/ops/__init__.py
@@ -36,6 +36,9 @@ from .op_info_register import op_info_register, AkgGpuRegOp, AkgAscendRegOp, AiC
 from .primitive import constexpr
 from . import composite, operations, functional
 from . import signature
+from .composite import *
+from .operations import *
+from .functional import *
 
 __primitive__ = [
     "prim_attr_register", "Primitive", "PrimitiveWithInfer", "signature"
diff --git a/mindspore/ops/operations/other_ops.py b/mindspore/ops/operations/other_ops.py
index 8552c41cdf..7e5d88801f 100644
--- a/mindspore/ops/operations/other_ops.py
+++ b/mindspore/ops/operations/other_ops.py
@@ -18,10 +18,10 @@ import functools
 from .. import signature as sig
 from ..._checkparam import Validator as validator, Rel
 from ...common import dtype as mstype
-from ..primitive import Primitive, PrimitiveWithInfer, prim_attr_register
+from ..primitive import Primitive, PrimitiveWithCheck, PrimitiveWithInfer, prim_attr_register
 
 
-class Assign(Primitive):
+class Assign(PrimitiveWithCheck):
     """
     Assign `Parameter` with a value.
 
@@ -59,14 +59,10 @@ class Assign(Primitive):
     def __init__(self):
         self.init_prim_io_names(inputs=['ref', 'value'], outputs=['output'])
 
-    def infer_shape(self, variable, value):
-        return variable
-
-    def infer_dtype(self, variable, value):
+    def check_dtype(self, variable, value):
         if variable != mstype.type_refkey:
             validator.check_tensor_type_same({"variable": variable}, mstype.number_type, self.name)
             validator.check_scalar_or_tensor_type_same({"value": value}, mstype.number_type, self.name)
-        return variable
 
 
 class BoundingBoxEncode(PrimitiveWithInfer):
diff --git a/tests/ut/python/pipeline/parse/test_parse.py b/tests/ut/python/pipeline/parse/test_parse.py
index 3a8e182b84..5183d1d55f 100644
--- a/tests/ut/python/pipeline/parse/test_parse.py
+++ b/tests/ut/python/pipeline/parse/test_parse.py
@@ -306,9 +306,28 @@ class Assign(nn.Cell):
         self.cov_step = self.cov_step + x
         return self.cov_step
 
+
 def test_assign():
     context.set_context(mode=context.GRAPH_MODE)
     net = Assign()
     input_data = ms.Tensor(np.array(1).astype(np.int32))
     net_back = GradNet(net)
     net_back(input_data)
+
+class AssignCheck(nn.Cell):
+    """ AssignCheck definition """
+
+    def __init__(self):
+        super(AssignCheck, self).__init__()
+        self.cov_step = ms.Parameter(0.0, name="cov_step", requires_grad=False)
+
+    def construct(self, x):
+        self.cov_step = x
+        return self.cov_step
+
+
+def test_assign_check_none():
+    context.set_context(mode=context.GRAPH_MODE)
+    net = AssignCheck()
+    with pytest.raises(TypeError):
+        net(None)
diff --git a/tests/ut/python/pynative_mode/test_implicit_conversion.py b/tests/ut/python/pynative_mode/test_implicit_conversion.py
index 39c885bd66..594bc29d48 100644
--- a/tests/ut/python/pynative_mode/test_implicit_conversion.py
+++ b/tests/ut/python/pynative_mode/test_implicit_conversion.py
@@ -15,9 +15,11 @@
 """ test implicit conversion """
 import numpy as np
 import pytest
+import mindspore as ms
 
-from mindspore import Tensor, nn
+from mindspore import Tensor, nn, Parameter
 from mindspore.ops import composite as C
+from mindspore.ops import functional as F
 
 grad_all_with_sens = C.GradOperation(get_all=True, sens_param=True)
 
@@ -263,3 +265,24 @@ def test_int8_tensor_and_uint8_tensors_add_grad():
     assert ret[1].dtype == y.dtype
     assert (ret[0].asnumpy() == sens.asnumpy()).all()
     assert (ret[1].asnumpy() == sens.asnumpy()).all()
+
+class AssignCheck(nn.Cell):
+    """ AssignCheck definition """
+
+    def __init__(self):
+        super(AssignCheck, self).__init__()
+        self.cov_step = Parameter(0.0, name="cov_step", requires_grad=False)
+
+    def construct(self, x, y):
+        F.assign(self.cov_step, y)
+        F.assign(x, y)
+        return x
+
+
+def test_assign_check_in_sig():
+    net = AssignCheck()
+    x = Tensor(2, ms.int8)
+    y = Tensor(3, ms.uint8)
+    with pytest.raises(TypeError) as e:
+        net(x, y)
+    assert "Parameter" in e.value.args[0]
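
Reviewer's note (not part of the patch): a minimal sketch of the user-visible behavior the new check enforces, distilled from the tests above. It assumes `F.assign` (`mindspore.ops.functional.assign`) declares its first input with the write (kRWWrite) signature, as the `Assign` primitive does; passing a plain Tensor there should now raise a TypeError up front, in both graph and pynative mode, rather than surfacing later as a ref-dtype conversion error. The `Net` class below is illustrative, not from the patch.

import numpy as np
import pytest
import mindspore as ms
from mindspore import Tensor, Parameter, nn
from mindspore.ops import functional as F


class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()
        self.weight = Parameter(Tensor(np.zeros(3).astype(np.float32)), name="w")

    def construct(self, x):
        F.assign(self.weight, x)  # accepted: first input is a Parameter
        F.assign(x, self.weight)  # rejected: x is a plain Tensor, not a Parameter
        return self.weight


net = Net()
with pytest.raises(TypeError) as e:
    net(Tensor(np.ones(3).astype(np.float32)))
assert "Parameter" in str(e.value)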