diff --git a/mindspore/core/abstract/prim_nn.cc b/mindspore/core/abstract/prim_nn.cc
index 692814628a..40d25a836a 100644
--- a/mindspore/core/abstract/prim_nn.cc
+++ b/mindspore/core/abstract/prim_nn.cc
@@ -481,7 +481,7 @@ AbstractBasePtr InferImplPad(const AnalysisEnginePtr &, const PrimitivePtr &prim
   auto padding_attr = primitive->GetAttr("paddings");
   MS_EXCEPTION_IF_NULL(padding_attr);
   if (!padding_attr->isa<ValueTuple>()) {
-    MS_LOG(EXCEPTION) << "paddings is not a ValueTuple";
+    MS_LOG(EXCEPTION) << "Paddings is not a ValueTuple";
   }
   std::vector<ValuePtr> paddings = padding_attr->cast<ValueTuplePtr>()->value();
   std::vector<std::vector<int>> paddings_vec;
@@ -498,7 +498,7 @@ AbstractBasePtr InferImplPad(const AnalysisEnginePtr &, const PrimitivePtr &prim
   size_t length = paddings_vec.size();
   for (size_t i = 0; i < length; ++i) {
     if (paddings_vec[i].size() != 2) {
-      MS_LOG(EXCEPTION) << "paddings 's second dim size is not 2";
+      MS_LOG(EXCEPTION) << "Paddings 's second dim size is not 2";
     }
     result_shp.push_back(input_shp[i] + paddings_vec[i][0] + paddings_vec[i][1]);
   }
diff --git a/mindspore/core/abstract/prim_others.cc b/mindspore/core/abstract/prim_others.cc
index 6a2da572c1..0a4f7b1fa2 100644
--- a/mindspore/core/abstract/prim_others.cc
+++ b/mindspore/core/abstract/prim_others.cc
@@ -500,19 +500,11 @@ AbstractBasePtr InferImplExpandDims(const AnalysisEnginePtr &, const PrimitivePt
   MS_EXCEPTION_IF_NULL(x);
   MS_EXCEPTION_IF_NULL(x->shape());
 
-  auto axis = CheckArg<AbstractTensor>(op_name, args_spec_list, 1);
-  MS_EXCEPTION_IF_NULL(axis);
-
   std::vector<int> shape;
   std::vector<int> x_shape = x->shape()->shape();
   shape.insert(shape.end(), x_shape.begin(), x_shape.end());
-
-  auto axis_value = axis->BuildValue();
-  if (!axis_value->isa<tensor::Tensor>()) {
-    MS_LOG(EXCEPTION) << axis_value << " axis_value should be tensor, but got " << axis_value->type_name();
-  }
-  auto axis_tensor = axis_value->cast<tensor::TensorPtr>();
-  int value = *(static_cast<int *>(axis_tensor->data_c()));
+  auto axis = primitive->GetAttr("axis");
+  auto value = GetValue<int>(axis);
   if (value < -(SizeToInt(x_shape.size()) + 1) || value > SizeToInt(x_shape.size())) {
     MS_LOG(EXCEPTION) << " axis value shoud be in range [-intput_x.dim-1,input_x.dim], but axis value is" << value
                       << " and input_x.dim is" << x_shape.size();
diff --git a/mindspore/nn/layer/embedding.py b/mindspore/nn/layer/embedding.py
index ff68e799f5..f6e8c8f48c 100755
--- a/mindspore/nn/layer/embedding.py
+++ b/mindspore/nn/layer/embedding.py
@@ -159,7 +159,7 @@ class EmbeddingLookup(Cell):
         - **input_indices** (Tensor) - The shape of tensor is :math:`(y_1, y_2, ..., y_S)`.
           Specifies the indices of elements of the original Tensor. Values can be out of range of embedding_table,
           and the exceeding part will be filled with 0 in the output. Input_indices must only be a 2d tensor in
-          this interface.
+          this interface when run in semi auto parallel/auto parallel mode.
 
     Outputs:
         Tensor, the shape of tensor is :math:`(z_1, z_2, ..., z_N)`.
@@ -310,7 +310,7 @@ class MultiFieldEmbeddingLookup(EmbeddingLookup):
          Specifies the weights of elements of the input_indices. The lookout vector will multiply with
          the input_values. Type is Float32.
        - **field_ids** (Tensor) - The shape of tensor is :math:`(batch_size, seq_length)`.
-          Specifies the field id of elements of the input_indices. Type is Type is Int16, Int32.
+          Specifies the field id of elements of the input_indices. Type is Int16, Int32.
 
    Outputs:
        Tensor, the shape of tensor is :math:`(batch_size, field_size, embedding_size)`. Type is Float32.
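
A minimal Python usage sketch, not part of the patch above, showing the two public APIs whose inference/docstrings it touches; it assumes the standard mindspore.ops.operations.ExpandDims and mindspore.nn.EmbeddingLookup interfaces, and the exact shapes here are illustrative only.

# Sketch assuming MindSpore's public Python APIs (ExpandDims, nn.EmbeddingLookup).
import numpy as np
import mindspore as ms
from mindspore import nn
from mindspore.ops import operations as P

# ExpandDims: the axis argument is carried as the primitive's "axis" attribute,
# which InferImplExpandDims now reads via primitive->GetAttr("axis").
x = ms.Tensor(np.ones((2, 3)), ms.float32)
expanded = P.ExpandDims()(x, 0)  # result shape: (1, 2, 3)

# EmbeddingLookup: input_indices is a 2-D tensor, matching the docstring note
# about semi auto parallel / auto parallel mode.
indices = ms.Tensor(np.array([[1, 0], [3, 2]]), ms.int32)
lookup = nn.EmbeddingLookup(vocab_size=4, embedding_size=2)
out = lookup(indices)  # result shape: (2, 2, 2)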