diff --git a/RELEASE.md b/RELEASE.md index 7848eec4d9..10abad6eb2 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -80,46 +80,6 @@ ##### Python API -###### `ops.TensorAdd`, `ops.Gelu`, `ops.GeluGrad`, `ops.FastGelu`, `ops.FastGeluGrad`, `ops.GatherV2`, change API name to `ops.Add` ([!11568](https://gitee.com/mind_spore/dashboard/projects/mindspore/mindspore/pulls/11568)), `ops.GeLU`, `ops.GeLUGrad`, `ops.FastGeLU`, `ops.FastGeLUGrad` ([!11603](https://gitee.com/mind_spore/dashboard/projects/mindspore/mindspore/pulls/11603)), `ops.Gather` ([!11713](https://gitee.com/mind_spore/dashboard/projects/mindspore/mindspore/pulls/11713)) - -The operator name TensorAdd is not standardized, it is changed to Add. Gelu, GeluGrad, FastGelu, and FastGeluGrad names are unified into ReLU naming rules, "lu" is changed to the uppercase "LU", GatherV2 is changed to Gather. The old interface can be used continuously, but will be deleted in subsequent versions, it is recommended to use and switch to the latest interface. - - - - - - - - - -
1.1.0 1.1.1
- -```python ->>> import mindspore.ops as ops ->>> ->>> add = ops.TensorAdd() ->>> gelu = ops.Gelu() ->>> gelu_grad = ops.GeluGrad() ->>> fast_gelu = ops.FastGelu() ->>> fast_gelu_grad = ops.FastGeluGrad() ->>> gather = ops.GatherV2() -``` - - - -```python ->>> import mindspore.ops as ops ->>> ->>> add = ops.Add() ->>> gelu = ops.GeLU() ->>> gelu_grad = ops.GeLUGrad() ->>> fast_gelu = ops.FastGeLU() ->>> fast_gelu_grad = ops.FastGeLUGrad() ->>> gather = ops.Gather() -``` - -
- ###### Delete shape and dtype of class Initializer ([!7373](https://gitee.com/mindspore/mindspore/pulls/7373/files)) Delete shape and dtype attributes of Initializer class. diff --git a/mindspore/core/ops/dropout.cc b/mindspore/core/ops/dropout.cc index adeb93f8af..04ba0a585c 100644 --- a/mindspore/core/ops/dropout.cc +++ b/mindspore/core/ops/dropout.cc @@ -24,26 +24,13 @@ namespace mindspore { namespace ops { -void Dropout::Init(const float ratio, const float keep_prob) { - this->set_ratio(ratio); - this->set_keep_prob(keep_prob); -} - -void Dropout::set_ratio(const float ratio) { - CheckAndConvertUtils::CheckInRange(kRatio, ratio, kIncludeRight, {0.0, 1.0}, this->name()); - this->AddAttr(kRatio, MakeValue(ratio)); -} +void Dropout::Init(const float keep_prob) { this->set_keep_prob(keep_prob); } void Dropout::set_keep_prob(const float keep_prob) { CheckAndConvertUtils::CheckInRange(kKeepProb, keep_prob, kIncludeRight, {0.0, 1.0}, this->name()); this->AddAttr(kKeepProb, MakeValue(keep_prob)); } -float Dropout::get_ratio() const { - auto value_ptr = this->GetAttr(kRatio); - return GetValue<float>(value_ptr); -} - float Dropout::get_keep_prob() const { auto value_ptr = this->GetAttr(kKeepProb); return GetValue<float>(value_ptr); diff --git a/mindspore/core/ops/dropout.h b/mindspore/core/ops/dropout.h index e0a1a157e2..e8e19400c1 100644 --- a/mindspore/core/ops/dropout.h +++ b/mindspore/core/ops/dropout.h @@ -31,10 +31,8 @@ class Dropout : public PrimitiveC { Dropout() : PrimitiveC(kNameDropout) {} ~Dropout() = default; MS_DECLARE_PARENT(Dropout, PrimitiveC); - void Init(const float ratio = 0.5, const float keep_prob = 0.5); - void set_ratio(const float ratio); + void Init(const float keep_prob = 0.5); void set_keep_prob(const float keep_prob); - float get_ratio() const; float get_keep_prob() const; }; AbstractBasePtr DropoutInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive, diff --git a/mindspore/core/ops/grad/dropout_grad.cc 
b/mindspore/core/ops/grad/dropout_grad.cc index 3c9e6af94f..41e626769d 100644 --- a/mindspore/core/ops/grad/dropout_grad.cc +++ b/mindspore/core/ops/grad/dropout_grad.cc @@ -19,26 +19,13 @@ namespace mindspore { namespace ops { -void DropoutGrad::Init(const float ratio, const float keep_prob) { - this->set_ratio(ratio); - this->set_keep_prob(keep_prob); -} - -void DropoutGrad::set_ratio(const float ratio) { - CheckAndConvertUtils::CheckInRange(kRatio, ratio, kIncludeRight, {0.0, 1.0}, this->name()); - this->AddAttr(kRatio, MakeValue(ratio)); -} +void DropoutGrad::Init(const float keep_prob) { this->set_keep_prob(keep_prob); } void DropoutGrad::set_keep_prob(const float keep_prob) { CheckAndConvertUtils::CheckInRange(kKeepProb, keep_prob, kIncludeRight, {0.0, 1.0}, this->name()); this->AddAttr(kKeepProb, MakeValue(keep_prob)); } -float DropoutGrad::get_ratio() const { - auto value_ptr = GetAttr(kRatio); - return GetValue<float>(value_ptr); -} - float DropoutGrad::get_keep_prob() const { auto value_ptr = GetAttr(kKeepProb); return GetValue<float>(value_ptr); @@ -70,6 +57,7 @@ TypePtr DropoutGradInferType(const PrimitivePtr &prim, const std::vector<AbstractBasePtr> &input_args) { return std::make_shared<abstract::AbstractTensor>(DropoutGradInferType(primitive, input_args), diff --git a/mindspore/core/ops/grad/dropout_grad.h b/mindspore/core/ops/grad/dropout_grad.h index 1beb1ad3ae..89f0b0d16e 100644 --- a/mindspore/core/ops/grad/dropout_grad.h +++ b/mindspore/core/ops/grad/dropout_grad.h @@ -31,10 +31,8 @@ class DropoutGrad : public PrimitiveC { DropoutGrad() : PrimitiveC(kNameDropoutGrad) {} ~DropoutGrad() = default; MS_DECLARE_PARENT(DropoutGrad, PrimitiveC); - void Init(const float ratio = 0.5, const float keep_prob = 0.5); - void set_ratio(const float ratio); + void Init(const float keep_prob = 0.5); void set_keep_prob(const float keep_prob); - float get_ratio() const; float get_keep_prob() const; }; diff --git a/mindspore/core/ops/slice.cc b/mindspore/core/ops/slice.cc new file mode 100644 index 0000000000..fd1efa584b 
--- /dev/null +++ b/mindspore/core/ops/slice.cc @@ -0,0 +1,31 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "ops/slice.h" +#include +#include +#include +#include +#include +#include "ops/op_utils.h" +#include "utils/check_convert_utils.h" +#include "abstract/primitive_infer_map.h" + +namespace mindspore { +namespace ops { +REGISTER_PRIMITIVE_C(kNameSlice, Slice); +} // namespace ops +} // namespace mindspore diff --git a/mindspore/core/ops/slice.h b/mindspore/core/ops/slice.h new file mode 100644 index 0000000000..da040f4c4e --- /dev/null +++ b/mindspore/core/ops/slice.h @@ -0,0 +1,40 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef MINDSPORE_CORE_OPS_SLICE_H_ +#define MINDSPORE_CORE_OPS_SLICE_H_ +#include +#include +#include +#include +#include "ops/primitive_c.h" +#include "abstract/abstract_value.h" +#include "utils/check_convert_utils.h" + +namespace mindspore { +namespace ops { +constexpr auto kNameSlice = "Slice"; +class Slice : public PrimitiveC { + public: + Slice() : PrimitiveC(kNameSlice) { InitIOName({"x", "begin", "size"}, {"output"}); } + ~Slice() = default; + MS_DECLARE_PARENT(Slice, PrimitiveC); + void Init() {} +}; +} // namespace ops +} // namespace mindspore + +#endif // MINDSPORE_CORE_OPS_SLICE_H_ diff --git a/mindspore/ops/operations/array_ops.py b/mindspore/ops/operations/array_ops.py index 94c96e615e..ac9b3f0fa5 100644 --- a/mindspore/ops/operations/array_ops.py +++ b/mindspore/ops/operations/array_ops.py @@ -816,10 +816,15 @@ class Gather(PrimitiveWithCheck): def GatherV2(): - """Warning: This will be changed later""" + """ + Returns a slice of the input tensor based on the specified indices and axis. + + The usage of GatherV2 is deprecated. Please use Gather. + """ logger.warning("WARN_DEPRECATED: The usage of GatherV2 is deprecated. Please use Gather.") return Gather() + class SparseGatherV2(Gather): """ Returns a slice of input tensor based on the specified indices and axis. diff --git a/mindspore/ops/operations/math_ops.py b/mindspore/ops/operations/math_ops.py index 2a16d58ae3..5ae582e83f 100644 --- a/mindspore/ops/operations/math_ops.py +++ b/mindspore/ops/operations/math_ops.py @@ -163,7 +163,11 @@ class Add(_MathBinaryOp): def TensorAdd(): - """Warning: This will be changed later""" + """ + Adds two input tensors element-wise. + + The usage of TensorAdd is deprecated. Please use Add. + """ logger.warning("WARN_DEPRECATED: The usage of TensorAdd is deprecated. 
Please use Add.") return Add() diff --git a/mindspore/ops/operations/nn_ops.py b/mindspore/ops/operations/nn_ops.py index c375548670..4a0e5e82e7 100644 --- a/mindspore/ops/operations/nn_ops.py +++ b/mindspore/ops/operations/nn_ops.py @@ -2970,6 +2970,11 @@ class GeLU(PrimitiveWithInfer): def Gelu(): + """ + Gaussian Error Linear Units activation function. + + The usage of Gelu is deprecated. Please use GeLU. + """ logger.warning("WARN_DEPRECATED: The usage of Gelu is deprecated. Please use GeLU.") return GeLU() @@ -3015,7 +3020,13 @@ class FastGeLU(PrimitiveWithInfer): validator.check_tensor_dtype_valid("input_x", input_x, (mstype.float16, mstype.float32), self.name) return input_x + def FastGelu(): + """ + Fast Gaussian Error Linear Units activation function. + + The usage of FastGelu is deprecated. Please use FastGeLU. + """ logger.warning("WARN_DEPRECATED: The usage of FastGelu is deprecated. Please use FastGeLU.") return FastGeLU() diff --git a/tests/ut/cpp/ops/test_ops_dropout_grad.cc b/tests/ut/cpp/ops/test_ops_dropout_grad.cc index fe0f0336d2..636f2b4126 100644 --- a/tests/ut/cpp/ops/test_ops_dropout_grad.cc +++ b/tests/ut/cpp/ops/test_ops_dropout_grad.cc @@ -35,7 +35,7 @@ class TestDropoutGrad : public UT::Common { TEST_F(TestDropoutGrad, test_ops_dropout_grad1) { auto dropout_grad = std::make_shared<DropoutGrad>(); dropout_grad->Init(0.5); - EXPECT_EQ((int64_t)(dropout_grad->get_ratio() - 0.5), 0); + EXPECT_EQ((int64_t)(dropout_grad->get_keep_prob() - 0.5), 0); auto in = TensorConstructUtils::CreateOnesTensor(kNumberTypeFloat32, std::vector<int64_t>{4}); MS_EXCEPTION_IF_NULL(in); auto abstract = dropout_grad->Infer({in->ToAbstract(), in->ToAbstract()});