From: @jinyaohui
Reviewed-by: @kingxian
Signed-off-by: @kingxian
commit b59c17172a

@@ -80,46 +80,6 @@
##### Python API
###### `ops.TensorAdd` change API name to `ops.Add` ([!11568](https://gitee.com/mind_spore/dashboard/projects/mindspore/mindspore/pulls/11568)); `ops.Gelu`, `ops.GeluGrad`, `ops.FastGelu`, `ops.FastGeluGrad` change API names to `ops.GeLU`, `ops.GeLUGrad`, `ops.FastGeLU`, `ops.FastGeLUGrad` ([!11603](https://gitee.com/mind_spore/dashboard/projects/mindspore/mindspore/pulls/11603)); `ops.GatherV2` change API name to `ops.Gather` ([!11713](https://gitee.com/mind_spore/dashboard/projects/mindspore/mindspore/pulls/11713))
The operator name TensorAdd was not standardized, so it is renamed Add. Gelu, GeluGrad, FastGelu, and FastGeluGrad are unified with the ReLU naming convention: the lowercase "lu" becomes the uppercase "LU". GatherV2 is renamed Gather. The old interfaces can still be used, but they will be removed in a later version; switching to the new interfaces is recommended.
<table>
<tr>
<td style="text-align:center"> 1.1.0 </td> <td style="text-align:center"> 1.1.1 </td>
</tr>
<tr>
<td>

```python
>>> import mindspore.ops as ops
>>>
>>> add = ops.TensorAdd()
>>> gelu = ops.Gelu()
>>> gelu_grad = ops.GeluGrad()
>>> fast_gelu = ops.FastGelu()
>>> fast_gelu_grad = ops.FastGeluGrad()
>>> gather = ops.GatherV2()
```

</td>
<td>

```python
>>> import mindspore.ops as ops
>>>
>>> add = ops.Add()
>>> gelu = ops.GeLU()
>>> gelu_grad = ops.GeLUGrad()
>>> fast_gelu = ops.FastGeLU()
>>> fast_gelu_grad = ops.FastGeLUGrad()
>>> gather = ops.Gather()
```

</td>
</tr>
</table>
###### Delete shape and dtype of class Initializer ([!7373](https://gitee.com/mindspore/mindspore/pulls/7373/files))
The `shape` and `dtype` attributes are removed from the `Initializer` class.
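Since `shape` and `dtype` no longer live on the `Initializer` object, they are supplied at the point where the tensor is materialized. A minimal sketch, assuming the 1.1-era `initializer()` helper from `mindspore.common.initializer`:

```python
import mindspore as ms
from mindspore.common.initializer import initializer, One

# shape and dtype are passed to the initializer() helper call,
# not stored on the Initializer instance (assumed 1.1-era API).
t = initializer(One(), shape=[2, 2], dtype=ms.float32)
```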

@@ -24,26 +24,13 @@
namespace mindspore {
namespace ops {
void Dropout::Init(const float ratio, const float keep_prob) {
  this->set_ratio(ratio);
  this->set_keep_prob(keep_prob);
}
void Dropout::set_ratio(const float ratio) {
  CheckAndConvertUtils::CheckInRange<float>(kRatio, ratio, kIncludeRight, {0.0, 1.0}, this->name());
  this->AddAttr(kRatio, MakeValue(ratio));
}
void Dropout::Init(const float keep_prob) { this->set_keep_prob(keep_prob); }
void Dropout::set_keep_prob(const float keep_prob) {
  CheckAndConvertUtils::CheckInRange<float>(kKeepProb, keep_prob, kIncludeRight, {0.0, 1.0}, this->name());
  this->AddAttr(kKeepProb, MakeValue(keep_prob));
}
float Dropout::get_ratio() const {
  auto value_ptr = this->GetAttr(kRatio);
  return GetValue<float>(value_ptr);
}
float Dropout::get_keep_prob() const {
  auto value_ptr = this->GetAttr(kKeepProb);
  return GetValue<float>(value_ptr);
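The hunk above drops the `ratio` attribute: `Init` now takes only `keep_prob`, range-checked with `kIncludeRight` over {0.0, 1.0}, i.e. it must lie in (0, 1]. A minimal sketch of the corresponding user-facing primitive, assuming the 1.1-era `ops.Dropout` signature that returns both the output and the mask:

```python
import numpy as np
import mindspore as ms
import mindspore.ops as ops

# keep_prob is the probability of keeping an element and must lie
# in (0, 1], mirroring the C++ range check above.
dropout = ops.Dropout(keep_prob=0.9)
x = ms.Tensor(np.ones([2, 2]), ms.float32)
output, mask = dropout(x)
```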

@@ -31,10 +31,8 @@ class Dropout : public PrimitiveC {
  Dropout() : PrimitiveC(kNameDropout) {}
  ~Dropout() = default;
  MS_DECLARE_PARENT(Dropout, PrimitiveC);
  void Init(const float ratio = 0.5, const float keep_prob = 0.5);
  void set_ratio(const float ratio);
  void Init(const float keep_prob = 0.5);
  void set_keep_prob(const float keep_prob);
  float get_ratio() const;
  float get_keep_prob() const;
};
AbstractBasePtr DropoutInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,

@@ -19,26 +19,13 @@
namespace mindspore {
namespace ops {
void DropoutGrad::Init(const float ratio, const float keep_prob) {
  this->set_ratio(ratio);
  this->set_keep_prob(keep_prob);
}
void DropoutGrad::set_ratio(const float ratio) {
  CheckAndConvertUtils::CheckInRange<float>(kRatio, ratio, kIncludeRight, {0.0, 1.0}, this->name());
  this->AddAttr(kRatio, MakeValue(ratio));
}
void DropoutGrad::Init(const float keep_prob) { this->set_keep_prob(keep_prob); }
void DropoutGrad::set_keep_prob(const float keep_prob) {
  CheckAndConvertUtils::CheckInRange<float>(kKeepProb, keep_prob, kIncludeRight, {0.0, 1.0}, this->name());
  this->AddAttr(kKeepProb, MakeValue(keep_prob));
}
float DropoutGrad::get_ratio() const {
  auto value_ptr = GetAttr(kRatio);
  return GetValue<float>(value_ptr);
}
float DropoutGrad::get_keep_prob() const {
  auto value_ptr = GetAttr(kKeepProb);
  return GetValue<float>(value_ptr);
@@ -70,6 +57,7 @@ TypePtr DropoutGradInferType(const PrimitivePtr &prim, const std::vector<AbstractBasePtr> &input_args) {
  return data_type;
}
} // namespace
AbstractBasePtr DropoutGradInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,
                                 const std::vector<AbstractBasePtr> &input_args) {
  return std::make_shared<abstract::AbstractTensor>(DropoutGradInferType(primitive, input_args),

@@ -31,10 +31,8 @@ class DropoutGrad : public PrimitiveC {
  DropoutGrad() : PrimitiveC(kNameDropoutGrad) {}
  ~DropoutGrad() = default;
  MS_DECLARE_PARENT(DropoutGrad, PrimitiveC);
  void Init(const float ratio = 0.5, const float keep_prob = 0.5);
  void set_ratio(const float ratio);
  void Init(const float keep_prob = 0.5);
  void set_keep_prob(const float keep_prob);
  float get_ratio() const;
  float get_keep_prob() const;
};

@@ -0,0 +1,31 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "ops/slice.h"
#include <string>
#include <algorithm>
#include <memory>
#include <set>
#include <vector>
#include "ops/op_utils.h"
#include "utils/check_convert_utils.h"
#include "abstract/primitive_infer_map.h"
namespace mindspore {
namespace ops {
REGISTER_PRIMITIVE_C(kNameSlice, Slice);
} // namespace ops
} // namespace mindspore

@@ -0,0 +1,40 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_CORE_OPS_SLICE_H_
#define MINDSPORE_CORE_OPS_SLICE_H_
#include <map>
#include <vector>
#include <string>
#include <memory>
#include "ops/primitive_c.h"
#include "abstract/abstract_value.h"
#include "utils/check_convert_utils.h"
namespace mindspore {
namespace ops {
constexpr auto kNameSlice = "Slice";
class Slice : public PrimitiveC {
 public:
  Slice() : PrimitiveC(kNameSlice) { InitIOName({"x", "begin", "size"}, {"output"}); }
  ~Slice() = default;
  MS_DECLARE_PARENT(Slice, PrimitiveC);
  void Init() {}
};
} // namespace ops
} // namespace mindspore
#endif // MINDSPORE_CORE_OPS_SLICE_H_
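The new `Slice` primitive declares inputs `x`, `begin`, and `size`. A minimal usage sketch of the matching Python operator, assuming the standard `ops.Slice` call order of tensor, begin offsets, and slice sizes:

```python
import numpy as np
import mindspore as ms
import mindspore.ops as ops

x = ms.Tensor(np.arange(12).reshape(3, 4), ms.float32)
slice_op = ops.Slice()
# Take a 2x2 block starting at row 1, column 1: inputs (x, begin, size).
out = slice_op(x, (1, 1), (2, 2))
```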

@@ -816,10 +816,15 @@ class Gather(PrimitiveWithCheck):
def GatherV2():
    """Warning: This will be changed later"""
    """
    Returns a slice of the input tensor based on the specified indices and axis.
    The usage of GatherV2 is deprecated. Please use Gather.
    """
    logger.warning("WARN_DEPRECATED: The usage of GatherV2 is deprecated. Please use Gather.")
    return Gather()


class SparseGatherV2(Gather):
    """
    Returns a slice of the input tensor based on the specified indices and axis.
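Each deprecated name is kept as a thin factory function: calling it logs a WARN_DEPRECATED message and returns an instance of the renamed primitive, so old call sites keep working. A minimal sketch, assuming both names are still exported from `mindspore.ops` in 1.1.1:

```python
import mindspore.ops as ops

gather = ops.GatherV2()  # logs "WARN_DEPRECATED: ... Please use Gather."
# The shim returns the new primitive, so downstream code is unaffected.
assert isinstance(gather, ops.Gather)
```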

@@ -163,7 +163,11 @@ class Add(_MathBinaryOp):
def TensorAdd():
    """Warning: This will be changed later"""
    """
    Adds two input tensors element-wise.
    The usage of TensorAdd is deprecated. Please use Add.
    """
    logger.warning("WARN_DEPRECATED: The usage of TensorAdd is deprecated. Please use Add.")
    return Add()

@@ -2971,6 +2971,11 @@ class GeLU(PrimitiveWithInfer):
def Gelu():
    """
    Gaussian Error Linear Units activation function.
    The usage of Gelu is deprecated. Please use GeLU.
    """
    logger.warning("WARN_DEPRECATED: The usage of Gelu is deprecated. Please use GeLU.")
    return GeLU()
@@ -3016,7 +3021,13 @@ class FastGeLU(PrimitiveWithInfer):
        validator.check_tensor_dtype_valid("input_x", input_x, (mstype.float16, mstype.float32), self.name)
        return input_x


def FastGelu():
    """
    Fast Gaussian Error Linear Units activation function.
    The usage of FastGelu is deprecated. Please use FastGeLU.
    """
    logger.warning("WARN_DEPRECATED: The usage of FastGelu is deprecated. Please use FastGeLU.")
    return FastGeLU()

@@ -35,7 +35,7 @@ class TestDropoutGrad : public UT::Common {
TEST_F(TestDropoutGrad, test_ops_dropout_grad1) {
  auto dropout_grad = std::make_shared<DropoutGrad>();
  dropout_grad->Init(0.5);
  EXPECT_EQ((int64_t)(dropout_grad->get_ratio() - 0.5), 0);
  EXPECT_EQ((int64_t)(dropout_grad->get_keep_prob() - 0.5), 0);
  auto in = TensorConstructUtils::CreateOnesTensor(kNumberTypeFloat32, std::vector<int64_t>{4});
  MS_EXCEPTION_IF_NULL(in);
  auto abstract = dropout_grad->Infer({in->ToAbstract(), in->ToAbstract()});
