From 9b5400bb6e1bb7432e22ea64a266866a8e5f3548 Mon Sep 17 00:00:00 2001
From: zhaozhenlong
Date: Fri, 18 Sep 2020 15:41:34 +0800
Subject: [PATCH] hard tanh fp32

---
 mindspore/lite/nnacl/fp32/activation.c        | 18 ++++
 mindspore/lite/nnacl/fp32/activation.h        |  3 +
 mindspore/lite/schema/ops.fbs                 |  5 +-
 mindspore/lite/src/ops/activation.cc          |  8 +-
 mindspore/lite/src/ops/activation.h           |  4 +
 mindspore/lite/src/populate_parameter.cc      |  2 +
 .../src/runtime/kernel/arm/fp32/activation.cc |  2 +
 .../src/runtime/kernel/arm/fp32/activation.h  |  4 +
 .../kernel/arm/fp32/activation_fp32_test.cc   | 89 +++++++++++++++++++
 9 files changed, 133 insertions(+), 2 deletions(-)

diff --git a/mindspore/lite/nnacl/fp32/activation.c b/mindspore/lite/nnacl/fp32/activation.c
index 087b0f80b1..a33c8368f4 100644
--- a/mindspore/lite/nnacl/fp32/activation.c
+++ b/mindspore/lite/nnacl/fp32/activation.c
@@ -116,3 +116,21 @@ int HSwish(const float *src, int length, float *dst) {
   }
   return NNACL_OK;
 }
+
+int HardTanh(const float *src, int length, float *dst, float min_val, float max_val) {
+  if (max_val <= min_val) {
+    return NNACL_ERR;
+  }
+  int i = 0;
+  for (i = 0; i < length; ++i) {
+    float in = src[i];
+    if (in < min_val) {
+      dst[i] = min_val;
+    } else if (in > max_val) {
+      dst[i] = max_val;
+    } else {
+      dst[i] = in;
+    }
+  }
+  return NNACL_OK;
+}
diff --git a/mindspore/lite/nnacl/fp32/activation.h b/mindspore/lite/nnacl/fp32/activation.h
index dd1177e2d4..a387d94c44 100644
--- a/mindspore/lite/nnacl/fp32/activation.h
+++ b/mindspore/lite/nnacl/fp32/activation.h
@@ -24,6 +24,8 @@ typedef struct ActivationParameter {
   OpParameter op_parameter_;
   int type_;
   float alpha_;
+  float min_val_;
+  float max_val_;
 } ActivationParameter;
 
 #ifdef __cplusplus
@@ -35,6 +37,7 @@ int LRelu(const float *src, int length, float *dst, float alpha);
 int Sigmoid(const float *src, int length, float *dst);
 int Tanh(const float *src, int length, float *dst);
 int HSwish(const float *src, int length, float *dst);
+int HardTanh(const float *src, int length, float *dst, float min_val, float max_val);
 #ifdef __cplusplus
 }
 #endif
diff --git a/mindspore/lite/schema/ops.fbs b/mindspore/lite/schema/ops.fbs
index 42cfe69822..60cf1095f9 100644
--- a/mindspore/lite/schema/ops.fbs
+++ b/mindspore/lite/schema/ops.fbs
@@ -57,7 +57,8 @@ enum ActivationType : byte {
     HSIGMOID = 13,
     THRESHOLDRELU = 14,
     LINEAR = 15,
-    UNKNOW = 16
+    HARD_TANH = 16,
+    UNKNOW = 17
 }
 enum ActivationGradType : byte {
     NO_ACTIVATION = 0,
@@ -155,6 +156,8 @@ table SoftMax {
 table Activation {
     type: ActivationType = 0;
     alpha: float = 0.2;
+    min_val: float = -1.0;
+    max_val: float = 1.0;
 }
 table ActivationGrad {
     type: ActivationType = 0;
diff --git a/mindspore/lite/src/ops/activation.cc b/mindspore/lite/src/ops/activation.cc
index b4af4c053c..b31b52e5ff 100644
--- a/mindspore/lite/src/ops/activation.cc
+++ b/mindspore/lite/src/ops/activation.cc
@@ -22,9 +22,13 @@ namespace lite {
 #ifdef PRIMITIVE_WRITEABLE
 int Activation::GetType() const { return this->primitive_->value.AsActivation()->type; }
 float Activation::GetAlpha() const { return this->primitive_->value.AsActivation()->alpha; }
+float Activation::GetMinVal() const { return this->primitive_->value.AsActivation()->min_val; }
+float Activation::GetMaxVal() const { return this->primitive_->value.AsActivation()->max_val; }
 
 void Activation::SetType(int type) { this->primitive_->value.AsActivation()->type = (schema::ActivationType)type; }
 void Activation::SetAlpha(float alpha) { this->primitive_->value.AsActivation()->alpha = alpha; }
+void Activation::SetMinVal(float min_val) { this->primitive_->value.AsActivation()->min_val = min_val; }
+void Activation::SetMaxVal(float max_val) { this->primitive_->value.AsActivation()->max_val = max_val; }
 
 int Activation::UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &inputs) {
   if (this->primitive_ == nullptr) {
@@ -63,13 +67,15 @@ int Activation::UnPackToFlatBuilder(const schema::Primitive *primitive, flatbuffers::FlatBufferBuilder *fbb) {
     MS_LOG(ERROR) << "value_as_Activation return nullptr";
     return RET_ERROR;
   }
-  auto val_offset = schema::CreateActivation(*fbb, attr->type(), attr->alpha());
+  auto val_offset = schema::CreateActivation(*fbb, attr->type(), attr->alpha(), attr->min_val(), attr->max_val());
   auto prim_offset = schema::CreatePrimitive(*fbb, schema::PrimitiveType_Activation, val_offset.o);
   fbb->Finish(prim_offset);
   return RET_OK;
 }
 int Activation::GetType() const { return this->primitive_->value_as_Activation()->type(); }
 float Activation::GetAlpha() const { return this->primitive_->value_as_Activation()->alpha(); }
+float Activation::GetMinVal() const { return this->primitive_->value_as_Activation()->min_val(); }
+float Activation::GetMaxVal() const { return this->primitive_->value_as_Activation()->max_val(); }
 #endif
 }  // namespace lite
 }  // namespace mindspore
diff --git a/mindspore/lite/src/ops/activation.h b/mindspore/lite/src/ops/activation.h
index 3934572b9c..19181d977c 100644
--- a/mindspore/lite/src/ops/activation.h
+++ b/mindspore/lite/src/ops/activation.h
@@ -33,6 +33,8 @@ class Activation : public PrimitiveC {
   int UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &inputs) override;
   void SetType(int type);
   void SetAlpha(float alpha);
+  void SetMinVal(float minVal);
+  void SetMaxVal(float maxVal);
 #else
   Activation() = default;
 
@@ -40,6 +42,8 @@
 #endif
   int GetType() const;
   float GetAlpha() const;
+  float GetMinVal() const;
+  float GetMaxVal() const;
 };
 }  // namespace lite
 }  // namespace mindspore
diff --git a/mindspore/lite/src/populate_parameter.cc b/mindspore/lite/src/populate_parameter.cc
index c52b403dfa..8b5940f434 100644
--- a/mindspore/lite/src/populate_parameter.cc
+++ b/mindspore/lite/src/populate_parameter.cc
@@ -636,6 +636,8 @@ OpParameter *PopulateActivationParameter(const mindspore::lite::PrimitiveC *primitive) {
     reinterpret_cast<mindspore::lite::Activation *>(const_cast<mindspore::lite::PrimitiveC *>(primitive));
   act_param->type_ = static_cast<int>(activation->GetType());
   act_param->alpha_ = activation->GetAlpha();
+  act_param->min_val_ = activation->GetMinVal();
+  act_param->max_val_ = activation->GetMaxVal();
   return reinterpret_cast<OpParameter *>(act_param);
 }
 
diff --git a/mindspore/lite/src/runtime/kernel/arm/fp32/activation.cc b/mindspore/lite/src/runtime/kernel/arm/fp32/activation.cc
index 883ce9bea4..622d058aaa 100644
--- a/mindspore/lite/src/runtime/kernel/arm/fp32/activation.cc
+++ b/mindspore/lite/src/runtime/kernel/arm/fp32/activation.cc
@@ -57,6 +57,8 @@ int ActivationCPUKernel::DoActivation(int task_id) {
     error_code = Tanh(input_addr + stride * task_id, count, output_addr + stride * task_id);
   } else if (type_ == schema::ActivationType_HSWISH) {
     error_code = HSwish(input_addr + stride * task_id, count, output_addr + stride * task_id);
+  } else if (type_ == schema::ActivationType_HARD_TANH) {
+    error_code = HardTanh(input_addr + stride * task_id, count, output_addr + stride * task_id, min_val_, max_val_);
   } else {
     MS_LOG(ERROR) << "Activation type error";
     return RET_ERROR;
diff --git a/mindspore/lite/src/runtime/kernel/arm/fp32/activation.h b/mindspore/lite/src/runtime/kernel/arm/fp32/activation.h
index 8846334a78..3ea436f8cc 100644
--- a/mindspore/lite/src/runtime/kernel/arm/fp32/activation.h
+++ b/mindspore/lite/src/runtime/kernel/arm/fp32/activation.h
@@ -30,6 +30,8 @@ class ActivationCPUKernel : public LiteKernel {
       : LiteKernel(param, inputs, outputs, ctx, primitive), thread_count_(ctx->thread_num_) {
     type_ = (reinterpret_cast<ActivationParameter *>(param))->type_;
     alpha_ = (reinterpret_cast<ActivationParameter *>(param))->alpha_;
+    min_val_ = (reinterpret_cast<ActivationParameter *>(param))->min_val_;
+    max_val_ = (reinterpret_cast<ActivationParameter *>(param))->max_val_;
   }
   ~ActivationCPUKernel() override = default;
 
@@ -42,6 +44,8 @@ class ActivationCPUKernel : public LiteKernel {
   int thread_count_;
   int type_;
   float alpha_;
+  float min_val_;
+  float max_val_;
 };
 }  // namespace mindspore::kernel
 
diff --git a/mindspore/lite/test/ut/src/runtime/kernel/arm/fp32/activation_fp32_test.cc b/mindspore/lite/test/ut/src/runtime/kernel/arm/fp32/activation_fp32_test.cc
index 86f878c7c4..8907d1efd2 100644
--- a/mindspore/lite/test/ut/src/runtime/kernel/arm/fp32/activation_fp32_test.cc
+++ b/mindspore/lite/test/ut/src/runtime/kernel/arm/fp32/activation_fp32_test.cc
@@ -126,4 +126,93 @@ TEST_F(TestActivationFp32, HSwishFp32) {
   input0_tensor.SetData(nullptr);
   output0_tensor.SetData(nullptr);
 }
+
+TEST_F(TestActivationFp32, HardTanh1) {
+  std::vector<lite::Tensor *> inputs_tensor;
+  std::vector<lite::Tensor *> outputs_tensor;
+
+  ActivationParameter op_param;
+  op_param.op_parameter_.type_ = schema::PrimitiveType_Activation;
+  op_param.type_ = schema::ActivationType_HARD_TANH;
+  op_param.min_val_ = -1.0f;
+  op_param.max_val_ = 1.0f;
+
+  std::vector<float> input = {-2.0, -1.0, -0.5, 0.0, 0.5, 1.0, 5.0, 6.0};
+  std::vector<int> in_shape = {8};
+
+  lite::Tensor input0_tensor;
+  inputs_tensor.push_back(&input0_tensor);
+  input0_tensor.SetData(input.data());
+  input0_tensor.set_shape(in_shape);
+
+  std::vector<float> output(8);
+  std::vector<int> output_shape = {8};
+
+  lite::Tensor output0_tensor;
+  outputs_tensor.push_back(&output0_tensor);
+  output0_tensor.SetData(output.data());
+
+  kernel::KernelKey desc = {kernel::KERNEL_ARCH::kCPU, kNumberTypeFloat32, schema::PrimitiveType_Activation};
+  auto creator = lite::KernelRegistry::GetInstance()->GetCreator(desc);
+  ASSERT_NE(creator, nullptr);
+  lite::InnerContext ctx;
+  ctx.thread_num_ = 2;
+  ASSERT_EQ(lite::RET_OK, ctx.Init());
+  kernel::LiteKernel *kernel =
+    creator(inputs_tensor, outputs_tensor, reinterpret_cast<OpParameter *>(&op_param), &ctx, desc, nullptr);
+  ASSERT_NE(kernel, nullptr);
+  auto output_tensor_shape = output0_tensor.shape();
+  kernel->Run();
+
+  std::vector<float> expect_output = {-1.0, -1.0, -0.5, 0.0, 0.5, 1.0, 1.0, 1.0};
+  CompareOutputData(output.data(), expect_output.data(), 8, 0.00001);
+
+  input0_tensor.SetData(nullptr);
+  output0_tensor.SetData(nullptr);
+}
+
+TEST_F(TestActivationFp32, HardTanh2) {
+  std::vector<lite::Tensor *> inputs_tensor;
+  std::vector<lite::Tensor *> outputs_tensor;
+
+  ActivationParameter op_param;
+  op_param.op_parameter_.type_ = schema::PrimitiveType_Activation;
+  op_param.type_ = schema::ActivationType_HARD_TANH;
+  op_param.min_val_ = -2.0f;
+  op_param.max_val_ = 2.0f;
+
+  std::vector<float> input = {-3.0, -2.0, -1.0, 0.0, 1.0, 5.0, 6.0, 7.0};
+  std::vector<int> in_shape = {8};
+
+  lite::Tensor input0_tensor;
+  inputs_tensor.push_back(&input0_tensor);
+  input0_tensor.SetData(input.data());
+  input0_tensor.set_shape(in_shape);
+
+  std::vector<float> output(8);
+  std::vector<int> output_shape = {8};
+
+  lite::Tensor output0_tensor;
+  outputs_tensor.push_back(&output0_tensor);
+  output0_tensor.SetData(output.data());
+
+  kernel::KernelKey desc = {kernel::KERNEL_ARCH::kCPU, kNumberTypeFloat32, schema::PrimitiveType_Activation};
+  auto creator = lite::KernelRegistry::GetInstance()->GetCreator(desc);
+  ASSERT_NE(creator, nullptr);
+  lite::InnerContext ctx;
+  ctx.thread_num_ = 2;
+  ASSERT_EQ(lite::RET_OK, ctx.Init());
+  kernel::LiteKernel *kernel =
+    creator(inputs_tensor, outputs_tensor, reinterpret_cast<OpParameter *>(&op_param), &ctx, desc, nullptr);
+  ASSERT_NE(kernel, nullptr);
+  auto output_tensor_shape = output0_tensor.shape();
+  kernel->Run();
+
+  std::vector<float> expect_output = {-2.0, -2.0, -1.0, 0.0, 1.0, 2.0, 2.0, 2.0};
+  CompareOutputData(output.data(), expect_output.data(), 8, 0.00001);
+
+  input0_tensor.SetData(nullptr);
+  output0_tensor.SetData(nullptr);
+}
+
 }  // namespace mindspore
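
Note (not part of the patch): for readers who want to see the semantics of the new op in isolation, below is a minimal self-contained C sketch of what this patch adds. HardTanh clamps each input element into [min_val, max_val] and rejects a degenerate range (max_val <= min_val) with an error; min_val = -1.0 and max_val = 1.0 mirror the schema defaults added in ops.fbs. The NNACL_OK/NNACL_ERR stubs and the main() driver are assumptions made so the sketch compiles standalone.

#include <stdio.h>

#define NNACL_OK 0   /* stub standing in for nnacl's success code */
#define NNACL_ERR 1  /* stub standing in for nnacl's generic error code */

/* Same clamping logic as the HardTanh added in nnacl/fp32/activation.c. */
int HardTanh(const float *src, int length, float *dst, float min_val, float max_val) {
  if (max_val <= min_val) {
    return NNACL_ERR;  /* degenerate range is rejected up front */
  }
  for (int i = 0; i < length; ++i) {
    float in = src[i];
    dst[i] = in < min_val ? min_val : (in > max_val ? max_val : in);
  }
  return NNACL_OK;
}

int main(void) {
  /* Same input as TestActivationFp32.HardTanh1, with the default [-1, 1] range. */
  const float in[8] = {-2.0f, -1.0f, -0.5f, 0.0f, 0.5f, 1.0f, 5.0f, 6.0f};
  float out[8];
  if (HardTanh(in, 8, out, -1.0f, 1.0f) != NNACL_OK) {
    return 1;
  }
  for (int i = 0; i < 8; ++i) {
    printf("%g -> %g\n", in[i], out[i]);  /* expect: -1 -1 -0.5 0 0.5 1 1 1 */
  }
  return 0;
}

The expected output matches the expect_output vector in HardTanh1 above; the kernel itself is reached through the registry-created ActivationCPUKernel, which forwards min_val_/max_val_ from ActivationParameter.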