!5110 [MS][Lite]add activationType attr for pooling

Merge pull request !5110 from songhonglei413/roi
pull/5110/MERGE
mindspore-ci-bot 5 years ago committed by Gitee
commit b4bc1deb4b

File diff suppressed because it is too large. (Select "Load Diff" to expand it.)

@ -30,6 +30,14 @@ extern "C" {
// FP32 pooling kernel entry points. Each call processes the slice of the
// output assigned to `task_id`; `pooling_param` carries the window/stride/pad
// geometry and thread count.
void AvgPooling(const float *input_ptr, float *output_ptr, PoolingParameter *pooling_param, int task_id);
void MaxPooling(const float *input_ptr, float *output_ptr, PoolingParameter *pooling_param, int task_id);
// Fused pooling + activation variants — the Relu/Relu6 names suggest outputs
// are clamped to [0, inf) / [0, 6] inside the pooling pass (TODO confirm
// against the nnacl implementation).
void AvgPoolingRelu(const float *input_ptr, float *output_ptr, PoolingParameter *pooling_param, int task_id);
void MaxPoolingRelu(const float *input_ptr, float *output_ptr, PoolingParameter *pooling_param, int task_id);
void AvgPoolingRelu6(const float *input_ptr, float *output_ptr, PoolingParameter *pooling_param, int task_id);
void MaxPoolingRelu6(const float *input_ptr, float *output_ptr, PoolingParameter *pooling_param, int task_id);
#ifdef __cplusplus
}
#endif

@ -44,6 +44,7 @@ typedef struct PoolingParameter {
int stride_w_;  // horizontal stride of the pooling window
int stride_h_;  // vertical stride of the pooling window
int thread_num_;  // number of worker threads the output is split across
ActType act_type_;  // fused activation selector (e.g. ActType_No / ActType_Relu / ActType_Relu6)
} PoolingParameter;
#endif // MINDSPORE_LITE_NNACL_POOLING_PARAMETER_H_

@ -290,6 +290,7 @@ table Pooling {
padLeft: int;   // left padding, in elements
padRight: int;  // right padding, in elements
roundMode: RoundMode;  // ceil/floor rule used when computing the output size
activationType: ActivationType = 0;  // fused activation; default 0 = no activation
}
table DepthwiseConv2D {

@ -36,6 +36,7 @@ int Pooling::GetPadDown() const { return this->primitive_->value.AsPooling()->pa
// Object-API (AsPooling) accessors: read attributes straight from the
// in-memory pooling table held by primitive_.
int Pooling::GetPadLeft() const { return this->primitive_->value.AsPooling()->padLeft; }
int Pooling::GetPadRight() const { return this->primitive_->value.AsPooling()->padRight; }
int Pooling::GetRoundMode() const { return this->primitive_->value.AsPooling()->roundMode; }
// Fused activation attribute added for pooling (schema::ActivationType as int).
int Pooling::GetActivationType() const { return this->primitive_->value.AsPooling()->activationType; }
void Pooling::SetFormat(int format) { this->primitive_->value.AsPooling()->format = (schema::Format)format; }
void Pooling::SetPoolingMode(int pooling_mode) {
@ -54,6 +55,9 @@ void Pooling::SetPadRight(int pad_right) { this->primitive_->value.AsPooling()->
void Pooling::SetRoundMode(int round_mode) {
this->primitive_->value.AsPooling()->roundMode = (schema::RoundMode)round_mode;
}
void Pooling::SetActivationType(int activation_type) {
this->primitive_->value.AsPooling()->activationType = (schema::ActivationType)activation_type;
}
int Pooling::UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &inputs) {
if (this->primitive_ == nullptr) {
@ -130,6 +134,7 @@ int Pooling::GetPadDown() const { return this->primitive_->value_as_Pooling()->p
// Flatbuffer-backed accessors: each reads one field of the serialized
// Pooling table owned by primitive_.
int Pooling::GetPadLeft() const {
  return this->primitive_->value_as_Pooling()->padLeft();
}
int Pooling::GetPadRight() const {
  return this->primitive_->value_as_Pooling()->padRight();
}
int Pooling::GetRoundMode() const {
  return this->primitive_->value_as_Pooling()->roundMode();
}
// Fused activation attribute (schema::ActivationType as int).
int Pooling::GetActivationType() const {
  return this->primitive_->value_as_Pooling()->activationType();
}
#endif

@ -44,6 +44,7 @@ class Pooling : public PrimitiveC {
void SetPadLeft(int pad_left);
void SetPadRight(int pad_right);
void SetRoundMode(int round_mode);
// Sets the fused activation (a schema::ActivationType value passed as int).
void SetActivationType(int activation_type);
#else
explicit Pooling(schema::Primitive *primitive) : PrimitiveC(primitive) {}
#endif
@ -61,6 +62,7 @@ class Pooling : public PrimitiveC {
int GetPadLeft() const;
int GetPadRight() const;
int GetRoundMode() const;
// Returns the fused activation (a schema::ActivationType value as int).
int GetActivationType() const;
int PadUp() const;
int PadDown() const;
@ -74,7 +76,7 @@ class Pooling : public PrimitiveC {
int pad_d_ = 0;
int pad_l_ = 0;
int pad_r_ = 0;
};
}; // namespace lite
} // namespace lite
} // namespace mindspore

@ -314,6 +314,14 @@ OpParameter *PopulatePoolingParameter(const mindspore::lite::PrimitiveC *primiti
pooling_param->round_ceil_ = false;
break;
}
// Translate the schema-level activation enum into the kernel-level ActType so
// the runtime can pick a fused pooling kernel; any value other than
// RELU / RELU6 degrades to "no activation".
if (pooling_primitive->GetActivationType() == schema::ActivationType_RELU) {
pooling_param->act_type_ = ActType_Relu;
} else if (pooling_primitive->GetActivationType() == schema::ActivationType_RELU6) {
pooling_param->act_type_ = ActType_Relu6;
} else {
pooling_param->act_type_ = ActType_No;
}
return reinterpret_cast<OpParameter *>(pooling_param);
}

@ -53,9 +53,27 @@ int PoolingCPUKernel::RunImpl(int task_id) {
// Raw fp32 buffers for this invocation; task_id selects this thread's slice.
auto input_ptr = reinterpret_cast<float *>(in_tensors_.at(kInputIndex)->Data());
auto output_ptr = reinterpret_cast<float *>(out_tensors_.at(kOutputIndex)->Data());
// Dispatch on pooling mode (max vs avg) and the fused activation recorded in
// act_type_. The *Relu / *Relu6 kernels apply the clamp during pooling —
// presumably avoiding a separate activation pass (TODO confirm in nnacl).
if (pooling_param_->max_pooling_) {
switch (pooling_param_->act_type_) {
case ActType_Relu:
MaxPoolingRelu(input_ptr, output_ptr, pooling_param_, task_id);
break;
case ActType_Relu6:
MaxPoolingRelu6(input_ptr, output_ptr, pooling_param_, task_id);
break;
default:
// ActType_No and any unrecognized value: plain max pooling.
MaxPooling(input_ptr, output_ptr, pooling_param_, task_id);
}
} else {
switch (pooling_param_->act_type_) {
case ActType_Relu:
AvgPoolingRelu(input_ptr, output_ptr, pooling_param_, task_id);
break;
case ActType_Relu6:
AvgPoolingRelu6(input_ptr, output_ptr, pooling_param_, task_id);
break;
default:
// ActType_No and any unrecognized value: plain average pooling.
AvgPooling(input_ptr, output_ptr, pooling_param_, task_id);
}
}
return RET_OK;
}

Loading…
Cancel
Save