!8782 add one onnx model & dpp memory leak check

From: @wangzhe128
Reviewed-by: @zhanghaibo5, @HilbertDavid
Signed-off-by: @zhanghaibo5
pull/8782/MERGE
Committed by: mindspore-ci-bot (via Gitee)
commit 3d49a9b06d

@@ -133,6 +133,16 @@ void DetectionPostProcessBaseCPUKernel::FreeAllocatedBuffer() {
     context_->allocator->Free(params_->selected_);
     params_->selected_ = nullptr;
   }
+  if (desc_.data_type == kNumberTypeInt8) {
+    if (input_boxes_ != nullptr) {
+      context_->allocator->Free(input_boxes_);
+      input_boxes_ = nullptr;
+    }
+    if (input_scores_ != nullptr) {
+      context_->allocator->Free(input_scores_);
+      input_scores_ = nullptr;
+    }
+  }
 }
 
 int DetectionPostProcessBaseCPUKernel::Run() {
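For context, the block added above closes the leak named in the title: in the int8 path the kernel presumably dequantizes its inputs into float buffers taken from context_->allocator (see the Dequantize hunk further down), so FreeAllocatedBuffer() must hand those buffers back. A minimal standalone sketch of that allocate/free pairing, using a hypothetical ToyAllocator and ToyInt8PostProcess in place of the real MindSpore Lite classes:

#include <cstddef>
#include <cstdlib>

// Hypothetical stand-in for the MindSpore Lite context allocator.
struct ToyAllocator {
  void *Malloc(std::size_t size) { return std::malloc(size); }
  void Free(void *ptr) { std::free(ptr); }
};

struct ToyInt8PostProcess {
  ToyAllocator *allocator = nullptr;
  float *input_boxes_ = nullptr;
  float *input_scores_ = nullptr;

  // Analogue of Dequantize(): int8 inputs are widened into float buffers that
  // the kernel itself owns, so the kernel is also responsible for freeing them.
  void DequantizeInputs(std::size_t element_num) {
    input_boxes_ = static_cast<float *>(allocator->Malloc(element_num * sizeof(float)));
    input_scores_ = static_cast<float *>(allocator->Malloc(element_num * sizeof(float)));
  }

  // Analogue of the added FreeAllocatedBuffer() branch: without these Free()
  // calls the two buffers would leak on every run of the int8 kernel.
  void FreeAllocatedBuffer() {
    if (input_boxes_ != nullptr) {
      allocator->Free(input_boxes_);
      input_boxes_ = nullptr;
    }
    if (input_scores_ != nullptr) {
      allocator->Free(input_scores_);
      input_scores_ = nullptr;
    }
  }
};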

@@ -33,8 +33,8 @@ int DetectionPostProcessCPUKernel::GetInputData() {
     MS_LOG(ERROR) << "Input data type error";
     return RET_ERROR;
   }
-  input_boxes_ = reinterpret_cast<float *>(in_tensors_.at(0)->MutableData());
-  input_scores_ = reinterpret_cast<float *>(in_tensors_.at(1)->MutableData());
+  input_boxes_ = reinterpret_cast<float *>(in_tensors_.at(0)->data_c());
+  input_scores_ = reinterpret_cast<float *>(in_tensors_.at(1)->data_c());
   return RET_OK;
 }
 
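The MutableData() -> data_c() switch above (and the matching one in the int8 kernel below) reads as avoiding an allocation side effect when reading inputs: as far as I can tell from the lite::Tensor API, MutableData() allocates backing memory for a tensor that has none, while data_c() only returns whatever pointer the tensor currently holds. A toy sketch of that assumed distinction; ToyTensor is illustrative, not the real lite::Tensor:

#include <cstddef>
#include <cstdlib>

class ToyTensor {
 public:
  explicit ToyTensor(std::size_t byte_size) : size_(byte_size) {}
  ~ToyTensor() { std::free(data_); }

  // Assumed MutableData() behaviour: lazily allocate, then return the pointer.
  // Calling it on an input that should already be filled can hide a missing
  // buffer behind a fresh, uninitialized allocation.
  void *MutableData() {
    if (data_ == nullptr) {
      data_ = std::malloc(size_);
    }
    return data_;
  }

  // Assumed data_c() behaviour: return the current pointer, never allocate.
  void *data_c() const { return data_; }

 private:
  void *data_ = nullptr;
  std::size_t size_ = 0;
};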

@@ -50,7 +50,7 @@ int DequantizeInt8ToFp32Run(void *cdata, int task_id) {
 }
 
 int DetectionPostProcessInt8CPUKernel::Dequantize(lite::Tensor *tensor, float **data) {
-  data_int8_ = reinterpret_cast<int8_t *>(tensor->MutableData());
+  data_int8_ = reinterpret_cast<int8_t *>(tensor->data_c());
   *data = reinterpret_cast<float *>(context_->allocator->Malloc(tensor->ElementsNum() * sizeof(float)));
   if (*data == nullptr) {
     MS_LOG(ERROR) << "Malloc data failed.";

@@ -24,3 +24,4 @@ efficientnet_lite3_int8_2.tflite
 efficientnet_lite4_int8_2.tflite
 mtk_transformer_encoder.tflite
 mtk_transformer_decoder_joint.tflite
+ml_ei_facedetection.onnx
