From e6dea906259195212959ad28803a1f57abac58e5 Mon Sep 17 00:00:00 2001
From: sunsuodong
Date: Tue, 25 Aug 2020 15:52:11 +0800
Subject: [PATCH] fix_fp16_conv1x1

---
 .../lite/src/runtime/kernel/arm/fp16/convolution_1x1_fp16.cc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mindspore/lite/src/runtime/kernel/arm/fp16/convolution_1x1_fp16.cc b/mindspore/lite/src/runtime/kernel/arm/fp16/convolution_1x1_fp16.cc
index ceca388d17..b5babf681d 100644
--- a/mindspore/lite/src/runtime/kernel/arm/fp16/convolution_1x1_fp16.cc
+++ b/mindspore/lite/src/runtime/kernel/arm/fp16/convolution_1x1_fp16.cc
@@ -83,7 +83,6 @@ int Convolution1x1FP16CPUKernel::InitConv1x1Param() {
 }
 
 int Convolution1x1FP16CPUKernel::InitWeightBias() {
-  auto bias_tensor = in_tensors_.at(kBiasIndex);
   auto weight_tensor = in_tensors_.at(kWeightIndex);
   auto input_channel = weight_tensor->Channel();
   auto output_channel = weight_tensor->Batch();
@@ -96,6 +95,7 @@ int Convolution1x1FP16CPUKernel::InitWeightBias() {
   }
   memset(bias_data_, 0, size);
   if (in_tensors_.size() == 3) {
+    auto bias_tensor = in_tensors_.at(kBiasIndex);
    if (bias_tensor->data_type() == kNumberTypeFloat16) {
      memcpy(bias_data_, bias_tensor->Data(), output_channel * sizeof(float16_t));
    } else {
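
Editor's note (not part of the patch): the moved statement is safe to defer because bias_data_ is zero-filled by the memset shown as context above, so a bias-free convolution simply keeps a zero bias. Below is a minimal standalone C++ sketch of the same pattern; Tensor, kBiasIndex, and InitWeightBias here are simplified stand-ins, not the MindSpore Lite definitions.

#include <algorithm>
#include <cstdio>
#include <cstring>
#include <vector>

// Simplified stand-in for a framework tensor; only the payload matters here.
struct Tensor {
  std::vector<float> data;
};

constexpr size_t kBiasIndex = 2;  // inputs: 0 = activation, 1 = weight, 2 = optional bias

void InitWeightBias(const std::vector<Tensor> &in_tensors, std::vector<float> *bias_data) {
  // Zero-fill first so a missing bias behaves as bias == 0.
  std::fill(bias_data->begin(), bias_data->end(), 0.0f);
  // Index the bias tensor only when a third input actually exists; an
  // unconditional in_tensors.at(kBiasIndex) would fail for bias-free kernels
  // that carry just two inputs, which is the situation the patch guards against.
  if (in_tensors.size() == 3) {
    const Tensor &bias_tensor = in_tensors.at(kBiasIndex);
    size_t n = std::min(bias_data->size(), bias_tensor.data.size());
    std::memcpy(bias_data->data(), bias_tensor.data.data(), n * sizeof(float));
  }
}

int main() {
  std::vector<Tensor> no_bias(2);   // activation + weight only, no bias tensor
  std::vector<float> bias(4, 1.0f);
  InitWeightBias(no_bias, &bias);   // safe: bias is simply reset to zero
  std::printf("bias[0] = %f\n", bias[0]);
  return 0;
}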