diff --git a/mindspore/lite/test/models_tf_fp16.cfg b/mindspore/lite/test/models_tf_fp16.cfg
index 605b4b2971..d7f188a8c4 100644
--- a/mindspore/lite/test/models_tf_fp16.cfg
+++ b/mindspore/lite/test/models_tf_fp16.cfg
@@ -1 +1,2 @@
-decoder_step_201217_modified.pb 5
+decoder_step_201217_modified.pb 5;;0.5
+encoder_0111_control_flow.pb 4;1:1,44:1:1;10
diff --git a/mindspore/lite/test/run_benchmark_nets.sh b/mindspore/lite/test/run_benchmark_nets.sh
index 9877abe026..2d318a2e42 100755
--- a/mindspore/lite/test/run_benchmark_nets.sh
+++ b/mindspore/lite/test/run_benchmark_nets.sh
@@ -1851,15 +1851,15 @@ function Run_arm64_fp16() {
 
     # Run tf fp16 models
     while read line; do
-        model_name_and_input_num=${line%;*}
-        length=${#model_name_and_input_num}
-        input_shapes=${line:length+1}
-        tf_line_info=${model_name_and_input_num}
-        if [[ $model_name == \#* ]]; then
+        tf_line_info=${line}
+        if [[ $tf_line_info == \#* ]]; then
             continue
         fi
         model_name=`echo ${tf_line_info}|awk -F ' ' '{print $1}'`
-        input_num=`echo ${tf_line_info}|awk -F ' ' '{print $2}'`
+        model_info=`echo ${tf_line_info}|awk -F ' ' '{print $2}'`
+        input_num=`echo ${model_info}|awk -F ';' '{print $1}'`
+        input_shapes=`echo ${model_info}|awk -F ';' '{print $2}'`
+        accuracy_limit=`echo ${model_info}|awk -F ';' '{print $3}'`
         input_files=''
         for i in $(seq 1 $input_num)
         do
@@ -1867,8 +1867,8 @@ function Run_arm64_fp16() {
         done
         echo ${model_name} >> "${run_arm64_fp16_log_file}"
         echo 'cd /data/local/tmp/benchmark_test' > adb_run_cmd.txt
-        echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/benchmark_test;./benchmark --inputShapes='${input_shapes}' --modelFile='${model_name}'.fp16.ms --inDataFile='${input_files}' --benchmarkDataFile=/data/local/tmp/input_output/output/'${model_name}'.ms.out --enableFp16=true' >> adb_run_cmd.txt
-        echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/benchmark_test;./benchmark --inputShapes='${input_shapes}' --modelFile='${model_name}'.fp16.ms --inDataFile='${input_files}' --benchmarkDataFile=/data/local/tmp/input_output/output/'${model_name}'.ms.out --enableFp16=true' >> adb_run_cmd.txt
+        echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/benchmark_test;./benchmark --inputShapes='${input_shapes}' --modelFile='${model_name}'.fp16.ms --inDataFile='${input_files}' --benchmarkDataFile=/data/local/tmp/input_output/output/'${model_name}'.ms.out --enableFp16=true --accuracyThreshold='${accuracy_limit} >> adb_run_cmd.txt
+        echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/benchmark_test;./benchmark --inputShapes='${input_shapes}' --modelFile='${model_name}'.fp16.ms --inDataFile='${input_files}' --benchmarkDataFile=/data/local/tmp/input_output/output/'${model_name}'.ms.out --enableFp16=true --accuracyThreshold='${accuracy_limit} >> adb_run_cmd.txt
         cat adb_run_cmd.txt >> "${run_arm64_fp16_log_file}"
         adb -s ${device_id} shell < adb_run_cmd.txt >> "${run_arm64_fp16_log_file}"
         if [ $? = 0 ]; then
diff --git a/mindspore/lite/tools/optimizer/common/gllo_utils.cc b/mindspore/lite/tools/optimizer/common/gllo_utils.cc
index d30b0c36d3..81ee93f940 100644
--- a/mindspore/lite/tools/optimizer/common/gllo_utils.cc
+++ b/mindspore/lite/tools/optimizer/common/gllo_utils.cc
@@ -546,6 +546,12 @@ bool IsConvNode(const BaseRef &n) {
     if (prim == nullptr) {
       return false;
     }
+
+    if (prim->GetAttr(ops::kActivationType) != nullptr &&
+        GetValue<int64_t>(prim->GetAttr(ops::kActivationType)) != NO_ACTIVATION) {
+      return false;
+    }
+
     bool is_depth_wise = prim->GetAttr(ops::kIsDepthWise) != nullptr && GetValue<bool>(prim->GetAttr(ops::kIsDepthWise));
     return CheckPrimitiveType(anf_node, prim::kPrimConv2DFusion) ||
diff --git a/mindspore/lite/tools/optimizer/common/pass_manager_extends.cc b/mindspore/lite/tools/optimizer/common/pass_manager_extends.cc
index a20ef4b7c6..34f2982843 100644
--- a/mindspore/lite/tools/optimizer/common/pass_manager_extends.cc
+++ b/mindspore/lite/tools/optimizer/common/pass_manager_extends.cc
@@ -26,7 +26,6 @@
 namespace mindspore {
 namespace opt {
-static size_t count = 0;
 constexpr size_t kMaxRepassTimes = 12;
 
 const std::vector<PassPtr> &PassManager::Passes() const { return passes_; }
 
@@ -77,6 +76,7 @@ bool PassManager::Run(const FuncGraphPtr &func_graph) const {
     return false;
   }
   bool changed = false;
+  size_t count = 0;
   // run all passes
   bool change = true;
   while (change) {
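Note on the models_tf_fp16.cfg format above: each line is now "model_file input_num;input_shapes;accuracy_limit", and the updated Run_arm64_fp16 loop splits the second column on ';'. A minimal standalone sketch of that parsing (illustrative only, not part of the patch; variable names mirror the script):

#!/bin/sh
# Split one cfg line the same way the updated Run_arm64_fp16 loop does.
line='encoder_0111_control_flow.pb 4;1:1,44:1:1;10'
model_name=`echo ${line} | awk -F ' ' '{print $1}'`           # encoder_0111_control_flow.pb
model_info=`echo ${line} | awk -F ' ' '{print $2}'`           # 4;1:1,44:1:1;10
input_num=`echo ${model_info} | awk -F ';' '{print $1}'`      # 4
input_shapes=`echo ${model_info} | awk -F ';' '{print $2}'`   # 1:1,44:1:1
accuracy_limit=`echo ${model_info} | awk -F ';' '{print $3}'` # 10
echo "${model_name}: num=${input_num} shapes=${input_shapes} limit=${accuracy_limit}"

For "decoder_step_201217_modified.pb 5;;0.5" the middle field is empty, so input_shapes expands to nothing and only the 0.5 accuracy limit is passed via --accuracyThreshold.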