From afb60d37fd08f8b66901b938ffff5d0e4c517afd Mon Sep 17 00:00:00 2001
From: unknown
Date: Tue, 9 Mar 2021 17:57:41 +0800
Subject: [PATCH] add deeplabv3 and ssd_vgg16 310 inference

	modified:   model_zoo/official/cv/deeplabv3/README.md
	modified:   model_zoo/official/cv/deeplabv3/README_CN.md
---
 model_zoo/official/cv/deeplabv3/README.md     |  2 +
 model_zoo/official/cv/deeplabv3/README_CN.md  |  1 -
 .../ascend310_infer/{src => }/CMakeLists.txt  |  6 +-
 .../ascend310_infer/{src => }/build.sh        | 14 ++++-
 .../ascend310_infer/fusion_switch.cfg         |  1 +
 .../cv/deeplabv3/ascend310_infer/src/main.cc  |  9 ++-
 .../official/cv/deeplabv3/postprocess.py      |  2 +
 .../cv/deeplabv3/scripts/run_infer_310.sh     |  7 +--
 model_zoo/official/cv/ssd/README.md           |  5 +-
 .../cv/ssd/{src => ascend310_infer}/aipp.cfg  |  0
 .../cv/ssd/ascend310_infer/src/main.cc        | 56 ++++++++++++++-----
 model_zoo/official/cv/ssd/export.py           |  2 +-
 model_zoo/official/cv/ssd/postprocess.py      |  7 +--
 .../official/cv/ssd/scripts/run_infer_310.sh  | 25 ++++++---
 14 files changed, 96 insertions(+), 41 deletions(-)
 rename model_zoo/official/cv/deeplabv3/ascend310_infer/{src => }/CMakeLists.txt (82%)
 rename model_zoo/official/cv/deeplabv3/ascend310_infer/{src => }/build.sh (75%)
 create mode 100644 model_zoo/official/cv/deeplabv3/ascend310_infer/fusion_switch.cfg
 rename model_zoo/official/cv/ssd/{src => ascend310_infer}/aipp.cfg (100%)

diff --git a/model_zoo/official/cv/deeplabv3/README.md b/model_zoo/official/cv/deeplabv3/README.md
index 92ca328679..666515f745 100644
--- a/model_zoo/official/cv/deeplabv3/README.md
+++ b/model_zoo/official/cv/deeplabv3/README.md
@@ -482,6 +482,8 @@ Note: There OS is output stride, and MS is multiscale.
 
 ## [Export MindIR](#contents)
 
+Currently, batch_size can only be set to 1.
+
 ```shell
 python export.py --ckpt_file [CKPT_PATH] --file_name [FILE_NAME] --file_format [FILE_FORMAT]
 ```
diff --git a/model_zoo/official/cv/deeplabv3/README_CN.md b/model_zoo/official/cv/deeplabv3/README_CN.md
index 89e4016b09..97a2197743 100644
--- a/model_zoo/official/cv/deeplabv3/README_CN.md
+++ b/model_zoo/official/cv/deeplabv3/README_CN.md
@@ -508,7 +508,6 @@ python export.py --ckpt_file [CKPT_PATH] --file_name [FILE_NAME] --file_format [
 
 ### 用法
 
-在执行推理前，air文件必须在910上通过export.py文件导出。
 目前仅可处理batch_Size为1。
 
 ```shell
diff --git a/model_zoo/official/cv/deeplabv3/ascend310_infer/src/CMakeLists.txt b/model_zoo/official/cv/deeplabv3/ascend310_infer/CMakeLists.txt
similarity index 82%
rename from model_zoo/official/cv/deeplabv3/ascend310_infer/src/CMakeLists.txt
rename to model_zoo/official/cv/deeplabv3/ascend310_infer/CMakeLists.txt
index 9550b6a74a..ee3c854473 100644
--- a/model_zoo/official/cv/deeplabv3/ascend310_infer/src/CMakeLists.txt
+++ b/model_zoo/official/cv/deeplabv3/ascend310_infer/CMakeLists.txt
@@ -1,14 +1,14 @@
 cmake_minimum_required(VERSION 3.14.1)
-project(MindSporeCxxTestcase[CXX])
+project(Ascend310Infer)
 add_compile_definitions(_GLIBCXX_USE_CXX11_ABI=0)
 set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O0 -g -std=c++17 -Werror -Wall -fPIE -Wl,--allow-shlib-undefined")
 set(PROJECT_SRC_ROOT ${CMAKE_CURRENT_LIST_DIR}/)
 option(MINDSPORE_PATH "mindspore install path" "")
 include_directories(${MINDSPORE_PATH})
 include_directories(${MINDSPORE_PATH}/include)
-include_directories(${PROJECT_SRC_ROOT}/../inc)
+include_directories(${PROJECT_SRC_ROOT})
 find_library(MS_LIB libmindspore.so ${MINDSPORE_PATH}/lib)
 file(GLOB_RECURSE MD_LIB ${MINDSPORE_PATH}/_c_dataengine*)
-add_executable(main main.cc utils.cc)
+add_executable(main src/main.cc src/utils.cc)
 target_link_libraries(main
                        ${MS_LIB} ${MD_LIB} gflags)
diff --git a/model_zoo/official/cv/deeplabv3/ascend310_infer/src/build.sh b/model_zoo/official/cv/deeplabv3/ascend310_infer/build.sh
similarity index 75%
rename from model_zoo/official/cv/deeplabv3/ascend310_infer/src/build.sh
rename to model_zoo/official/cv/deeplabv3/ascend310_infer/build.sh
index 7fac9cff3a..6825591942 100644
--- a/model_zoo/official/cv/deeplabv3/ascend310_infer/src/build.sh
+++ b/model_zoo/official/cv/deeplabv3/ascend310_infer/build.sh
@@ -13,6 +13,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # ============================================================================
+if [ ! -d out ]; then
+    mkdir out
+fi
 
-cmake . -DMINDSPORE_PATH="`pip3.7 show mindspore-ascend | grep Location | awk '{print $2"/mindspore"}' | xargs realpath`"
-make
\ No newline at end of file
+cd out
+
+if [ -f "Makefile" ]; then
+    make clean
+fi
+
+cmake .. \
+    -DMINDSPORE_PATH="`pip3.7 show mindspore-ascend | grep Location | awk '{print $2"/mindspore"}' | xargs realpath`"
+make
diff --git a/model_zoo/official/cv/deeplabv3/ascend310_infer/fusion_switch.cfg b/model_zoo/official/cv/deeplabv3/ascend310_infer/fusion_switch.cfg
new file mode 100644
index 0000000000..f5fadd5938
--- /dev/null
+++ b/model_zoo/official/cv/deeplabv3/ascend310_infer/fusion_switch.cfg
@@ -0,0 +1 @@
+ConvBatchnormFusionPass:off
\ No newline at end of file
diff --git a/model_zoo/official/cv/deeplabv3/ascend310_infer/src/main.cc b/model_zoo/official/cv/deeplabv3/ascend310_infer/src/main.cc
index f627d52d9e..cc2c65877a 100644
--- a/model_zoo/official/cv/deeplabv3/ascend310_infer/src/main.cc
+++ b/model_zoo/official/cv/deeplabv3/ascend310_infer/src/main.cc
@@ -52,6 +52,7 @@ using mindspore::dataset::vision::Decode;
 
 DEFINE_string(mindir_path, "", "mindir path");
 DEFINE_string(dataset_path, ".", "dataset path");
+DEFINE_string(fusion_switch_path, ".", "fusion switch path");
 DEFINE_int32(device_id, 0, "device id");
 
 int PadImage(const MSTensor &input, MSTensor *output) {
@@ -122,11 +123,17 @@ int main(int argc, char **argv) {
     std::cout << "Invalid mindir" << std::endl;
     return 1;
   }
-
+  if (RealPath(FLAGS_fusion_switch_path).empty()) {
+    std::cout << "Invalid fusion switch path" << std::endl;
+    return 1;
+  }
   GlobalContext::SetGlobalDeviceTarget(mindspore::kDeviceTypeAscend310);
   GlobalContext::SetGlobalDeviceID(FLAGS_device_id);
   auto graph = Serialization::LoadModel(FLAGS_mindir_path, ModelType::kMindIR);
   auto model_context = std::make_shared<mindspore::ModelContext>();
+  if (!FLAGS_fusion_switch_path.empty()) {
+    ModelContext::SetFusionSwitchConfigPath(model_context, FLAGS_fusion_switch_path);
+  }
   Model model(GraphCell(graph), model_context);
   Status ret = model.Build();
   if (ret != kSuccess) {
diff --git a/model_zoo/official/cv/deeplabv3/postprocess.py b/model_zoo/official/cv/deeplabv3/postprocess.py
index 67b517e9b2..07122538be 100644
--- a/model_zoo/official/cv/deeplabv3/postprocess.py
+++ b/model_zoo/official/cv/deeplabv3/postprocess.py
@@ -75,6 +75,8 @@ def eval_batch_scales(args, eval_net, img_lst, scales,
 
 def acc_cal():
     args = parse_args()
+    args.image_mean = [103.53, 116.28, 123.675]
+    args.image_std = [57.375, 57.120, 58.395]
     # data list
     with open(args.data_lst) as f:
         img_lst = f.readlines()
diff --git a/model_zoo/official/cv/deeplabv3/scripts/run_infer_310.sh b/model_zoo/official/cv/deeplabv3/scripts/run_infer_310.sh
index 44b8c1ba24..fd379a47ca 100644
--- a/model_zoo/official/cv/deeplabv3/scripts/run_infer_310.sh
+++ b/model_zoo/official/cv/deeplabv3/scripts/run_infer_310.sh
@@ -60,10 +60,7 @@ fi
 
 function compile_app()
 {
-    cd ../ascend310_infer/src
-    if [ -f "Makefile" ]; then
-        make clean
-    fi
+    cd ../ascend310_infer
     bash build.sh &> build.log
 }
 
@@ -78,7 +75,7 @@ function infer()
     fi
     mkdir result_Files
     mkdir time_Result
-    ../ascend310_infer/src/main --mindir_path=$model --dataset_path=$data_path --device_id=$device_id &> infer.log
+    ../ascend310_infer/out/main --mindir_path=$model --dataset_path=$data_path --device_id=$device_id --fusion_switch_path=../ascend310_infer/fusion_switch.cfg &> infer.log
 }
 
 function cal_acc()
diff --git a/model_zoo/official/cv/ssd/README.md b/model_zoo/official/cv/ssd/README.md
index de505b55fa..c61b9463e7 100644
--- a/model_zoo/official/cv/ssd/README.md
+++ b/model_zoo/official/cv/ssd/README.md
@@ -410,10 +410,11 @@ Current batch_Size can only be set to 1. The precision calculation process needs
 
 ```shell
 # Ascend310 inference
-bash run_infer_310.sh [MINDIR_PATH] [DATA_PATH] [DEVICE_ID]
+bash run_infer_310.sh [MINDIR_PATH] [DATA_PATH] [DVPP] [DEVICE_ID]
 ```
 
-`DEVICE_ID` is optional, default value is 0.
+- `DVPP` is mandatory, and must be chosen from ["DVPP", "CPU"]; it is case-insensitive. Note that the image shape of ssd_vgg16 inference is [300, 300], while the DVPP hardware restricts width to 16-alignment and height to even-alignment, so this network needs to use CPU operators to process images.
+- `DEVICE_ID` is optional, default value is 0.
 
 ### result
 
diff --git a/model_zoo/official/cv/ssd/src/aipp.cfg b/model_zoo/official/cv/ssd/ascend310_infer/aipp.cfg
similarity index 100%
rename from model_zoo/official/cv/ssd/src/aipp.cfg
rename to model_zoo/official/cv/ssd/ascend310_infer/aipp.cfg
diff --git a/model_zoo/official/cv/ssd/ascend310_infer/src/main.cc b/model_zoo/official/cv/ssd/ascend310_infer/src/main.cc
index f1cd97f81f..b696d79f8f 100644
--- a/model_zoo/official/cv/ssd/ascend310_infer/src/main.cc
+++ b/model_zoo/official/cv/ssd/ascend310_infer/src/main.cc
@@ -29,7 +29,7 @@
 #include "include/api/serialization.h"
 #include "include/minddata/dataset/include/vision_ascend.h"
 #include "include/minddata/dataset/include/execute.h"
-
+#include "include/minddata/dataset/include/vision.h"
 #include "inc/utils.h"
 
 using mindspore::GlobalContext;
@@ -42,13 +42,20 @@ using mindspore::GraphCell;
 using mindspore::kSuccess;
 using mindspore::MSTensor;
 using mindspore::dataset::Execute;
+using mindspore::dataset::TensorTransform;
 using mindspore::dataset::vision::DvppDecodeResizeJpeg;
-
+using mindspore::dataset::vision::Resize;
+using mindspore::dataset::vision::HWC2CHW;
+using mindspore::dataset::vision::Normalize;
+using mindspore::dataset::vision::Decode;
 
 DEFINE_string(mindir_path, "", "mindir path");
 DEFINE_string(dataset_path, ".", "dataset path");
 DEFINE_int32(device_id, 0, "device id");
 DEFINE_string(aipp_path, "./aipp.cfg", "aipp path");
+DEFINE_string(cpu_dvpp, "DVPP", "cpu or dvpp process");
+DEFINE_int32(image_height, 640, "image height");
+DEFINE_int32(image_width, 640, "image width");
 
 int main(int argc, char **argv) {
   gflags::ParseCommandLineFlags(&argc, &argv, true);
@@ -56,17 +63,18 @@
     std::cout << "Invalid mindir" << std::endl;
     return 1;
   }
-  if (RealPath(FLAGS_aipp_path).empty()) {
-    std::cout << "Invalid aipp path" << std::endl;
-    return 1;
-  }
 
   GlobalContext::SetGlobalDeviceTarget(mindspore::kDeviceTypeAscend310);
   GlobalContext::SetGlobalDeviceID(FLAGS_device_id);
   auto graph = Serialization::LoadModel(FLAGS_mindir_path, ModelType::kMindIR);
   auto model_context = std::make_shared<mindspore::ModelContext>();
-  if (!FLAGS_aipp_path.empty()) {
-    ModelContext::SetInsertOpConfigPath(model_context, FLAGS_aipp_path);
+  if (FLAGS_cpu_dvpp == "DVPP") {
+    if (RealPath(FLAGS_aipp_path).empty()) {
+      std::cout << "Invalid aipp path" << std::endl;
+      return 1;
+    } else {
+      ModelContext::SetInsertOpConfigPath(model_context, FLAGS_aipp_path);
+    }
   }
 
   Model model(GraphCell(graph), model_context);
@@ -84,7 +92,7 @@ int main(int argc, char **argv) {
 
   std::map<double, double> costTime_map;
   size_t size = all_files.size();
-  Execute resize_op(std::shared_ptr<DvppDecodeResizeJpeg>(new DvppDecodeResizeJpeg({640, 640})));
+
   for (size_t i = 0; i < size; ++i) {
     struct timeval start = {0};
     struct timeval end = {0};
@@ -93,11 +101,33 @@
     std::vector<MSTensor> inputs;
     std::vector<MSTensor> outputs;
     std::cout << "Start predict input files:" << all_files[i] << std::endl;
-    auto imgDvpp = std::make_shared<MSTensor>();
-    resize_op(ReadFileToTensor(all_files[i]), imgDvpp.get());
-
-    inputs.emplace_back(imgDvpp->Name(), imgDvpp->DataType(), imgDvpp->Shape(),
+    if (FLAGS_cpu_dvpp == "DVPP") {
+      auto resizeShape = {static_cast<uint32_t>(FLAGS_image_height), static_cast<uint32_t>(FLAGS_image_width)};
+      Execute resize_op(std::shared_ptr<TensorTransform>(new DvppDecodeResizeJpeg(resizeShape)));
+      auto imgDvpp = std::make_shared<MSTensor>();
+      resize_op(ReadFileToTensor(all_files[i]), imgDvpp.get());
+      inputs.emplace_back(imgDvpp->Name(), imgDvpp->DataType(), imgDvpp->Shape(),
                         imgDvpp->Data().get(), imgDvpp->DataSize());
+    } else {
+      std::shared_ptr<TensorTransform> decode(new Decode());
+      std::shared_ptr<TensorTransform> hwc2chw(new HWC2CHW());
+      std::shared_ptr<TensorTransform> normalize(
+        new Normalize({123.675, 116.28, 103.53}, {58.395, 57.120, 57.375}));
+      auto resizeShape = {FLAGS_image_height, FLAGS_image_width};
+      std::shared_ptr<TensorTransform> resize(new Resize(resizeShape));
+      Execute composeDecode({decode, resize, normalize, hwc2chw});
+      auto img = MSTensor();
+      auto image = ReadFileToTensor(all_files[i]);
+      composeDecode(image, &img);
+      std::vector<MSTensor> model_inputs = model.GetInputs();
+      if (model_inputs.empty()) {
+        std::cout << "Invalid model, inputs is empty." << std::endl;
+        return 1;
+      }
+      inputs.emplace_back(model_inputs[0].Name(), model_inputs[0].DataType(), model_inputs[0].Shape(),
+                          img.Data().get(), img.DataSize());
+    }
+
     gettimeofday(&start, nullptr);
     ret = model.Predict(inputs, &outputs);
     gettimeofday(&end, nullptr);
diff --git a/model_zoo/official/cv/ssd/export.py b/model_zoo/official/cv/ssd/export.py
index 4aff4e553d..c9c284cf51 100644
--- a/model_zoo/official/cv/ssd/export.py
+++ b/model_zoo/official/cv/ssd/export.py
@@ -28,7 +28,7 @@ parser.add_argument("--device_id", type=int, default=0, help="Device id")
 parser.add_argument("--batch_size", type=int, default=1, help="batch size")
 parser.add_argument("--ckpt_file", type=str, required=True, help="Checkpoint file path.")
 parser.add_argument("--file_name", type=str, default="ssd", help="output file name.")
-parser.add_argument('--file_format', type=str, choices=["AIR", "ONNX", "MINDIR"], default='AIR', help='file format')
+parser.add_argument('--file_format', type=str, choices=["AIR", "MINDIR"], default='AIR', help='file format')
 parser.add_argument("--device_target", type=str, choices=["Ascend", "GPU", "CPU"], default="Ascend",
                     help="device target")
 args = parser.parse_args()
diff --git a/model_zoo/official/cv/ssd/postprocess.py b/model_zoo/official/cv/ssd/postprocess.py
index 05c570817f..44f6964c29 100644
--- a/model_zoo/official/cv/ssd/postprocess.py
+++ b/model_zoo/official/cv/ssd/postprocess.py
@@ -22,7 +22,7 @@ from src.config import config
 from src.eval_utils import metrics
 
 batch_size = 1
-parser = argparse.ArgumentParser(description="ssd_mobilenet_v1_fpn inference")
+parser = argparse.ArgumentParser(description="ssd acc calculation")
 parser.add_argument("--result_path", type=str, required=True, help="result files path.")
 parser.add_argument("--img_path", type=str, required=True, help="image file path.")
 parser.add_argument("--drop", action="store_true", help="drop iscrowd images or not.")
@@ -73,9 +73,8 @@ def get_result(result_path, img_id_file_path):
             image_shape = np.array([img_size[1], img_size[0]])
             result_path_0 = os.path.join(result_path, img_ids_name + "_0.bin")
             result_path_1 = os.path.join(result_path, img_ids_name + "_1.bin")
-
-            boxes = np.fromfile(result_path_0, dtype=np.float32).reshape(51150, 4)
-            box_scores = np.fromfile(result_path_1, dtype=np.float32).reshape(51150, 81)
+            boxes = np.fromfile(result_path_0, dtype=np.float32).reshape(config.num_ssd_boxes, 4)
+            box_scores = np.fromfile(result_path_1, dtype=np.float32).reshape(config.num_ssd_boxes, config.num_classes)
 
             pred_data.append({
                 "boxes": boxes,
diff --git a/model_zoo/official/cv/ssd/scripts/run_infer_310.sh b/model_zoo/official/cv/ssd/scripts/run_infer_310.sh
index 59be2e3cd9..2cc265ca7a 100644
--- a/model_zoo/official/cv/ssd/scripts/run_infer_310.sh
+++ b/model_zoo/official/cv/ssd/scripts/run_infer_310.sh
@@ -14,8 +14,9 @@
 # limitations under the License.
 # ============================================================================
 
-if [[ $# -lt 2 || $# -gt 3 ]]; then
-    echo "Usage: sh run_infer_310.sh [MINDIR_PATH] [DATA_PATH] [DEVICE_ID]
+if [[ $# -lt 3 || $# -gt 4 ]]; then
+    echo "Usage: sh run_infer_310.sh [MINDIR_PATH] [DATA_PATH] [DVPP] [DEVICE_ID]
+    DVPP is mandatory, and must be chosen from [DVPP|CPU]; it is case-insensitive
     DEVICE_ID is optional, it can be set by environment variable device_id, otherwise the value is zero"
     exit 1
 fi
@@ -29,14 +30,16 @@ get_real_path(){
 }
 model=$(get_real_path $1)
 data_path=$(get_real_path $2)
+DVPP=${3^^}
 
 device_id=0
-if [ $# == 3 ]; then
-    device_id=$3
+if [ $# == 4 ]; then
+    device_id=$4
 fi
 
 echo "mindir name: "$model
 echo "dataset path: "$data_path
+echo "image process mode: "$DVPP
 echo "device id: "$device_id
 
 export ASCEND_HOME=/usr/local/Ascend/
@@ -56,10 +59,7 @@ fi
 function compile_app()
 {
     cd ../ascend310_infer
-    if [ -f "Makefile" ]; then
-        make clean
-    fi
-    sh build.sh &> build.log
+    sh build.sh &> build.log
 }
 
 function infer()
@@ -73,7 +73,14 @@ function infer()
     fi
     mkdir result_Files
     mkdir time_Result
-    ../ascend310_infer/out/main --mindir_path=$model --dataset_path=$data_path --device_id=$device_id --aipp_path ../src/aipp.cfg &> infer.log
+    if [ "$DVPP" == "DVPP" ];then
+        ../ascend310_infer/out/main --mindir_path=$model --dataset_path=$data_path --device_id=$device_id --cpu_dvpp=$DVPP --aipp_path=../ascend310_infer/aipp.cfg --image_height=640 --image_width=640 &> infer.log
+    elif [ "$DVPP" == "CPU" ]; then
+        ../ascend310_infer/out/main --mindir_path=$model --dataset_path=$data_path --cpu_dvpp=$DVPP --device_id=$device_id --image_height=300 --image_width=300 &> infer.log
+    else
+      echo "image process mode must be in [DVPP|CPU]"
+      exit 1
+    fi
 }
 
 function cal_acc()