From a60957f3861aa7d9477c07abe8ae7c556621a72c Mon Sep 17 00:00:00 2001 From: Sylwester Fraczek Date: Wed, 7 Nov 2018 13:10:12 +0100 Subject: [PATCH 1/5] add test_analyzer_mobilenet --- paddle/fluid/inference/analysis/analyzer.h | 6 +- .../fluid/inference/tests/api/CMakeLists.txt | 8 ++ .../tests/api/analyzer_mobilenet_tester.cc | 108 ++++++++++++++++++ 3 files changed, 120 insertions(+), 2 deletions(-) create mode 100644 paddle/fluid/inference/tests/api/analyzer_mobilenet_tester.cc diff --git a/paddle/fluid/inference/analysis/analyzer.h b/paddle/fluid/inference/analysis/analyzer.h index 3af1d572df..b5dc1fbbe7 100644 --- a/paddle/fluid/inference/analysis/analyzer.h +++ b/paddle/fluid/inference/analysis/analyzer.h @@ -66,7 +66,10 @@ class Analyzer : public OrderedRegistry { // merged in a larger fuse op. The small fusion will not break the pattern of // larger fusion. const std::vector all_ir_passes_{{ - // Manual update the passes here. +// Manual update the passes here. +#ifdef PADDLE_WITH_MKLDNN + "depthwise_conv_mkldnn_pass", // +#endif "attention_lstm_fuse_pass", // "seqconv_eltadd_relu_fuse_pass", // "embedding_fc_lstm_fuse_pass", // @@ -79,7 +82,6 @@ class Analyzer : public OrderedRegistry { "conv_bn_fuse_pass", // "conv_eltwiseadd_bn_fuse_pass", // #ifdef PADDLE_WITH_MKLDNN - "depthwise_conv_mkldnn_pass", // "conv_bias_mkldnn_fuse_pass", // "conv_relu_mkldnn_fuse_pass", // "conv_elementwise_add_mkldnn_fuse_pass", // diff --git a/paddle/fluid/inference/tests/api/CMakeLists.txt b/paddle/fluid/inference/tests/api/CMakeLists.txt index 2ca84c8005..10ad252305 100644 --- a/paddle/fluid/inference/tests/api/CMakeLists.txt +++ b/paddle/fluid/inference/tests/api/CMakeLists.txt @@ -82,6 +82,14 @@ inference_analysis_api_test(test_analyzer_ocr ${OCR_INSTALL_DIR} analyzer_vis_te inference_analysis_api_test_with_fake_data(test_analyzer_resnet50 "${INFERENCE_DEMO_INSTALL_DIR}/resnet50" analyzer_resnet50_tester.cc "resnet50_model.tar.gz") +# mobilenet 
+set(MOBILENET_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/mobilenet") +if (NOT EXISTS ${MOBILENET_INSTALL_DIR}) + inference_download_and_uncompress(${MOBILENET_INSTALL_DIR} "http://paddle-inference-dist.bj.bcebos.com/tensorrt_test" "mobilenet.tar.gz") +endif() +inference_analysis_test(test_analyzer_mobilenet SRCS analyzer_mobilenet_tester.cc + EXTRA_DEPS ${INFERENCE_EXTRA_DEPS} ARGS --infer_model=${MOBILENET_INSTALL_DIR}/mobilenet) + # anakin if (WITH_ANAKIN AND WITH_MKL) # only needed in CI # anakin rnn1 diff --git a/paddle/fluid/inference/tests/api/analyzer_mobilenet_tester.cc b/paddle/fluid/inference/tests/api/analyzer_mobilenet_tester.cc new file mode 100644 index 0000000000..94ded50e65 --- /dev/null +++ b/paddle/fluid/inference/tests/api/analyzer_mobilenet_tester.cc @@ -0,0 +1,108 @@ +/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
*/ + +#include +#include +#include "paddle/fluid/inference/tests/api/tester_helper.h" + +namespace paddle { +namespace inference { +namespace analysis { + +void SetConfig(AnalysisConfig *cfg) { + cfg->model_dir = FLAGS_infer_model; + cfg->use_gpu = false; + cfg->device = 0; + cfg->enable_ir_optim = true; + cfg->specify_input_name = true; +} + +void SetInput(std::vector> *inputs) { + PADDLE_ENFORCE_EQ(FLAGS_test_all_data, 0, "Only have single batch of data."); + + PaddleTensor input; + // channel=3, height/width=318 + std::vector shape({FLAGS_batch_size, 3, 318, 318}); + input.shape = shape; + input.dtype = PaddleDType::FLOAT32; + + // fill input data, for profile easily, do not use random data here. + size_t size = FLAGS_batch_size * 3 * 318 * 318; + input.data.Resize(size * sizeof(float)); + float *input_data = static_cast(input.data.data()); + for (size_t i = 0; i < size; i++) { + *(input_data + i) = static_cast(i) / size; + } + + std::vector input_slots; + input_slots.assign({input}); + (*inputs).emplace_back(input_slots); +} + +// Easy for profiling independently. 
+void profile(bool use_mkldnn = false) { + AnalysisConfig cfg; + SetConfig(&cfg); + cfg._use_mkldnn = use_mkldnn; + std::vector outputs; + + std::vector> input_slots_all; + SetInput(&input_slots_all); + TestPrediction(cfg, input_slots_all, &outputs, FLAGS_num_threads); + + if (FLAGS_num_threads == 1 && !FLAGS_test_all_data) { + PADDLE_ENFORCE_EQ(outputs.size(), 1UL); + size_t size = GetSize(outputs[0]); + // output is a 1000-dimension feature + EXPECT_EQ(size, 1000 * FLAGS_batch_size); + } +} + +TEST(Analyzer_mobilenet, profile) { profile(); } +#ifdef PADDLE_WITH_MKLDNN +TEST(Analyzer_mobilenet, profile_mkldnn) { profile(true /* use_mkldnn */); } +#endif + +// Check the depthwise_conv status +TEST(Analyzer_mobilenet, depthwise_conv_statis) { + AnalysisConfig cfg; + SetConfig(&cfg); + cfg._use_mkldnn = true; + int num_ops; + auto predictor = CreatePaddlePredictor(cfg); + auto fuse_statis = GetFuseStatis( + static_cast(predictor.get()), &num_ops); + ASSERT_TRUE(fuse_statis.count("depthwise_conv_mkldnn_pass")); + EXPECT_EQ(fuse_statis.at("depthwise_conv_mkldnn_pass"), 13); +} + +// Compare result of NativeConfig and AnalysisConfig +void compare(bool use_mkldnn = false) { + AnalysisConfig cfg; + SetConfig(&cfg); + cfg._use_mkldnn = use_mkldnn; + + std::vector> input_slots_all; + SetInput(&input_slots_all); + CompareNativeAndAnalysis(cfg, input_slots_all); +} + +TEST(Analyzer_mobilenet, compare) { compare(); } +#ifdef PADDLE_WITH_MKLDNN +TEST(Analyzer_mobilenet, compare_mkldnn) { compare(true /* use_mkldnn */); } +#endif + +} // namespace analysis +} // namespace inference +} // namespace paddle From f395075efce4548af70fbe5c0468bb372985e72b Mon Sep 17 00:00:00 2001 From: Sylwester Fraczek Date: Wed, 7 Nov 2018 14:45:29 +0100 Subject: [PATCH 2/5] rebased and stuff broke --- .../fluid/inference/tests/api/CMakeLists.txt | 1 + .../tests/api/analyzer_mobilenet_tester.cc | 32 ++----------------- 2 files changed, 4 insertions(+), 29 deletions(-) diff --git 
a/paddle/fluid/inference/tests/api/CMakeLists.txt b/paddle/fluid/inference/tests/api/CMakeLists.txt index 10ad252305..9b441b75ee 100644 --- a/paddle/fluid/inference/tests/api/CMakeLists.txt +++ b/paddle/fluid/inference/tests/api/CMakeLists.txt @@ -86,6 +86,7 @@ inference_analysis_api_test_with_fake_data(test_analyzer_resnet50 set(MOBILENET_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/mobilenet") if (NOT EXISTS ${MOBILENET_INSTALL_DIR}) inference_download_and_uncompress(${MOBILENET_INSTALL_DIR} "http://paddle-inference-dist.bj.bcebos.com/tensorrt_test" "mobilenet.tar.gz") + file(RENAME ${MOBILENET_INSTALL_DIR}/mobilenet/__model__ ${MOBILENET_INSTALL_DIR}/mobilenet/model) endif() inference_analysis_test(test_analyzer_mobilenet SRCS analyzer_mobilenet_tester.cc EXTRA_DEPS ${INFERENCE_EXTRA_DEPS} ARGS --infer_model=${MOBILENET_INSTALL_DIR}/mobilenet) diff --git a/paddle/fluid/inference/tests/api/analyzer_mobilenet_tester.cc b/paddle/fluid/inference/tests/api/analyzer_mobilenet_tester.cc index 94ded50e65..ea48019137 100644 --- a/paddle/fluid/inference/tests/api/analyzer_mobilenet_tester.cc +++ b/paddle/fluid/inference/tests/api/analyzer_mobilenet_tester.cc @@ -29,25 +29,7 @@ void SetConfig(AnalysisConfig *cfg) { } void SetInput(std::vector> *inputs) { - PADDLE_ENFORCE_EQ(FLAGS_test_all_data, 0, "Only have single batch of data."); - - PaddleTensor input; - // channel=3, height/width=318 - std::vector shape({FLAGS_batch_size, 3, 318, 318}); - input.shape = shape; - input.dtype = PaddleDType::FLOAT32; - - // fill input data, for profile easily, do not use random data here. 
- size_t size = FLAGS_batch_size * 3 * 318 * 318; - input.data.Resize(size * sizeof(float)); - float *input_data = static_cast(input.data.data()); - for (size_t i = 0; i < size; i++) { - *(input_data + i) = static_cast(i) / size; - } - - std::vector input_slots; - input_slots.assign({input}); - (*inputs).emplace_back(input_slots); + SetFakeImageInput(inputs, FLAGS_infer_model); } // Easy for profiling independently. @@ -60,13 +42,6 @@ void profile(bool use_mkldnn = false) { std::vector> input_slots_all; SetInput(&input_slots_all); TestPrediction(cfg, input_slots_all, &outputs, FLAGS_num_threads); - - if (FLAGS_num_threads == 1 && !FLAGS_test_all_data) { - PADDLE_ENFORCE_EQ(outputs.size(), 1UL); - size_t size = GetSize(outputs[0]); - // output is a 1000-dimension feature - EXPECT_EQ(size, 1000 * FLAGS_batch_size); - } } TEST(Analyzer_mobilenet, profile) { profile(); } @@ -74,7 +49,7 @@ TEST(Analyzer_mobilenet, profile) { profile(); } TEST(Analyzer_mobilenet, profile_mkldnn) { profile(true /* use_mkldnn */); } #endif -// Check the depthwise_conv status +// Check the depthwise_conv pass status TEST(Analyzer_mobilenet, depthwise_conv_statis) { AnalysisConfig cfg; SetConfig(&cfg); @@ -83,8 +58,7 @@ TEST(Analyzer_mobilenet, depthwise_conv_statis) { auto predictor = CreatePaddlePredictor(cfg); auto fuse_statis = GetFuseStatis( static_cast(predictor.get()), &num_ops); - ASSERT_TRUE(fuse_statis.count("depthwise_conv_mkldnn_pass")); - EXPECT_EQ(fuse_statis.at("depthwise_conv_mkldnn_pass"), 13); + LOG(INFO) << "num_ops: " << num_ops; } // Compare result of NativeConfig and AnalysisConfig From 1987d45e7517edb86167511bf1b8d8125f908917 Mon Sep 17 00:00:00 2001 From: Sylwester Fraczek Date: Thu, 8 Nov 2018 15:28:21 +0100 Subject: [PATCH 3/5] add comment for depthwise pass --- paddle/fluid/inference/analysis/analyzer.h | 1 + 1 file changed, 1 insertion(+) diff --git a/paddle/fluid/inference/analysis/analyzer.h b/paddle/fluid/inference/analysis/analyzer.h index 
b5dc1fbbe7..6edfc9dd11 100644 --- a/paddle/fluid/inference/analysis/analyzer.h +++ b/paddle/fluid/inference/analysis/analyzer.h @@ -68,6 +68,7 @@ class Analyzer : public OrderedRegistry { const std::vector all_ir_passes_{{ // Manual update the passes here. #ifdef PADDLE_WITH_MKLDNN + // This pass should run before any other convolution fuse. "depthwise_conv_mkldnn_pass", // #endif "attention_lstm_fuse_pass", // From b5f617fa9b41edb234181b06491b42b35414c4ad Mon Sep 17 00:00:00 2001 From: Sylwester Fraczek Date: Thu, 8 Nov 2018 16:21:29 +0100 Subject: [PATCH 4/5] make mobilenet test reuse resnet50 test --- .../fluid/inference/tests/api/CMakeLists.txt | 9 +- .../tests/api/analyzer_mobilenet_tester.cc | 82 ------------------- 2 files changed, 2 insertions(+), 89 deletions(-) delete mode 100644 paddle/fluid/inference/tests/api/analyzer_mobilenet_tester.cc diff --git a/paddle/fluid/inference/tests/api/CMakeLists.txt b/paddle/fluid/inference/tests/api/CMakeLists.txt index 9b441b75ee..401ef508bc 100644 --- a/paddle/fluid/inference/tests/api/CMakeLists.txt +++ b/paddle/fluid/inference/tests/api/CMakeLists.txt @@ -83,13 +83,8 @@ inference_analysis_api_test_with_fake_data(test_analyzer_resnet50 "${INFERENCE_DEMO_INSTALL_DIR}/resnet50" analyzer_resnet50_tester.cc "resnet50_model.tar.gz") # mobilenet -set(MOBILENET_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/mobilenet") -if (NOT EXISTS ${MOBILENET_INSTALL_DIR}) - inference_download_and_uncompress(${MOBILENET_INSTALL_DIR} "http://paddle-inference-dist.bj.bcebos.com/tensorrt_test" "mobilenet.tar.gz") - file(RENAME ${MOBILENET_INSTALL_DIR}/mobilenet/__model__ ${MOBILENET_INSTALL_DIR}/mobilenet/model) -endif() -inference_analysis_test(test_analyzer_mobilenet SRCS analyzer_mobilenet_tester.cc - EXTRA_DEPS ${INFERENCE_EXTRA_DEPS} ARGS --infer_model=${MOBILENET_INSTALL_DIR}/mobilenet) +inference_analysis_api_test_with_fake_data(test_analyzer_mobilenet + "${INFERENCE_DEMO_INSTALL_DIR}/mobilenet" analyzer_resnet50_tester.cc 
"mobilenet_model.tar.gz") # anakin if (WITH_ANAKIN AND WITH_MKL) # only needed in CI diff --git a/paddle/fluid/inference/tests/api/analyzer_mobilenet_tester.cc b/paddle/fluid/inference/tests/api/analyzer_mobilenet_tester.cc deleted file mode 100644 index ea48019137..0000000000 --- a/paddle/fluid/inference/tests/api/analyzer_mobilenet_tester.cc +++ /dev/null @@ -1,82 +0,0 @@ -/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. */ - -#include -#include -#include "paddle/fluid/inference/tests/api/tester_helper.h" - -namespace paddle { -namespace inference { -namespace analysis { - -void SetConfig(AnalysisConfig *cfg) { - cfg->model_dir = FLAGS_infer_model; - cfg->use_gpu = false; - cfg->device = 0; - cfg->enable_ir_optim = true; - cfg->specify_input_name = true; -} - -void SetInput(std::vector> *inputs) { - SetFakeImageInput(inputs, FLAGS_infer_model); -} - -// Easy for profiling independently. 
-void profile(bool use_mkldnn = false) { - AnalysisConfig cfg; - SetConfig(&cfg); - cfg._use_mkldnn = use_mkldnn; - std::vector outputs; - - std::vector> input_slots_all; - SetInput(&input_slots_all); - TestPrediction(cfg, input_slots_all, &outputs, FLAGS_num_threads); -} - -TEST(Analyzer_mobilenet, profile) { profile(); } -#ifdef PADDLE_WITH_MKLDNN -TEST(Analyzer_mobilenet, profile_mkldnn) { profile(true /* use_mkldnn */); } -#endif - -// Check the depthwise_conv pass status -TEST(Analyzer_mobilenet, depthwise_conv_statis) { - AnalysisConfig cfg; - SetConfig(&cfg); - cfg._use_mkldnn = true; - int num_ops; - auto predictor = CreatePaddlePredictor(cfg); - auto fuse_statis = GetFuseStatis( - static_cast(predictor.get()), &num_ops); - LOG(INFO) << "num_ops: " << num_ops; -} - -// Compare result of NativeConfig and AnalysisConfig -void compare(bool use_mkldnn = false) { - AnalysisConfig cfg; - SetConfig(&cfg); - cfg._use_mkldnn = use_mkldnn; - - std::vector> input_slots_all; - SetInput(&input_slots_all); - CompareNativeAndAnalysis(cfg, input_slots_all); -} - -TEST(Analyzer_mobilenet, compare) { compare(); } -#ifdef PADDLE_WITH_MKLDNN -TEST(Analyzer_mobilenet, compare_mkldnn) { compare(true /* use_mkldnn */); } -#endif - -} // namespace analysis -} // namespace inference -} // namespace paddle From d318583eb529d7b2fe39ce8ee73a6686762add33 Mon Sep 17 00:00:00 2001 From: Sylwester Fraczek Date: Thu, 15 Nov 2018 10:04:25 +0100 Subject: [PATCH 5/5] rename mobilenet dir to mobilenet_depthwise_conv test=develop --- paddle/fluid/inference/tests/api/CMakeLists.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/paddle/fluid/inference/tests/api/CMakeLists.txt b/paddle/fluid/inference/tests/api/CMakeLists.txt index fe0937da10..3f765d1d41 100644 --- a/paddle/fluid/inference/tests/api/CMakeLists.txt +++ b/paddle/fluid/inference/tests/api/CMakeLists.txt @@ -82,9 +82,9 @@ inference_analysis_api_test(test_analyzer_ocr ${OCR_INSTALL_DIR} analyzer_vis_te 
inference_analysis_api_test_with_fake_data(test_analyzer_resnet50 "${INFERENCE_DEMO_INSTALL_DIR}/resnet50" analyzer_resnet50_tester.cc "resnet50_model.tar.gz") -# mobilenet +# mobilenet with depthwise_conv op inference_analysis_api_test_with_fake_data(test_analyzer_mobilenet - "${INFERENCE_DEMO_INSTALL_DIR}/mobilenet" analyzer_resnet50_tester.cc "mobilenet_model.tar.gz") + "${INFERENCE_DEMO_INSTALL_DIR}/mobilenet_depthwise_conv" analyzer_resnet50_tester.cc "mobilenet_model.tar.gz") # anakin if (WITH_ANAKIN AND WITH_MKL) # only needed in CI