@@ -18,7 +18,6 @@ limitations under the License. */
 #include <string>
 #include <vector>
 
-#include "paddle/fluid/framework/ir/graph_to_program_pass.h"
 #include "paddle/fluid/framework/lod_tensor.h"
 #include "paddle/fluid/inference/io.h"
 #include "paddle/fluid/platform/profiler.h"
@@ -94,15 +93,15 @@ void CheckError(const paddle::framework::LoDTensor& output1,
 
 std::unique_ptr<paddle::framework::ProgramDesc> InitProgram(
     paddle::framework::Executor* executor, paddle::framework::Scope* scope,
-    const std::string& dirname, const bool is_combined = false) {
+    const std::string& dirname, const bool is_combined = false,
+    const std::string& prog_filename = "__model_combined__",
+    const std::string& param_filename = "__params_combined__") {
   std::unique_ptr<paddle::framework::ProgramDesc> inference_program;
   if (is_combined) {
     // All parameters are saved in a single file.
     // Hard-coding the file names of program and parameters in unittest.
     // The file names should be consistent with that used in Python API
     //  `fluid.io.save_inference_model`.
-    std::string prog_filename = "__model_combined__";
-    std::string param_filename = "__params_combined__";
     inference_program =
         paddle::inference::Load(executor, scope, dirname + "/" + prog_filename,
                                 dirname + "/" + param_filename);
@@ -115,12 +114,15 @@ std::unique_ptr<paddle::framework::ProgramDesc> InitProgram(
 }
 
 std::vector<std::vector<int64_t>> GetFeedTargetShapes(
-    const std::string& dirname, const bool is_combined = false) {
+    const std::string& dirname, const bool is_combined = false,
+    const std::string& prog_filename = "__model_combined__",
+    const std::string& param_filename = "__params_combined__") {
   auto place = paddle::platform::CPUPlace();
   auto executor = paddle::framework::Executor(place);
   auto* scope = new paddle::framework::Scope();
 
-  auto inference_program = InitProgram(&executor, scope, dirname, is_combined);
+  auto inference_program = InitProgram(&executor, scope, dirname, is_combined,
+                                       prog_filename, param_filename);
   auto& global_block = inference_program->Block(0);
 
   const std::vector<std::string>& feed_target_names =
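Note: because both new parameters default to the old hard-coded values, existing call sites keep compiling unchanged; only tests whose combined model was saved under other names need to pass the extra arguments. A minimal caller sketch of the two signatures above (the directory and the "model"/"params" file names are hypothetical, not taken from this patch):

  const std::string dirname = "/tmp/model_dir";  // hypothetical test directory
  paddle::framework::Executor executor(paddle::platform::CPUPlace());
  paddle::framework::Scope scope;
  // Load a combined model saved by fluid.io.save_inference_model with
  // non-default file names.
  auto program = InitProgram(&executor, &scope, dirname,
                             /*is_combined=*/true,
                             /*prog_filename=*/"model",
                             /*param_filename=*/"params");
  // The same override is forwarded by GetFeedTargetShapes.
  auto shapes = GetFeedTargetShapes(dirname, /*is_combined=*/true,
                                    "model", "params");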
@@ -136,15 +138,6 @@ std::vector<std::vector<int64_t>> GetFeedTargetShapes(
   return feed_target_shapes;
 }
 
-void Compile(paddle::framework::ProgramDesc* program) {
-  std::unique_ptr<paddle::framework::ir::Graph> g(
-      new paddle::framework::ir::Graph(*program));
-  auto pass = paddle::framework::ir::PassRegistry::Instance().Get(
-      "graph_to_program_pass");
-  pass->SetNotOwned<paddle::framework::ProgramDesc>("program", program);
-  pass->Apply(std::move(g));
-}
-
 template <typename Place, bool CreateVars = true, bool PrepareContext = false>
 void TestInference(const std::string& dirname,
                    const std::vector<paddle::framework::LoDTensor*>& cpu_feeds,
@@ -182,7 +175,6 @@ void TestInference(const std::string& dirname,
         paddle::platform::DeviceContextPool::Instance().Get(place));
     inference_program = InitProgram(&executor, scope, dirname, is_combined);
   }
-  Compile(inference_program.get());
 
   // Disable the profiler and print the timing information
   paddle::platform::DisableProfiler(paddle::platform::EventSortingKey::kDefault,
@@ -261,5 +253,3 @@ void TestInference(const std::string& dirname,
 
   delete scope;
 }
-
-USE_PASS(graph_to_program_pass);
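Note: with the include, the Compile() helper, its call site, and the USE_PASS registration all removed, the test helper no longer round-trips the loaded ProgramDesc through the IR graph. A test that still wants that round-trip must carry it itself; a minimal sketch assembled from the removed '-' lines above, assuming the caller's translation unit includes graph_to_program_pass.h and declares USE_PASS(graph_to_program_pass):

  // Rewrite *program in place by converting it to an ir::Graph and back.
  void Compile(paddle::framework::ProgramDesc* program) {
    std::unique_ptr<paddle::framework::ir::Graph> g(
        new paddle::framework::ir::Graph(*program));
    auto pass = paddle::framework::ir::PassRegistry::Instance().Get(
        "graph_to_program_pass");
    pass->SetNotOwned<paddle::framework::ProgramDesc>("program", program);
    pass->Apply(std::move(g));  // serializes the graph back into *program
  }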