add benchmark for inference (#14571)
parent
c52f65e071
commit
923c8e3332
@ -0,0 +1,2 @@
|
||||
# Benchmark helper library for inference; depends on `enforce` for the
# PADDLE_ENFORCE error-checking macro used in benchmark.cc.
cc_library(benchmark SRCS benchmark.cc DEPS enforce)
# Unit test exercising the Benchmark struct (gtest-based).
cc_test(test_benchmark SRCS benchmark_tester.cc DEPS benchmark)
|
@ -0,0 +1,49 @@
|
||||
// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#include "paddle/fluid/inference/utils/benchmark.h"
|
||||
#include <sstream>
|
||||
#include "paddle/fluid/platform/enforce.h"
|
||||
|
||||
namespace paddle {
|
||||
namespace inference {
|
||||
|
||||
// Render this benchmark record as a human-readable, tab-separated table:
// a dashed separator, a header row, then one data row with the stored
// fields plus a derived qps (queries per second) column.
std::string Benchmark::SerializeToString() const {
  std::stringstream ss;
  ss << "-----------------------------------------------------\n";
  ss << "name\t";
  ss << "batch_size\t";
  ss << "num_threads\t";
  ss << "latency\t";
  ss << "qps";
  ss << '\n';

  ss << name_ << "\t";
  ss << batch_size_ << "\t";
  ss << num_threads_ << "\t";
  ss << latency_ << "\t";
  // qps = 1000 / latency_ms.  Use floating-point division: the original
  // integer division truncated (220 ms -> 4 instead of ~4.55), and guard
  // against division by zero when latency was never set.
  ss << (latency_ > 0 ? 1000.0 / latency_ : 0.0);
  ss << '\n';
  return ss.str();
}
|
||||
// Append the serialized record to the file at `path`; the file is opened
// in append mode so successive runs accumulate records.
void Benchmark::PersistToFile(const std::string &path) const {
  std::ofstream out(path, std::ios::app);
  PADDLE_ENFORCE(out.is_open(), "Can not open %s to add benchmark", path);
  const std::string record = SerializeToString();
  out << record;
  out.flush();
  out.close();
}
|
||||
|
||||
} // namespace inference
|
||||
} // namespace paddle
|
@ -0,0 +1,52 @@
|
||||
// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#include <fstream>
#include <iostream>
#include <string>
|
||||
|
||||
namespace paddle {
|
||||
namespace inference {
|
||||
|
||||
/*
|
||||
* Helper class to calculate the performance.
|
||||
*/
|
||||
/*
 * Helper class to calculate the performance.
 *
 * Collects a named workload's settings (batch size, thread count, device)
 * and its measured latency; SerializeToString()/PersistToFile() (defined in
 * benchmark.cc) report the record.
 */
struct Benchmark {
  int batch_size() const { return batch_size_; }
  void SetBatchSize(int x) { batch_size_ = x; }

  int num_threads() const { return num_threads_; }
  void SetNumThreads(int x) { num_threads_ = x; }

  bool use_gpu() const { return use_gpu_; }
  // One-way switch: once set, there is no setter to turn GPU mode back off.
  void SetUseGpu() { use_gpu_ = true; }

  // Latency for one run; presumably milliseconds (SerializeToString derives
  // qps as 1000 / latency) — confirm with callers.
  int latency() const { return latency_; }
  void SetLatency(int x) { latency_ = x; }

  const std::string& name() const { return name_; }
  void SetName(const std::string& name) { name_ = name; }

  // Format the record as a tab-separated table (see benchmark.cc).
  std::string SerializeToString() const;
  // Append the serialized record to the file at `path`.
  void PersistToFile(const std::string& path) const;

 private:
  bool use_gpu_{false};
  int batch_size_{0};
  // Was left uninitialized: reading latency()/SerializeToString() before
  // SetLatency() was undefined behavior. Default to 0 like the other fields.
  int latency_{0};
  int num_threads_{1};
  std::string name_;
};
|
||||
|
||||
} // namespace inference
|
||||
} // namespace paddle
|
@ -0,0 +1,39 @@
|
||||
// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#include "paddle/fluid/inference/utils/benchmark.h"
|
||||
#include <glog/logging.h>
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
using namespace paddle::inference;
|
||||
TEST(Benchmark, basic) {
  // Populate one record and dump it to the log for manual inspection.
  Benchmark record;
  record.SetName("key0");
  record.SetBatchSize(10);
  record.SetUseGpu();
  record.SetLatency(220);
  LOG(INFO) << "benchmark:\n" << record.SerializeToString();
}
|
||||
|
||||
TEST(Benchmark, PersistToFile) {
  Benchmark record;
  record.SetName("key0");
  record.SetBatchSize(10);
  record.SetUseGpu();
  record.SetLatency(220);

  // Persist three times to the same file: the append-mode open inside
  // PersistToFile should accumulate three records rather than overwrite.
  for (int i = 0; i < 3; ++i) {
    record.PersistToFile("1.log");
  }
}
|
Loading…
Reference in new issue