introduce a master switch to turn on and off profiler

pull/10361/head
Zirui Wu 5 years ago
parent 61ed05f133
commit a1355a827c
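
For orientation, the sketch below distills the behaviour this commit introduces: profiling runs only when both the PROFILING_MODE env variable and the new per-manager enabled_ flag agree, and DisableProfiling() is a one-way off switch. This is a minimal, self-contained illustration, not the real ProfilingManager; GetEnv here is a plain stand-in for common::GetEnv.

#include <cstdlib>
#include <iostream>
#include <string>

// Stand-in for common::GetEnv used by the real code.
std::string GetEnv(const std::string &name) {
  const char *v = std::getenv(name.c_str());
  return v == nullptr ? std::string() : std::string(v);
}

// Stripped-down illustration of the master switch added in this commit.
class ProfilingManagerSketch {
 public:
  ProfilingManagerSketch() : enabled_(true) {}
  // Profiling runs only if the env variable asks for it AND the switch is still on.
  bool IsProfilingEnable() const { return GetEnv("PROFILING_MODE") == "true" && enabled_; }
  // Master off switch: once called, profiling stays off for this manager's lifetime.
  void DisableProfiling() { enabled_ = false; }

 private:
  bool enabled_;
};

int main() {
  ProfilingManagerSketch pm;
  std::cout << std::boolalpha;
  std::cout << pm.IsProfilingEnable() << "\n";  // true only when PROFILING_MODE=true
  pm.DisableProfiling();
  std::cout << pm.IsProfilingEnable() << "\n";  // always false after the switch is flipped
  return 0;
}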

@@ -68,7 +68,7 @@ class ConnectorThroughput : public Sampling {
// @return Status The status code returned
Status SaveToFile() override;
-Status Init(const std::string &dir_path, const std::string &device_id);
+Status Init(const std::string &dir_path, const std::string &device_id) override;
json ParseOpInfo(const DatasetOp &node, const std::vector<double> &thr);

@@ -31,17 +31,11 @@ namespace mindspore {
namespace dataset {
// Constructor
-ProfilingManager::ProfilingManager(ExecutionTree *tree) : tree_(tree) {
+ProfilingManager::ProfilingManager(ExecutionTree *tree) : tree_(tree), enabled_(true) {
perf_monitor_ = std::make_unique<Monitor>(tree_);
}
-bool ProfilingManager::IsProfilingEnable() const {
-  auto profiling = common::GetEnv("PROFILING_MODE");
-  if (profiling.empty() || profiling != "true") {
-    return false;
-  }
-  return true;
-}
+bool ProfilingManager::IsProfilingEnable() const { return common::GetEnv("PROFILING_MODE") == "true" && enabled_; }
Status ProfilingManager::Initialize() {
// Register nodes based on config
@@ -64,7 +58,7 @@ Status ProfilingManager::Initialize() {
#endif
dir_path_ = real_path;
-// If DEVICE_ID is not set,defult value is 0
+// If DEVICE_ID is not set, default value is 0
device_id_ = common::GetEnv("DEVICE_ID");
if (device_id_.empty()) {
device_id_ = "0";

@@ -102,9 +102,14 @@ class ProfilingManager {
// @return Status The status code returned
Status GetTracingNode(const std::string &name, std::shared_ptr<Tracing> *node);
// If profiling is enabled.
+// Return true only if the env variable enables profiling and enabled_ is still true.
bool IsProfilingEnable() const;
+// Calling this disables profiling for the entire lifetime of the ExecutionTree; it cannot be re-enabled.
+// Each execution_tree is associated with a unique profiling_manager, which starts when the tree is launched.
+// This is the master off switch: once called, the profiler won't start even if the env variable says so.
+void DisableProfiling() { enabled_ = false; }
const std::unordered_map<std::string, std::shared_ptr<Sampling>> &GetSamplingNodes() { return sampling_nodes_; }
// Launch monitoring thread.
@@ -114,6 +119,7 @@ class ProfilingManager {
private:
std::unique_ptr<Monitor> perf_monitor_;
+bool enabled_;
std::unordered_map<std::string, std::shared_ptr<Tracing>> tracing_nodes_;
std::unordered_map<std::string, std::shared_ptr<Sampling>> sampling_nodes_;
@@ -128,9 +134,9 @@ class ProfilingManager {
// @return Status The status code returned
Status RegisterSamplingNode(std::shared_ptr<Sampling> node);
-ExecutionTree *tree_ = nullptr; // ExecutionTree pointer
-std::string dir_path_; // where to create profiling file
-std::string device_id_; // used when create profiling file,filename_deviceid.suffix
+ExecutionTree *tree_; // ExecutionTree pointer
+std::string dir_path_; // where to create profiling file
+std::string device_id_; // used when create profiling file,filename_device_id.suffix
};
enum ProfilingType { TIME, CONNECTOR_DEPTH };

@@ -126,7 +126,8 @@ Status TreeAdapter::BuildExecutionTreeRecur(std::shared_ptr<DatasetNode> ir, std
Status TreeAdapter::Build(std::shared_ptr<DatasetNode> root_ir, int32_t num_epochs) {
// This will evolve in the long run
tree_ = std::make_unique<ExecutionTree>();
+// disable profiling if this is only a getter pass
+if (usage_ == kDeGetter) tree_->GetProfilingManager()->DisableProfiling();
// Build the Execution tree from the child of the IR root node, which represent the root of the input IR tree
std::shared_ptr<DatasetOp> root_op;
RETURN_IF_NOT_OK(BuildExecutionTreeRecur(root_ir->Children()[0], &root_op));
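
The TreeAdapter hunk above wires the switch into tree construction: a tree built only to answer getter queries (output shapes/types, dataset size) turns profiling off before anything is launched. Below is a compressed sketch of that gating under assumed names; TreeUsage, ProfilingSwitch, ExecutionTreeSketch and BuildSketch are hypothetical stand-ins, while kDeGetter, GetProfilingManager() and DisableProfiling() mirror the diff.

#include <memory>

// Hypothetical stand-ins; the real types are TreeAdapter, ExecutionTree and ProfilingManager.
enum class TreeUsage { kDeIterator, kDeGetter };

struct ProfilingSwitch {
  bool enabled_ = true;
  void DisableProfiling() { enabled_ = false; }
};

struct ExecutionTreeSketch {
  ProfilingSwitch profiling_manager_;
  ProfilingSwitch *GetProfilingManager() { return &profiling_manager_; }
};

// Mirrors the gate added to TreeAdapter::Build: a getter-only pass never starts the profiler.
std::unique_ptr<ExecutionTreeSketch> BuildSketch(TreeUsage usage) {
  auto tree = std::make_unique<ExecutionTreeSketch>();
  if (usage == TreeUsage::kDeGetter) tree->GetProfilingManager()->DisableProfiling();
  return tree;
}

int main() {
  auto getter_tree = BuildSketch(TreeUsage::kDeGetter);
  auto iterator_tree = BuildSketch(TreeUsage::kDeIterator);
  // getter_tree: switch flipped off; iterator_tree: switch left on
  // (the real manager additionally checks the PROFILING_MODE env variable).
  return getter_tree->GetProfilingManager()->enabled_ == false &&
                 iterator_tree->GetProfilingManager()->enabled_ == true
             ? 0
             : 1;
}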

@@ -40,6 +40,12 @@ def test_profiling_simple_pipeline():
data1 = ds.GeneratorDataset(source, ["data"])
data1 = data1.shuffle(64)
data1 = data1.batch(32)
+# Try output shapes, output types and dataset size, and make sure no profiling file is generated
+assert data1.output_shapes() == [[32, 1]]
+assert [str(tp) for tp in data1.output_types()] == ["int64"]
+assert data1.get_dataset_size() == 32
+assert os.path.exists(PIPELINE_FILE) is False
+assert os.path.exists(DATASET_ITERATOR_FILE) is False
for _ in data1:
pass
@@ -92,7 +98,7 @@ def test_profiling_complex_pipeline():
del os.environ['MINDDATA_PROFILING_DIR']
-def test_profiling_sampling_iterval():
+def test_profiling_sampling_interval():
"""
Test non-default monitor sampling interval
"""
@@ -126,4 +132,4 @@ def test_profiling_sampling_iterval():
if __name__ == "__main__":
test_profiling_simple_pipeline()
test_profiling_complex_pipeline()
-    test_profiling_sampling_iterval()
+    test_profiling_sampling_interval()
