!2847 Add record for transform status

Merge pull request !2847 from Kang/master
pull/2847/MERGE
mindspore-ci-bot, committed by Gitee 5 years ago
commit f6a45a8fab

@@ -20,6 +20,7 @@
 #include <deque>
 #include <memory>
 #include <unordered_set>
+#include <unordered_map>
 #include "ir/anf.h"
 #include "ir/manager.h"
@@ -191,15 +192,30 @@ bool SubstitutionList::operator()(const FuncGraphPtr &func_graph, const Optimize
   FuncGraphManagerPtr manager = optimizer->manager();
   manager->AddFuncGraph(func_graph);
+  // for transform status counting
+  size_t space = 0;
+  std::unordered_map<std::string, std::vector<bool>> status;
+  if (optimizer->is_on_debug_) {
+    for (size_t i = 0; i < list_.size(); i++) {
+      status[list_[i]->name_ + std::to_string(i)] = {};
+    }
+  }
   bool loop = false;
   bool changes = false;
   do {
     loop = false;
-    for (auto const &transform : list_) {
-      auto change = ApplyTransform(optimizer, func_graph->output(), transform);
+    for (size_t i = 0; i < list_.size(); i++) {
+      auto change = ApplyTransform(optimizer, func_graph->output(), list_[i]);
       changes = changes || change;
       loop = loop || change;
+      // record the status of each transform
+      if (optimizer->is_on_debug_) {
+        status[list_[i]->name_ + std::to_string(i)].push_back(change);
+        space = std::max(list_[i]->name_.size(), space);
+      }
     }
     if (is_once_) {
@@ -207,6 +223,23 @@ bool SubstitutionList::operator()(const FuncGraphPtr &func_graph, const Optimize
     }
   } while (loop);
+  // display the status of each transform
+  if (optimizer->is_on_debug_) {
+    std::stringstream ss;
+    ss << std::endl
+       << "Pass: " << optimizer->name() << "(" << optimizer->CurPass_.counter << ")_" << optimizer->CurPass_.name
+       << std::endl;
+    for (size_t i = 0; i < list_.size(); i++) {
+      auto name = list_[i]->name_;
+      ss << std::left << std::setw(space + 4) << name << "\t";
+      for (auto change : status[name + std::to_string(i)]) {
+        ss << change << " ";
+      }
+      ss << std::endl;
+    }
+    MS_LOG(DEBUG) << ss.str();
+  }
   return changes;
 }
 }  // namespace opt
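
The opt.cc change boils down to a small bookkeeping pattern: key each substitution by name_ plus its list index (so substitutions sharing a name stay distinct), append one boolean per do/while iteration recording whether that transform changed the graph, and print the rows left-aligned with std::setw. The following standalone sketch reproduces the pattern outside MindSpore; the Transform struct, the apply callables and the demo pass label are illustrative stand-ins, not the real Substitution/ApplyTransform API.

#include <algorithm>
#include <functional>
#include <iomanip>
#include <iostream>
#include <sstream>
#include <string>
#include <unordered_map>
#include <vector>

// Stand-in for a Substitution: a name plus a callable that may change the graph.
struct Transform {
  std::string name_;
  std::function<bool(int)> apply;  // returns true if this round changed something
};

int main() {
  std::vector<Transform> list_ = {
    {"inline", [](int round) { return round < 2; }},
    {"constant_fold", [](int round) { return round == 0; }},
    {"arithmetic_simplify", [](int) { return false; }},
  };

  // Same bookkeeping as the patch: name+index key, one bool per iteration.
  size_t space = 0;
  std::unordered_map<std::string, std::vector<bool>> status;
  for (size_t i = 0; i < list_.size(); i++) {
    status[list_[i].name_ + std::to_string(i)] = {};
  }

  bool loop = false;
  int round = 0;
  do {
    loop = false;
    for (size_t i = 0; i < list_.size(); i++) {
      bool change = list_[i].apply(round);
      loop = loop || change;
      status[list_[i].name_ + std::to_string(i)].push_back(change);
      space = std::max(list_[i].name_.size(), space);
    }
    ++round;
  } while (loop);

  // Same report layout: left-aligned name column, then one 0/1 flag per iteration.
  std::stringstream ss;
  ss << "Pass: demo(0)_demo_pass" << std::endl;
  for (size_t i = 0; i < list_.size(); i++) {
    auto name = list_[i].name_;
    ss << std::left << std::setw(space + 4) << name << "\t";
    for (auto change : status[name + std::to_string(i)]) {
      ss << change << " ";
    }
    ss << std::endl;
  }
  std::cout << ss.str();
  return 0;
}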

@@ -95,6 +95,7 @@ class Optimizer : public std::enable_shared_from_this<Optimizer> {
   void Init(const OptPassGroupMap &passes, bool run_only_once) {
     run_only_once_ = run_only_once;
     is_watch_renormalize_ = false;
+    is_on_debug_ = IS_OUTPUT_ON(mindspore::DEBUG);
     for (auto &iter : passes) {
       const std::string &name = iter.first;
@@ -144,6 +145,7 @@ class Optimizer : public std::enable_shared_from_this<Optimizer> {
     auto run_runc = [&counter, &func_graph, &changes, use_profile, this]() {
       for (size_t i = 0; i < passes_.size(); ++i) {
         const OptPass &opt = passes_[i];
+        CurPass_ = {counter, pass_names_[i]};
         auto opt_func = [&func_graph, &changes, &opt, this]() {
           if (opt.is_renormalize()) {
             auto resource_ptr = std::dynamic_pointer_cast<pipeline::Resource>(resource_);
@@ -173,7 +175,7 @@ class Optimizer : public std::enable_shared_from_this<Optimizer> {
           }
         };
         use_profile ? (WITH(MsProfile::GetProfile()->Step(pass_names_[i])) opt_func) : opt_func();
-        if (IS_OUTPUT_ON(mindspore::DEBUG) && MsContext::GetInstance()->save_graphs_flag()) {
+        if (is_on_debug_ && MsContext::GetInstance()->save_graphs_flag()) {
          MS_LOG(DEBUG) << "The opt " << name_ << " round " << counter << " OptPass " << pass_names_[i] << " end.";
          auto fg_name =
            "opt_substep_" + name_ + "_r" + std::to_string(counter) + "_" + std::to_string(i) + "_" + pass_names_[i];
@@ -217,6 +219,13 @@ class Optimizer : public std::enable_shared_from_this<Optimizer> {
   bool is_watch_renormalize() { return is_watch_renormalize_; }
   void set_enable(bool enable) { is_enable_ = enable; }
+  struct {
+    int counter;
+    std::string name;
+  } CurPass_;
+  bool is_on_debug_{false};
  private:
   const std::string name_;
   pipeline::ResourceBasePtr resource_;
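
The optimizer.h side caches the log-level check once in Init() as is_on_debug_ and publishes the currently running pass through the anonymous-struct member CurPass_, which is what lets SubstitutionList label its report with the pass name and round counter. Below is a minimal sketch of that shape, assuming a toy MiniOptimizer class and a stand-in DebugLogEnabled() in place of IS_OUTPUT_ON(mindspore::DEBUG).

#include <iostream>
#include <string>
#include <vector>

// Stand-in for IS_OUTPUT_ON(mindspore::DEBUG); here it is simply hard-wired on.
static bool DebugLogEnabled() { return true; }

class MiniOptimizer {
 public:
  void Init(const std::vector<std::string> &pass_names) {
    pass_names_ = pass_names;
    // Evaluate the log-level check once, as the new is_on_debug_ member does.
    is_on_debug_ = DebugLogEnabled();
  }

  void Run() {
    for (size_t i = 0; i < pass_names_.size(); ++i) {
      // Record which pass is currently running so callees can label their reports.
      CurPass_ = {static_cast<int>(i), pass_names_[i]};
      RunOnePass();
    }
  }

  // Public on purpose, mirroring the patch: SubstitutionList-style callees read it directly.
  struct {
    int counter;
    std::string name;
  } CurPass_;
  bool is_on_debug_{false};

 private:
  void RunOnePass() {
    if (is_on_debug_) {
      std::cout << "Pass: mini(" << CurPass_.counter << ")_" << CurPass_.name << std::endl;
    }
  }
  std::vector<std::string> pass_names_;
};

int main() {
  MiniOptimizer opt;
  opt.Init({"opt_a", "opt_b"});
  opt.Run();
  return 0;
}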
