From: @selfws
Reviewed-by: @xchu42,@ji_chen
Signed-off-by: @ji_chen
pull/538/MERGE
mindspore-ci-bot committed 4 years ago via Gitee
commit 24537d87a2

@@ -919,11 +919,11 @@ Status DataDumper::DumpExceptionInfo(const std::vector<rtExceptionInfo> exceptio
ReplaceStringElem(op_name);
ReplaceStringElem(op_type);
string dump_file_path =
"./" + op_type + "." + op_name + "." + to_string(op_desc_info.task_id) + "." + to_string(now_time);
"./" + op_type + "." + op_name + "." + std::to_string(op_desc_info.task_id) + "." + std::to_string(now_time);
GELOGI("The exception dump file path is %s", dump_file_path.c_str());
uint64_t proto_size = dump_data.ByteSizeLong();
-unique_ptr<char[]> proto_msg(new (std::nothrow) char[proto_size]);
+std::unique_ptr<char[]> proto_msg(new (std::nothrow) char[proto_size]);
bool ret = dump_data.SerializeToArray(proto_msg.get(), proto_size);
if (!ret || proto_size == 0) {
GELOGE(PARAM_INVALID, "Dump data proto serialize failed");
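
Note: the pattern this hunk qualifies, building a dump file name and serializing the protobuf message into a nothrow-allocated buffer, can be sketched in isolation roughly as below; DumpData, the function name, and the error handling are placeholders for illustration, not the actual GE definitions.

#include <memory>
#include <string>

// Minimal sketch, assuming DumpData is a protobuf-generated message type.
bool SerializeExceptionDump(const DumpData &dump_data, const std::string &op_type,
                            const std::string &op_name, uint32_t task_id, int64_t now_time) {
  std::string dump_file_path =
      "./" + op_type + "." + op_name + "." + std::to_string(task_id) + "." + std::to_string(now_time);
  uint64_t proto_size = dump_data.ByteSizeLong();
  // new (std::nothrow) yields nullptr on allocation failure instead of throwing.
  std::unique_ptr<char[]> proto_msg(new (std::nothrow) char[proto_size]);
  if (proto_msg == nullptr || proto_size == 0) {
    return false;
  }
  return dump_data.SerializeToArray(proto_msg.get(), static_cast<int>(proto_size));
}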

@@ -41,7 +41,7 @@ class StreamSwitchTaskInfo : public TaskInfo {
Status CalculateArgs(const domi::TaskDef &task_def, DavinciModel *davinci_model) override;
private:
-void SetInputAndValuePtr(DavinciModel *davinci_model, const vector<void *> &input_data_addrs);
+void SetInputAndValuePtr(DavinciModel *davinci_model, const std::vector<void *> &input_data_addrs);
void *input_ptr_;
rtCondition_t cond_;
void *value_ptr_;
@@ -49,7 +49,7 @@ class StreamSwitchTaskInfo : public TaskInfo {
uint32_t true_stream_id_;
rtSwitchDataType_t data_type_;
static const uint32_t kInputNum = 2;
-vector<int64_t> fixed_addr_offset_;
+std::vector<int64_t> fixed_addr_offset_;
};
} // namespace ge
#endif // GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_STREAM_SWITCH_TASK_INFO_H_

@@ -63,8 +63,8 @@ struct RuntimeParam {
};
typedef struct FusionOpInfo {
-vector<string> original_op_names;
-string op_name;
+std::vector<std::string> original_op_names;
+std::string op_name;
uint32_t op_index;
uint32_t stream_id;
} FusionOpInfo;

@@ -82,7 +82,7 @@ Status HybridModelExecutor::ExecuteGraphInternal(SubgraphExecutor &executor,
Status HybridModelExecutor::Cleanup() {
GELOGD("Start to cleanup.");
context_.callback_manager->Destroy();
-RuntimeInferenceContext::DestroyContext(to_string(context_.session_id));
+RuntimeInferenceContext::DestroyContext(std::to_string(context_.session_id));
GELOGD("Cleanup successfully.");
return SUCCESS;
}

@@ -78,8 +78,8 @@ HybridDavinciModel::~HybridDavinciModel() {
delete impl_;
}
-unique_ptr<HybridDavinciModel> HybridDavinciModel::Create(const GeRootModelPtr &ge_root_model) {
-auto instance = unique_ptr<HybridDavinciModel>(new (std::nothrow)HybridDavinciModel());
+std::unique_ptr<HybridDavinciModel> HybridDavinciModel::Create(const GeRootModelPtr &ge_root_model) {
+auto instance = std::unique_ptr<HybridDavinciModel>(new (std::nothrow)HybridDavinciModel());
if (instance != nullptr) {
instance->impl_ = new (std::nothrow) HybridDavinciModel::Impl(ge_root_model);
if (instance->impl_ != nullptr) {
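
Note: the factory touched here follows a fairly common nothrow construction pattern; a self-contained sketch of the same idea, using a hypothetical Widget/Impl pair instead of HybridDavinciModel, might look like this.

#include <memory>
#include <new>

// Hypothetical Widget stands in for HybridDavinciModel, Impl for its pimpl member.
class Widget {
 public:
  static std::unique_ptr<Widget> Create() {
    auto instance = std::unique_ptr<Widget>(new (std::nothrow) Widget());
    if (instance == nullptr) {
      return nullptr;  // outer allocation failed, no exception is thrown
    }
    instance->impl_ = new (std::nothrow) Impl();
    if (instance->impl_ == nullptr) {
      return nullptr;  // the partially built Widget is released by unique_ptr on return
    }
    return instance;
  }
  ~Widget() { delete impl_; }

 private:
  struct Impl {};
  Widget() = default;
  Impl *impl_ = nullptr;
};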

@@ -957,7 +957,7 @@ Status HybridModelBuilder::IndexTaskDefs() {
// index task defs
GELOGD("To index tasks for subgraph: %s", name.c_str());
-unordered_map<int64_t, NodePtr> node_map;
+std::unordered_map<int64_t, NodePtr> node_map;
for (const auto &node : sub_graph->GetDirectNode()) {
GE_CHECK_NOTNULL(node);
GE_CHECK_NOTNULL(node->GetOpDesc());

@@ -405,7 +405,7 @@ Status ControlOpNodeExecutor::LoadTask(const HybridModel &model,
auto node_item = model.GetNodeItem(node);
GE_CHECK_NOTNULL(node_item);
-unique_ptr<ControlOpNodeTask> node_task;
+std::unique_ptr<ControlOpNodeTask> node_task;
auto node_type = node->GetType();
if (node_type == IF || node_type == STATELESSIF) {
node_task.reset(new(std::nothrow) IfOpNodeTask());
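
Note: this load path picks a concrete task implementation from the node type; roughly, with hypothetical task classes standing in for ControlOpNodeTask and its subclasses:

#include <memory>
#include <new>
#include <string>

// Hypothetical base/derived tasks; the real code uses ControlOpNodeTask, IfOpNodeTask, etc.
struct NodeTaskBase { virtual ~NodeTaskBase() = default; };
struct IfTask : NodeTaskBase {};
struct WhileTask : NodeTaskBase {};

std::unique_ptr<NodeTaskBase> MakeControlTask(const std::string &node_type) {
  std::unique_ptr<NodeTaskBase> node_task;
  if (node_type == "If" || node_type == "StatelessIf") {
    node_task.reset(new (std::nothrow) IfTask());
  } else if (node_type == "While" || node_type == "StatelessWhile") {
    node_task.reset(new (std::nothrow) WhileTask());
  }
  return node_task;  // stays null if the type is unsupported or allocation failed
}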

@@ -19,8 +19,6 @@
#include "runtime/rt.h"
-using namespace std;
namespace ge {
#define CC_FUSION_OP_MAX 32
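
Note: with the blanket "using namespace std;" removed from this file, standard-library names must be spelled with an explicit std:: prefix, which is what the other hunks in this commit do. A tiny illustration of the resulting style (hypothetical function, not GE code):

#include <string>
#include <vector>

// Each standard type is qualified at its point of use, keeping the enclosing
// namespace clean (especially important when the file is a header).
std::string JoinOpNames(const std::vector<std::string> &names) {
  std::string joined;
  for (const auto &name : names) {
    if (!joined.empty()) {
      joined += ".";
    }
    joined += name;
  }
  return joined;
}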
