@@ -27,96 +27,143 @@
namespace mindspore::lite {
void AnfImporterFromMetaGraph::ConverterConstTensor() {
MS_EXCEPTION_IF_NULL(model);
auto *meta_graph = model->GetMetaGraph();
MS_EXCEPTION_IF_NULL(model_);
auto *meta_graph = model_->GetMetaGraph();
MS_EXCEPTION_IF_NULL(meta_graph);
for (size_t i = 0; i < meta_graph->allTensors()->size(); i++) {
num_of_tensors_ = meta_graph->allTensors()->size();
for (size_t i = 0; i < num_of_tensors_; i++) {
auto *tensor = meta_graph->allTensors()->GetAs<schema::Tensor>(i);
MS_EXCEPTION_IF_NULL(tensor);
if (tensor->nodeType() != schema::NodeType_ValueNode) {
if ((tensor->nodeType() != schema::NodeType_ValueNode) && (tensor->nodeType() != schema::NodeType_Parameter)) {
continue;
}
MS_ASSERT(tensor->dims() != nullptr);
auto parameter = model->add_parameter();
auto parameter = model_->add_parameter();
std::vector<int> shape;
for (size_t j = 0; j < tensor->dims()->size(); ++j) {
shape.push_back(tensor->dims()->data()[j]);
}
auto type_id = static_cast<TypeId>(tensor->dataType());
auto type_id = static_cast<TypeId>(tensor->dataType()); // todo: check error
auto type_ptr = TypeIdToType(type_id);
auto abstract_tensor = std::make_shared<abstract::AbstractTensor>(type_ptr, shape);
parameter->set_abstract(abstract_tensor);
auto abstractBase = std::make_shared<abstract::AbstractTensor>(type_ptr, shape);
// XXX TODO copy format
parameter->set_abstract(abstractBase);
parameter->set_name(std::string("Parameter"));
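// e.g. (hypothetical const tensor, for illustration only): a float32 weight with
// dims {32, 3, 3, 3} would give shape = {32, 3, 3, 3}, type_id = kNumberTypeFloat32,
// and an AbstractTensor built from TypeIdToType(kNumberTypeFloat32) and that shape,
// so the Parameter node carries the tensor's element type and shape as its abstract.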

ParamValueLitePtr param_value = std::make_shared<ParamValueLite>();
MS_EXCEPTION_IF_NULL(param_value);
param_value->set_tensor_shape(shape);
param_value->set_tensor_type(type_id);
if (tensor->data() != nullptr) {
auto size = tensor->data()->size();
char *tensor_data = new char[size]();
std::memcpy(tensor_data, tensor->data()->data(), size);
MS_EXCEPTION_IF_NULL(tensor_data);
param_value->set_tensor_addr(tensor_data);
param_value->set_tensor_size(size);
if (tensor->nodeType() == schema::NodeType_ValueNode) {
ParamValueLitePtr param_value = std::make_shared<ParamValueLite>();
MS_EXCEPTION_IF_NULL(param_value);
param_value->set_tensor_shape(shape);
param_value->set_tensor_type(type_id);
if (tensor->data() != nullptr) {
auto size = tensor->data()->size();
char *tensor_data = new char[size]();
std::memcpy(tensor_data, tensor->data()->data(), size);
MS_EXCEPTION_IF_NULL(tensor_data);
param_value->set_tensor_addr(tensor_data);
param_value->set_tensor_size(size);
}
parameter->set_default_param(param_value);
}
parameter->set_default_param(param_value);
AddNode(i, parameter);
model_->AddAnfNode(i, parameter);
}
}

int AnfImporterFromMetaGraph::ConverterCNode() {
MS_EXCEPTION_IF_NULL(model);
auto *meta_graph = model->GetMetaGraph();
MS_EXCEPTION_IF_NULL(model_);
auto *meta_graph = model_->GetMetaGraph();
MS_EXCEPTION_IF_NULL(meta_graph);
auto cNodes = meta_graph->nodes();
for (size_t i = 0; i < cNodes->size(); i++) {
auto cNode = cNodes->GetAs<schema::CNode>(i);
MS_EXCEPTION_IF_NULL(cNode);
auto tensor_id = cNode->outputIndex()->data()[0];
if (GetNode(tensor_id)) {
continue;
}

auto prim = std::make_shared<PrimitiveValue>(model->GetOp(cNode->name()->str()));
// Create CNode -- Order of inputs is as follows
// First input should be the Primitive
// Then we have CNodes that contribute to this CNode
// Finally we have the parameters
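// e.g. (hypothetical Conv2D node, for illustration only): cnode->inputs() would be
//   [0]      ValueNode holding the Conv2D PrimitiveValue
//   [1..k]   CNodes that produce this node's activation inputs
//   [k+1..n] Parameters holding the constant inputs (weight, bias)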

// first iteration -- create CNode with primitive, create originator map
for (size_t i = 0; i < meta_graph->nodes()->size(); i++) {
auto cNode = meta_graph->nodes()->GetAs<schema::CNode>(i);
MS_EXCEPTION_IF_NULL(cNode);
auto prim = std::make_shared<PrimitiveValue>(model_->GetOp(cNode->name()->str()));
if (prim == nullptr) {
MS_LOG(ERROR) << "th tensorDef in subGraphDef is nullptr";
return RET_ERROR;
}
auto value_node = NewValueNode(prim);
AddNode(tensor_id, value_node);

// auto prim_name = std::string("PrimitivePy: ") + std::string(cNode->name()->c_str());
// value_node->set_fullname_with_scope(prim_name);
std::vector<AnfNodePtr> op_inputs = {value_node};

auto cnode = model_->NewCNode(op_inputs);
auto node_name = std::string(cNode->name()->c_str()) + std::to_string(i);
cnode->set_fullname_with_scope(node_name);
AddNode(num_of_tensors_ + i, cnode);

for (size_t j = 0; j < cNode->outputIndex()->size(); j++) {
int tensor_id = cNode->outputIndex()->data()[j];
originator_[tensor_id] = cnode;
}
}
// second iteration -- fill in input CNodes and Parameters
// populate map
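// e.g. (hypothetical graph Conv2D -> Relu, for illustration only): the Relu input tensor
// is NodeType_Parameter and was produced by Conv2D, so originator_[tensor_id] is the
// Conv2D cnode and gets added as an input; constant tensors (NodeType_ValueNode, such as
// weights) are instead looked up with GetNode(tensor_id) and appended afterwards.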
for (size_t i = 0; i < meta_graph->nodes()->size(); i++) {
std::vector<int> input;
std::vector<int> output;
int tensor_id;
auto cNode = meta_graph->nodes()->GetAs<schema::CNode>(i);
MS_EXCEPTION_IF_NULL(cNode);
auto cnode = std::dynamic_pointer_cast<CNode>(GetNode(num_of_tensors_ + i));

for (size_t j = 0; j < cNode->outputIndex()->size(); j++) {
tensor_id = cNode->outputIndex()->data()[j];
output.push_back(tensor_id);
}

MS_EXCEPTION_IF_NULL(cNode->inputIndex());
for (size_t j = 0; j < cNode->inputIndex()->size(); j++) {
auto node = GetNode(*(cNode->inputIndex()->GetAs<uint32_t>(j)));
if (nullptr == node) {
MS_LOG(ERROR) << "Can't find input node.";
return RET_ERROR;
tensor_id = cNode->inputIndex()->data()[j];
input.push_back(tensor_id);
auto *tensor = meta_graph->allTensors()->GetAs<schema::Tensor>(tensor_id);
MS_EXCEPTION_IF_NULL(tensor);
if ((tensor->nodeType() == schema::NodeType_Parameter) && (originator_[tensor_id] != nullptr)) {
cnode->add_input(originator_[tensor_id]);
}
// todo: CheckInputNodeType, the first node should be op;
op_inputs.push_back(node);
}
auto cnode = model->NewCNode(op_inputs);
auto node_name = std::string(cNode->name()->c_str());
cnode->set_fullname_with_scope(node_name);
AddNode(tensor_id, cnode);
// finally add all the Parameters (which are ValueNodes)
for (size_t j = 0; j < cNode->inputIndex()->size(); j++) {
tensor_id = cNode->inputIndex()->data()[j];
auto *tensor = meta_graph->allTensors()->GetAs<schema::Tensor>(tensor_id);
MS_EXCEPTION_IF_NULL(tensor);
if ((tensor->nodeType() == schema::NodeType_ValueNode) && (GetNode(tensor_id) != nullptr)) {
cnode->add_input(GetNode(tensor_id));
}
}

model_->AddCNodeInputOutput(cnode->fullname_with_scope(), input, output);
}

return RET_OK;
}

void AnfImporterFromMetaGraph::AddReturnCNode() {
MS_EXCEPTION_IF_NULL(model);
auto *meta_graph = model->GetMetaGraph();
MS_EXCEPTION_IF_NULL(model_);
auto *meta_graph = model_->GetMetaGraph();
MS_EXCEPTION_IF_NULL(meta_graph);
std::vector<int> input;
std::vector<int> output;
std::vector<AnfNodePtr> op_inputs;
auto value_node = NewValueNode(prim::kPrimReturn);
// value_node->set_fullname_with_scope("Primitive");
op_inputs.push_back(value_node);
auto tensor_id = meta_graph->outputIndex()->data()[0];
op_inputs.push_back(GetNode(tensor_id));
auto cnode = model->NewCNode(op_inputs);
for (int i = 0; i < meta_graph->outputIndex()->size(); i++) {
auto prev_cnode = originator_[meta_graph->outputIndex()->data()[i]];
if (prev_cnode != nullptr) op_inputs.push_back(prev_cnode);
input.push_back(meta_graph->outputIndex()->data()[i]);
}
auto cnode = model_->NewCNode(op_inputs);
cnode->set_fullname_with_scope("return");
model->set_return(cnode);
model_->set_return(cnode);
model_->AddCNodeInputOutput(cnode->fullname_with_scope(), input, output);
}
FuncGraphPtr AnfImporterFromMetaGraph::GetResult() { return this->model; }
FuncGraphPtr AnfImporterFromMetaGraph::GetResult() { return this->model_; }
}  // namespace mindspore::lite