mindspore cxx api for 310 inference

Signed-off-by: zhoufeng <zhoufeng54@huawei.com>
pull/7177/head
zhoufeng 4 years ago
parent fd16535017
commit 183742009f

@ -69,19 +69,9 @@ include_directories(${PYTHON_INCLUDE_DIRS})
set(MS_CCSRC_PATH ${CMAKE_SOURCE_DIR}/mindspore/ccsrc)
set(MS_CCSRC_BUILD_PATH ${BUILD_PATH}/mindspore/mindspore/ccsrc)
if (ENABLE_GE)
link_directories(${CMAKE_SOURCE_DIR}/third_party/ge/lib)
elseif(ENABLE_D OR ENABLE_TESTCASES)
if (ENABLE_D OR ENABLE_ACL OR ENABLE_TESTCASES)
include(${CMAKE_SOURCE_DIR}/cmake/dependency_graphengine.cmake)
endif()
if (ENABLE_GE OR ENABLE_D OR ENABLE_TESTCASES)
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/graphengine/inc)
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/graphengine/inc/external)
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/graphengine/inc/framework)
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/graphengine/third_party/fwkacllib/inc)
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/graphengine/third_party/fwkacllib/inc/toolchain)
endif()
endif ()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fvisibility=hidden")
add_subdirectory(mindspore/ccsrc)

@ -23,9 +23,9 @@ usage()
{
echo "Usage:"
echo "bash build.sh [-d] [-r] [-v] [-c on|off] [-t on|off] [-g on|off] [-h] [-b ge] [-m infer|train] \\"
echo " [-a on|off] [-p on|off] [-i] [-L] [-R] [-D on|off] [-j[n]] [-e gpu|d|cpu] \\"
echo " [-a on|off] [-p on|off] [-i] [-L] [-R] [-D on|off] [-j[n]] [-e gpu|ascend|cpu|acl] \\"
echo " [-P on|off] [-z [on|off]] [-M on|off] [-V 9.2|10.1] [-I arm64|arm32|x86_64] [-K] \\"
echo " [-B on|off] [-w on|off] [-E] [-l on|off] [-n full|lite|off] [-T on|off] \\"
echo " [-B on|off] [-E] [-l on|off] [-n full|lite|off] [-T on|off] \\"
echo " [-A [cpp|java|object-c] [-C on|off] [-o on|off] [-S on|off] [-k on|off] [-W sse|neon|avx|off] \\"
echo ""
echo "Options:"
@ -45,7 +45,7 @@ usage()
echo " -i Enable increment building, default off"
echo " -L Enable load ANF-IR as input of 'infer', default off"
echo " -j[n] Set the threads when building (Default: -j8)"
echo " -e Use gpu, d or cpu"
echo " -e Use cpu, gpu, ascend or acl"
echo " -P Enable dump anf graph to file in ProtoBuffer format, default on"
echo " -D Enable dumping of function graph ir, default on"
echo " -z Compile dataset & mindrecord, default on"
@ -55,7 +55,6 @@ usage()
echo " -I Enable compiling mindspore lite for arm64, arm32 or x86_64, default disable mindspore lite compilation"
echo " -K Compile with AKG, default on"
echo " -s Enable serving module, default off"
echo " -w Enable acl module, default off"
echo " -B Enable debugger, default on"
echo " -E Enable IBVERBS for parameter server, default off"
echo " -l Compile with python dependency, default on"
@ -225,6 +224,9 @@ checkopts()
ENABLE_D="on"
ENABLE_CPU="on"
ENABLE_SERVING="on"
elif [[ "X$OPTARG" == "Xacl" ]]; then
ENABLE_SERVING="on"
ENABLE_ACL="on"
elif [[ "X$OPTARG" == "Xcpu" ]]; then
ENABLE_CPU="on"
else

@ -11,7 +11,7 @@ include(${GE_SOURCE_DIR}/cmake/external_libs/onnx.cmake)
include(${GE_SOURCE_DIR}/cmake/external_libs/securec.cmake)
# for UT, find slog and error_manager from local prebuild
if (NOT ENABLE_D)
if (NOT ENABLE_D AND NOT ENABLE_ACL)
set(GE_PREBUILD_PATH ${GE_SOURCE_DIR}/third_party/prebuild/${CMAKE_HOST_SYSTEM_PROCESSOR})
find_library(slog libslog.so ${GE_PREBUILD_PATH})
find_library(error_manager liberror_manager.so ${GE_PREBUILD_PATH})
@ -28,6 +28,7 @@ elseif (DEFINED ENV{D_LINK_PATH})
message(FATAL_ERROR "Running on a unsupported architecture: ${SYSTEM_TYPE}, build terminated")
endif()
set(GE_LIB_PATH ${GE_LIB_PATH}/${GE_SYS_ARCH})
find_library(c_sec libc_sec.so ${GE_LIB_PATH})
find_library(slog libslog.so ${GE_LIB_PATH})
find_library(mmpa libmmpa.so ${GE_LIB_PATH})
find_library(runtime libruntime.so ${GE_LIB_PATH})
@ -44,8 +45,8 @@ else()
else()
set(ASCEND_PATH /usr/local/Ascend)
endif()
set(ASCEND_DRIVER_PATH ${ASCEND_PATH}/driver/lib64/common)
set(ASCEND_RUNTIME_PATH ${ASCEND_PATH}/fwkacllib/lib64)
set(ASCEND_DRIVER_PATH ${ASCEND_PATH}/driver/lib64/common ${ASCEND_PATH}/driver/lib64)
set(ASCEND_RUNTIME_PATH ${ASCEND_PATH}/fwkacllib/lib64 ${ASCEND_PATH}/acllib/lib64 ${ASCEND_PATH}/atc/lib64)
find_library(c_sec libc_sec.so ${ASCEND_DRIVER_PATH})
find_library(slog libslog.so ${ASCEND_DRIVER_PATH})
find_library(mmpa libmmpa.so ${ASCEND_DRIVER_PATH})
@ -76,9 +77,11 @@ string(REPLACE " -Werror" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
# force __FILE__ to show relative path of file, from source directory
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -D__FILE__='\"$(subst $(realpath ${CMAKE_SOURCE_DIR})/,,$(abspath $<))\"' -Wno-builtin-macro-redefined")
add_subdirectory(${GE_SOURCE_DIR}/src/common/graph)
if(ENABLE_D)
if (ENABLE_ACL OR ENABLE_D)
add_subdirectory(${GE_SOURCE_DIR}/src/ge/common)
if (ENABLE_D)
add_subdirectory(${GE_SOURCE_DIR}/src/ge/ge_runtime)
endif()
endif ()
endif ()
set(CMAKE_CXX_FLAGS ${_ge_tmp_CMAKE_CXX_FLAGS})

@ -58,13 +58,22 @@ if (ENABLE_GE)
include_directories(${CMAKE_SOURCE_DIR}/third_party/ge/include)
include_directories(${CMAKE_SOURCE_DIR}/third_party/ge/include/external)
include_directories(${CMAKE_SOURCE_DIR}/third_party/ge/include/external/graph)
elseif(ENABLE_D OR ENABLE_TESTCASES)
link_directories(${CMAKE_SOURCE_DIR}/third_party/ge/lib)
elseif(ENABLE_D OR ENABLE_ACL OR ENABLE_TESTCASES)
include_directories(${CMAKE_SOURCE_DIR}/graphengine/inc)
include_directories(${CMAKE_SOURCE_DIR}/graphengine/inc/ops)
include_directories(${CMAKE_SOURCE_DIR}/graphengine/inc/external)
include_directories(${CMAKE_SOURCE_DIR}/graphengine/inc/external/graph)
endif()
if (ENABLE_GE OR ENABLE_D OR ENABLE_ACL OR ENABLE_TESTCASES)
include_directories(${CMAKE_SOURCE_DIR}/graphengine/inc)
include_directories(${CMAKE_SOURCE_DIR}/graphengine/inc/external)
include_directories(${CMAKE_SOURCE_DIR}/graphengine/inc/framework)
include_directories(${CMAKE_SOURCE_DIR}/graphengine/third_party/fwkacllib/inc)
include_directories(${CMAKE_SOURCE_DIR}/graphengine/third_party/fwkacllib/inc/toolchain)
endif()
if (ENABLE_MINDDATA)
include(${CMAKE_SOURCE_DIR}/cmake/external_libs/icu4c.cmake)
include(${CMAKE_SOURCE_DIR}/cmake/external_libs/libtiff.cmake)

@ -19,6 +19,7 @@ option(ENABLE_AKG "enable akg" OFF)
option(ENABLE_DEBUGGER "enable debugger" OFF)
option(ENABLE_IBVERBS "enable IBVERBS for parameter server" OFF)
option(ENABLE_PYTHON "Enable python" ON)
option(ENABLE_ACL "enable acl" OFF)
if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
if (WIN32)

@ -58,6 +58,12 @@ install(
COMPONENT mindspore
)
install(
TARGETS mindspore_shared_lib
LIBRARY DESTINATION ${INSTALL_LIB_DIR}
COMPONENT mindspore
)
install(
TARGETS mindspore_gvar
DESTINATION ${INSTALL_LIB_DIR}
@ -194,7 +200,7 @@ if (ENABLE_SERVING OR ENABLE_TESTCASES)
endif ()
if (NOT ENABLE_GE)
if (ENABLE_D)
if (ENABLE_D OR ENABLE_ACL)
if (DEFINED ENV{ASCEND_CUSTOM_PATH})
set(ASCEND_PATH $ENV{ASCEND_CUSTOM_PATH})
else ()
@ -203,19 +209,26 @@ if (NOT ENABLE_GE)
set(ASCEND_DRIVER_PATH ${ASCEND_PATH}/driver/lib64/common)
install(
FILES
${CMAKE_BINARY_DIR}/graphengine/src/common/graph/libgraph.so
${CMAKE_BINARY_DIR}/graphengine/src/ge/common/libge_common.so
${CMAKE_BINARY_DIR}/graphengine/src/ge/ge_runtime/libge_runtime.so
${CMAKE_SOURCE_DIR}/build/graphengine/libc_sec.so
FILES ${CMAKE_SOURCE_DIR}/build/graphengine/libc_sec.so
DESTINATION ${INSTALL_LIB_DIR}
COMPONENT mindspore
)
if (ENABLE_D)
install(
TARGETS ms_profile
DESTINATION ${INSTALL_LIB_DIR}
COMPONENT mindspore
)
install(
FILES
${CMAKE_BINARY_DIR}/graphengine/src/common/graph/libgraph.so
${CMAKE_BINARY_DIR}/graphengine/src/ge/common/libge_common.so
${CMAKE_BINARY_DIR}/graphengine/src/ge/ge_runtime/libge_runtime.so
DESTINATION ${INSTALL_LIB_DIR}
COMPONENT mindspore
)
endif ()
elseif (ENABLE_TESTCASES)
install(
FILES
@ -287,6 +300,13 @@ if (EXISTS ${CMAKE_SOURCE_DIR}/mindspore/dataset)
)
endif ()
## Public header files
install(
DIRECTORY ${CMAKE_SOURCE_DIR}/include
DESTINATION ${INSTALL_BASE_DIR}
COMPONENT mindspore
)
if (ENABLE_SERVING)
install(
TARGETS ms_serving

@ -1 +1 @@
Subproject commit 423c0228e8c421f2b095e40d14e9fb3b563f63aa
Subproject commit 42d217fb8cec74b1c73685b8abe94d5f1520e9fe

@ -0,0 +1,113 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_INCLUDE_API_CELL_H
#define MINDSPORE_INCLUDE_API_CELL_H
#include <string>
#include <vector>
#include <map>
#include <memory>
#include "include/api/status.h"
#include "include/api/types.h"
namespace mindspore {
namespace api {
class InputAndOutput;
using Input = InputAndOutput;
using Output = InputAndOutput;
class MS_API CellBase {
public:
CellBase() = default;
virtual ~CellBase() = default;
virtual std::vector<Output> Construct(const std::vector<Input> &inputs) { return {}; }
virtual std::shared_ptr<CellBase> Clone() const = 0;
std::vector<Output> operator()(const std::vector<Input> &inputs) const;
};
template <class T>
class MS_API Cell : public CellBase {
public:
virtual ~Cell() = default;
std::shared_ptr<CellBase> Clone() const override {
return std::make_shared<T>(static_cast<const T&>(*this));
}
};
class MS_API ParameterCell final : public Cell<ParameterCell> {
public:
ParameterCell() = default;
~ParameterCell() override = default;
ParameterCell(const ParameterCell &);
ParameterCell &operator=(const ParameterCell &);
ParameterCell(ParameterCell &&);
ParameterCell &operator=(ParameterCell &&);
explicit ParameterCell(const Tensor &);
ParameterCell &operator=(const Tensor &);
explicit ParameterCell(Tensor &&);
ParameterCell &operator=(Tensor &&);
Tensor GetTensor() const { return tensor_; }
private:
Tensor tensor_;
};
class MS_API OpCellBase : public CellBase {
public:
explicit OpCellBase(const std::string &name) : name_(name) {}
~OpCellBase() override = default;
const std::string &GetOpType() const { return name_; }
protected:
std::string name_;
};
template <class T>
class MS_API OpCell : public OpCellBase, public std::enable_shared_from_this<T> {
public:
explicit OpCell(const std::string &name) : OpCellBase(name) {}
~OpCell() override = default;
std::shared_ptr<CellBase> Clone() const override {
return std::make_shared<T>(static_cast<const T&>(*this));
}
};
class MS_API InputAndOutput {
public:
InputAndOutput();
~InputAndOutput() = default;
// no explicit
InputAndOutput(const Tensor &); // NOLINT(runtime/explicit)
InputAndOutput(Tensor &&); // NOLINT(runtime/explicit)
InputAndOutput(const std::shared_ptr<CellBase> &, const std::vector<InputAndOutput> &, int32_t index);
int32_t GetIndex() const { return index_; }
void SetIndex(int32_t index) { index_ = index; }
private:
std::shared_ptr<CellBase> cell_;
std::vector<InputAndOutput> prev_;
int32_t index_;
};
} // namespace api
} // namespace mindspore
#endif // MINDSPORE_INCLUDE_API_CELL_H

@ -0,0 +1,58 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_INCLUDE_API_MODEL_H
#define MINDSPORE_INCLUDE_API_MODEL_H
#include <string>
#include <vector>
#include <map>
#include <memory>
#include "include/api/status.h"
#include "include/api/types.h"
namespace mindspore {
namespace api {
class ModelImpl;
// todo: minddata c++ interface
class DataSet {};
class NetWork {};
class MS_API Model {
public:
Model(const std::string &device_type, uint32_t device_id);
Model(NetWork network, const std::string &device_type, uint32_t device_id);
~Model();
Model(const Model &) = delete;
void operator=(const Model &) = delete;
Status LoadModel(const Buffer &model_data, ModelType type, const std::map<std::string, std::string> &options);
Status LoadModel(const std::string &file_name, ModelType type, const std::map<std::string, std::string> &options);
Status UnloadModel();
Status Train(const DataSet &dataset, std::map<std::string, Buffer> *outputs);
Status Eval(const DataSet &dataset, std::map<std::string, Buffer> *outputs);
Status Predict(const std::map<std::string, Buffer> &inputs, std::map<std::string, Buffer> *outputs);
Status Predict(const std::vector<Buffer> &inputs, std::map<std::string, Buffer> *outputs);
Status GetInputsInfo(std::vector<Tensor> *tensor_list) const;
Status GetOutputsInfo(std::vector<Tensor> *tensor_list) const;
private:
std::shared_ptr<ModelImpl> impl_;
};
} // namespace api
} // namespace mindspore
#endif // MINDSPORE_INCLUDE_API_MODEL_H

@ -0,0 +1,50 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_INCLUDE_API_OPS_OPS_H
#define MINDSPORE_INCLUDE_API_OPS_OPS_H
#include <string>
#include <vector>
#include <map>
#include <memory>
#include "include/api/status.h"
#include "include/api/types.h"
#include "include/api/cell.h"
namespace mindspore {
namespace api {
struct MS_API Conv2D : public OpCell<Conv2D> {
Conv2D() : OpCell("Conv2D") {}
~Conv2D() override = default;
std::vector<Output> Construct(const std::vector<Input> &inputs) override;
Conv2D(int out_channel, const std::vector<int> &kernel_size, int mode = 1, const std::string &pad_mode = "valid",
const std::vector<int> &pad = {0, 0, 0, 0}, const std::vector<int> &stride = {1, 1, 1, 1},
const std::vector<int> &dilation = {1, 1, 1, 1}, int group = 1);
Output operator()(const Input &, const Input &) const;
int out_channel;
std::vector<int> kernel_size;
int mode = 1;
std::string pad_mode = "valid";
std::vector<int> pad = {0, 0, 0, 0};
std::vector<int> stride = {1, 1, 1, 1};
std::vector<int> dilation = {1, 1, 1, 1};
int group = 1;
};
} // namespace api
} // namespace mindspore
#endif // MINDSPORE_INCLUDE_API_OPS_OPS_H

@ -0,0 +1,38 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_INCLUDE_API_SERIALIZATION_H
#define MINDSPORE_INCLUDE_API_SERIALIZATION_H
#include <string>
#include <vector>
#include <map>
#include <memory>
#include "include/api/status.h"
#include "include/api/types.h"
#include "include/api/model.h"
namespace mindspore {
namespace api {
class MS_API Serialization {
public:
static Status LoadCheckPoint(const std::string &ckpt_file, std::map<std::string, Buffer> *parameters);
static Status SetParameters(const std::map<std::string, Buffer> &parameters, Model *model);
static Status ExportModel(const Model &model, ModelType model_type, Buffer *model_data);
static Status ExportModel(const Model &model, ModelType model_type, const std::string &model_file);
};
} // namespace api
} // namespace mindspore
#endif // MINDSPORE_INCLUDE_API_SERIALIZATION_H

@ -0,0 +1,53 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_INCLUDE_API_STATUS_H
#define MINDSPORE_INCLUDE_API_STATUS_H
#include <string>
namespace mindspore {
namespace api {
enum StatusCode {
SUCCESS = 0,
FAILED,
INVALID_INPUTS,
// insert new status code here
UNKNOWN = 0xFFFFFFFF
};
class Status {
public:
Status() : status_code_(FAILED) {}
Status(enum StatusCode status_code, const std::string &status_msg = "") // NOLINT(runtime/explicit)
: status_code_(status_code), status_msg_(status_msg) {}
~Status() = default;
bool IsSuccess() const { return status_code_ == SUCCESS; }
enum StatusCode StatusCode() const { return status_code_; }
std::string StatusMessage() const { return status_msg_; }
bool operator==(const Status &other) const { return status_code_ == other.status_code_; }
bool operator==(enum StatusCode other_code) const { return status_code_ == other_code; }
bool operator!=(const Status &other) const { return status_code_ != other.status_code_; }
bool operator!=(enum StatusCode other_code) const { return status_code_ != other_code; }
operator bool() const = delete;
private:
enum StatusCode status_code_;
std::string status_msg_;
};
} // namespace api
} // namespace mindspore
#endif // MINDSPORE_INCLUDE_API_STATUS_H

@ -0,0 +1,119 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_INCLUDE_API_TYPES_H
#define MINDSPORE_INCLUDE_API_TYPES_H
#include <string>
#include <vector>
#include <memory>
#define MS_API __attribute__((visibility("default")))
namespace mindspore {
namespace api {
enum ModelType {
kMindIR = 0,
kAIR = 1,
kOM = 2,
kONNX = 3,
// insert new data type here
kUnknownType = 0xFFFFFFFF
};
enum DataType {
kMsUnknown = 0,
kMsBool = 1,
kMsInt8 = 2,
kMsInt16 = 3,
kMsInt32 = 4,
kMsInt64 = 5,
kMsUint8 = 6,
kMsUint16 = 7,
kMsUint32 = 8,
kMsUint64 = 9,
kMsFloat16 = 10,
kMsFloat32 = 11,
kMsFloat64 = 12,
// insert new data type here
kInvalidDataType = 0xFFFFFFFF
};
class MS_API Tensor {
public:
Tensor();
Tensor(const std::string &name, DataType type, const std::vector<int64_t> &shape, const void *data, size_t data_len);
~Tensor();
const std::string &Name() const;
void SetName(const std::string &name);
api::DataType DataType() const;
void SetDataType(api::DataType type);
const std::vector<int64_t> &Shape() const;
void SetShape(const std::vector<int64_t> &shape);
const void *Data() const;
void *MutableData();
size_t DataSize() const;
bool ResizeData(size_t data_len);
bool SetData(const void *data, size_t data_len);
int64_t ElementNum() const;
static int GetTypeSize(api::DataType type);
Tensor Clone() const;
private:
class Impl;
std::shared_ptr<Impl> impl_;
};
class MS_API Buffer {
public:
Buffer();
Buffer(const void *data, size_t data_len);
~Buffer();
const void *Data() const;
void *MutableData();
size_t DataSize() const;
bool ResizeData(size_t data_len);
bool SetData(const void *data, size_t data_len);
Buffer Clone() const;
private:
class Impl;
std::shared_ptr<Impl> impl_;
};
constexpr auto kModelOptionDumpCfgPath = "mindspore.option.dump_config_file_path";
constexpr auto kModelOptionDvppCfgPath = "mindspore.option.dvpp_config_file_path";
constexpr auto kModelOptionInsertOpCfgPath = "mindspore.option.insert_op_config_file_path"; // aipp config file
constexpr auto kModelOptionInputFormat = "mindspore.option.input_format"; // nchw or nhwc
// Mandatory while dynamic batch: e.g. "input_op_name1: n1,c2,h3,w4;input_op_name2: n4,c3,h2,w1"
constexpr auto kModelOptionInputShape = "mindspore.option.input_shape";
constexpr auto kModelOptionDynamicBatchSize = "mindspore.option.dynamic_batch_size";
constexpr auto kModelOptionDynamicImageSize = "mindspore.option.dynamic_image_size";
constexpr auto kModelOptionDynamicDims = "mindspore.option.dynamic_dims";
constexpr auto kModelOptionSerialInput = "mindspore.option.serial_inputs_name"; // separated by ';'
constexpr auto kModelOptionOutputNode = "mindspore.option.output_node"; // e.g. "node_name1:0;node_name2:1"
constexpr auto kModelOptionOutputType = "mindspore.option.output_type"; // "FP32", "UINT8" or "FP16", default as "FP32"
} // namespace api
} // namespace mindspore
#endif // MINDSPORE_INCLUDE_API_TYPES_H

@ -295,6 +295,9 @@ else ()
target_link_libraries(mindspore ibverbs rdmacm)
endif()
endif()
if (ENABLE_ACL)
target_link_libraries(_c_expression PRIVATE graph)
endif ()
target_link_libraries(_c_expression PRIVATE -Wl,--whole-archive mindspore proto_input -Wl,--no-whole-archive)
target_link_libraries(_c_expression PRIVATE mindspore::pybind11_module)
target_link_libraries(_c_expression PRIVATE mindspore_gvar)
@ -359,3 +362,5 @@ if (CMAKE_SYSTEM_NAME MATCHES "Linux")
elseif (CMAKE_SYSTEM_NAME MATCHES "Darwin")
set_target_properties(inference PROPERTIES MACOSX_RPATH ON)
endif ()
add_subdirectory(cxx_api)

@ -0,0 +1,62 @@
# build mindspore_shared_lib
set(LOAD_ONNX_SRC
${CMAKE_SOURCE_DIR}/mindspore/ccsrc/utils/load_onnx/anf_converter.cc
${CMAKE_SOURCE_DIR}/mindspore/ccsrc/utils/load_onnx/anf_model_parser.cc
)
file(GLOB_RECURSE API_OPS_SRC ${CMAKE_CURRENT_SOURCE_DIR} "ops/*.cc")
if (ENABLE_ACL)
file(GLOB_RECURSE API_ACL_SRC ${CMAKE_CURRENT_SOURCE_DIR} "model/acl/*.cc")
endif ()
set(MSLIB_SRC ${CMAKE_CURRENT_SOURCE_DIR}/types.cc
${CMAKE_CURRENT_SOURCE_DIR}/cell.cc
${CMAKE_CURRENT_SOURCE_DIR}/serialization.cc
${CMAKE_CURRENT_SOURCE_DIR}/model/model.cc
${API_ACL_SRC}
${API_OPS_SRC}
${LOAD_ONNX_SRC})
add_library(mindspore_shared_lib SHARED ${MSLIB_SRC})
set_target_properties(mindspore_shared_lib PROPERTIES OUTPUT_NAME mindspore PUBLIC_HEADER "${API_INCLUDE}")
target_link_libraries(mindspore_shared_lib PRIVATE ${PYTHON_LIBRARIES} ${SECUREC_LIBRARY}
-Wl,--whole-archive mindspore -Wl,--no-whole-archive mindspore_gvar mindspore::protobuf)
if (ENABLE_CPU)
target_link_libraries(mindspore_shared_lib PRIVATE mindspore::dnnl mindspore::mkldnn)
endif ()
if (USE_GLOG)
target_link_libraries(mindspore_shared_lib PRIVATE mindspore::glog)
endif ()
if (CMAKE_SYSTEM_NAME MATCHES "Linux")
target_link_options(mindspore_shared_lib PRIVATE -Wl,-init,common_log_init)
endif ()
if (ENABLE_ACL)
if (DEFINED ENV{ASCEND_CUSTOM_PATH})
set(ASCEND_PATH $ENV{ASCEND_CUSTOM_PATH})
else ()
set(ASCEND_PATH /usr/local/Ascend)
endif ()
set(ACL_LIB_DIR ${ASCEND_PATH}/acllib/)
set(ATLAS_ACL_LIB_DIR ${ASCEND_PATH}/ascend-toolkit/latest/acllib)
set(ATC_DIR ${ASCEND_PATH}/atc/)
set(ATLAS_ATC_DIR ${ASCEND_PATH}/ascend-toolkit/latest/atc)
MESSAGE("acl lib dir " ${ACL_LIB_DIR} ", atc dir " ${ATC_DIR})
MESSAGE("atlas acl lib dir " ${ATLAS_ACL_LIB_DIR} ", atc dir " ${ATLAS_ATC_DIR})
include_directories(${ACL_LIB_DIR}/include/)
include_directories(${ATLAS_ACL_LIB_DIR}/include/)
add_compile_definitions(ENABLE_DVPP_INTERFACE)
find_library(acl libascendcl.so ${ACL_LIB_DIR}/lib64 ${ATLAS_ACL_LIB_DIR}/lib64)
find_library(acl_retr libacl_retr.so ${ACL_LIB_DIR}/lib64 ${ATLAS_ACL_LIB_DIR}/lib64)
find_library(acl_cblas libacl_cblas.so ${ACL_LIB_DIR}/lib64 ${ATLAS_ACL_LIB_DIR}/lib64)
find_library(acl_dvpp libacl_dvpp.so ${ACL_LIB_DIR}/lib64 ${ATLAS_ACL_LIB_DIR}/lib64)
find_library(acl_runtime libruntime.so ${ACL_LIB_DIR}/lib64 ${ATLAS_ACL_LIB_DIR}/lib64)
find_library(ge_compiler libge_compiler.so ${ATC_DIR}/lib64 ${ATLAS_ATC_DIR}/lib64)
target_link_libraries(mindspore_shared_lib PRIVATE ${acl} ${acl_retr} ${acl_cblas} ${acl_dvpp} ${acl_runtime}
${ge_compiler} mindspore::jpeg_turbo)
endif ()

@ -0,0 +1,63 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "include/api/cell.h"
namespace mindspore::api {
std::vector<Output> CellBase::operator()(const std::vector<Input> &inputs) const { return Clone()->Construct(inputs); }
ParameterCell::ParameterCell(const ParameterCell &cell) : tensor_(cell.tensor_.Clone()) {}
ParameterCell &ParameterCell::operator=(const ParameterCell &cell) {
if (&cell == this) {
return *this;
}
tensor_ = cell.tensor_.Clone();
return *this;
}
ParameterCell::ParameterCell(ParameterCell &&cell) : tensor_(cell.tensor_) {}
ParameterCell &ParameterCell::operator=(ParameterCell &&cell) {
if (&cell == this) {
return *this;
}
tensor_ = cell.tensor_;
return *this;
}
ParameterCell::ParameterCell(const Tensor &tensor) : tensor_(tensor.Clone()) {}
ParameterCell &ParameterCell::operator=(const Tensor &tensor) {
tensor_ = tensor.Clone();
return *this;
}
ParameterCell::ParameterCell(Tensor &&tensor) : tensor_(tensor) {}
ParameterCell &ParameterCell::operator=(Tensor &&tensor) {
tensor_ = tensor;
return *this;
}
InputAndOutput::InputAndOutput() : cell_(nullptr), prev_(), index_(-1) {}
InputAndOutput::InputAndOutput(const Tensor &tensor)
: cell_(std::make_shared<ParameterCell>(tensor.Clone())), prev_(), index_(-1) {}
InputAndOutput::InputAndOutput(Tensor &&tensor) : cell_(std::make_shared<ParameterCell>(tensor)), prev_(), index_(-1) {}
InputAndOutput::InputAndOutput(const std::shared_ptr<CellBase> &cell, const std::vector<InputAndOutput> &prev,
int32_t index)
: cell_(cell), prev_(prev), index_(index) {}
} // namespace mindspore::api

File diff suppressed because it is too large Load Diff

@ -0,0 +1,99 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_CCSRC_CXX_API_ACL_MODEL_H
#define MINDSPORE_CCSRC_CXX_API_ACL_MODEL_H
#include <vector>
#include <string>
#include <unordered_map>
#include <utility>
#include <memory>
#include <map>
#include "ir/anf.h"
#include "include/api/status.h"
#include "cxx_api/model/model_impl.h"
#include "cxx_api/model/acl/dvpp_process.h"
#include "cxx_api/model/acl/model_process.h"
#include "cxx_api/model/acl/model_converter.h"
#include "cxx_api/model/acl/acl_model_options.h"
#include "ir/tensor.h"
namespace mindspore::api {
class AclModel : public ModelImpl {
public:
explicit AclModel(uint32_t device_id)
: init_flag_(false),
load_flag_(false),
device_type_("AscendCL"),
device_id_(device_id),
context_(nullptr),
stream_(nullptr),
acl_env_(nullptr),
model_process_(),
dvpp_process_(),
model_converter_(),
options_(nullptr) {}
~AclModel() = default;
Status LoadModel(const Buffer &model_data, ModelType type,
const std::map<std::string, std::string> &options) override;
Status LoadModel(const std::string &file_name, ModelType type,
const std::map<std::string, std::string> &options) override;
Status UnloadModel() override;
Status Train(const DataSet &dataset, std::map<std::string, Buffer> *outputs) override;
Status Eval(const DataSet &dataset, std::map<std::string, Buffer> *outputs) override;
Status Predict(const std::map<std::string, Buffer> &inputs, std::map<std::string, Buffer> *outputs) override;
Status GetInputsInfo(std::vector<Tensor> *tensor_list) const override;
Status GetOutputsInfo(std::vector<Tensor> *tensor_list) const override;
private:
bool init_flag_;
bool load_flag_;
std::string device_type_;
int32_t device_id_;
aclrtContext context_;
aclrtStream stream_;
class AclEnvGuard;
std::shared_ptr<AclEnvGuard> acl_env_;
static std::weak_ptr<AclEnvGuard> global_acl_env_;
static std::mutex global_acl_env_mutex_;
ModelProcess model_process_;
DvppProcess dvpp_process_;
ModelConverter model_converter_;
std::unique_ptr<AclModelOptions> options_;
Status InitEnv();
Status FinalizeEnv();
};
class AclModel::AclEnvGuard {
public:
explicit AclEnvGuard(const std::string &cfg_file);
~AclEnvGuard();
aclError GetErrno() const { return errno_; }
private:
aclError errno_;
};
API_REG_MODEL(AscendCL, AclModel);
} // namespace mindspore::api
#endif // MINDSPORE_CCSRC_CXX_API_ACL_MODEL_H

@ -0,0 +1,66 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "cxx_api/model/acl/acl_model_options.h"
#include <memory>
#include "external/ge/ge_api_types.h"
namespace mindspore::api {
static std::string ParseOption(const std::map<std::string, std::string> &options, const std::string &key) {
auto iter = options.find(key);
if (iter != options.end()) {
return iter->second;
}
return "";
}
AclModelOptions::AclModelOptions(const std::map<std::string, std::string> &options) {
dump_cfg_path = ParseOption(options, kModelOptionDumpCfgPath);
dvpp_cfg_path = ParseOption(options, kModelOptionDvppCfgPath);
output_node = ParseOption(options, kModelOptionOutputNode);
// to acl
insert_op_cfg_path = ParseOption(options, kModelOptionInsertOpCfgPath);
input_format = ParseOption(options, kModelOptionInputFormat);
input_shape = ParseOption(options, kModelOptionInputShape);
dynamic_batch_size = ParseOption(options, kModelOptionInputShape);
dynamic_image_size = ParseOption(options, kModelOptionInputShape);
dynamic_dims = ParseOption(options, kModelOptionInputShape);
serial_nodes_name = ParseOption(options, kModelOptionSerialInput);
output_type = ParseOption(options, kModelOptionOutputType);
}
// Translate the populated fields into the key set expected by GE's IR build
// API. Fields that were never set (empty strings) are omitted entirely so the
// builder only sees options the caller actually supplied.
std::map<std::string, std::string> AclModelOptions::GenAclOptions() const {
  // Each entry pairs a member field (by address) with its GE IR build key.
  const std::map<std::string const *, std::string> option_mapping = {
    {&insert_op_cfg_path, ge::ir_option::INSERT_OP_FILE},
    {&input_format, ge::ir_option::INPUT_FORMAT},
    {&input_shape, ge::ir_option::INPUT_SHAPE},
    {&dynamic_batch_size, ge::ir_option::DYNAMIC_BATCH_SIZE},
    {&dynamic_image_size, ge::ir_option::DYNAMIC_IMAGE_SIZE},
    {&dynamic_dims, ge::ir_option::DYNAMIC_DIMS},
    {&serial_nodes_name, ge::ir_option::INPUT_FP16_NODES},
    {&output_type, ge::ir_option::OUTPUT_TYPE},
  };
  std::map<std::string, std::string> result;
  for (const auto &entry : option_mapping) {
    const std::string *field = entry.first;
    if (field != nullptr && !field->empty()) {
      result.emplace(entry.second, *field);
    }
  }
  return result;
}
} // namespace mindspore::api

@ -0,0 +1,47 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_CCSRC_CXXAPI_SESSION_ACL_OPTION_PARSER_H
#define MINDSPORE_CCSRC_CXXAPI_SESSION_ACL_OPTION_PARSER_H
#include <vector>
#include <string>
#include <map>
#include "include/api/types.h"
#include "include/api/status.h"
namespace mindspore::api {
// Plain-data holder for the string options accepted by the ACL model backend.
// All fields are parsed from a generic key/value map in the constructor; the
// build-related subset is translated into GE IR build keys by GenAclOptions().
struct AclModelOptions {
  std::string dump_cfg_path;  // path to a dump configuration file — presumably for debug dumps; confirm with consumer
  std::string dvpp_cfg_path;  // path to a DVPP preprocessing configuration file
  std::string output_node;  // todo: at convert.cc::BuildGraph(), no atc options
  // build options — each non-empty field below maps to one ge::ir_option key
  std::string insert_op_cfg_path;  // maps to INSERT_OP_FILE
  std::string input_format;        // maps to INPUT_FORMAT
  std::string input_shape;         // maps to INPUT_SHAPE
  std::string dynamic_batch_size;  // maps to DYNAMIC_BATCH_SIZE
  std::string dynamic_image_size;  // maps to DYNAMIC_IMAGE_SIZE
  std::string dynamic_dims;        // maps to DYNAMIC_DIMS
  std::string serial_nodes_name;   // maps to INPUT_FP16_NODES
  std::string output_type;         // maps to OUTPUT_TYPE
  // Parses every field from the given option map; missing keys become "".
  explicit AclModelOptions(const std::map<std::string, std::string> &options);
  ~AclModelOptions() = default;
  // Returns only the non-empty build options, keyed by GE IR build key names.
  std::map<std::string, std::string> GenAclOptions() const;
};
} // namespace mindspore::api
#endif // MINDSPORE_CCSRC_CXXAPI_SESSION_ACL_OPTION_PARSER_H

File diff suppressed because it is too large Load Diff

@ -0,0 +1,160 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_CCSRC_CXXAPI_SESSION_ACL_DVPP_PROCESS_H
#define MINDSPORE_CCSRC_CXXAPI_SESSION_ACL_DVPP_PROCESS_H
#include <vector>
#include <string>
#include <map>
#include "acl/acl.h"
#include "acl/acl_mdl.h"
#include "acl/acl_rt.h"
#include "acl/ops/acl_dvpp.h"
#include "include/api/status.h"
namespace mindspore::api {
// Parameters for JPEG decode: only the target pixel format is configurable.
struct DvppDecodePara {
  acldvppPixelFormat pixel_format = PIXEL_FORMAT_YUV_SEMIPLANAR_420;
};
// Target dimensions for the resize stage (0 means unset).
struct DvppResizePara {
  uint32_t output_width = 0;
  uint32_t output_height = 0;
};
// Selects how the crop rectangle is determined.
enum DvppCropType {
  // crop left,top,right,bottom is given in config
  kDvppCropTypeOffset = 0,
  // crop left,top,right,bottom is calculated by image width/height and output crop width/height
  kDvppCropTypeCentre = 1,
};
// A rectangular region of interest expressed as absolute pixel coordinates.
struct DvppRoiArea {
  uint32_t left = 0;
  uint32_t top = 0;
  uint32_t right = 0;
  uint32_t bottom = 0;
};
// Crop specification; which fields are meaningful depends on crop_type.
struct DvppCropInfo {
  DvppCropType crop_type = kDvppCropTypeOffset;
  DvppRoiArea crop_area;     // when kDvppCropTypeOffset
  uint32_t crop_width = 0;   // when kDvppCropTypeCentre
  uint32_t crop_height = 0;  // when kDvppCropTypeCentre
};
// Parameters for the crop stage: crop rectangle plus output image size.
struct DvppCropPara {
  DvppCropInfo crop_info;
  uint32_t output_width = 0;
  uint32_t output_height = 0;
};
// Parameters for combined crop-and-paste: source crop rectangle, destination
// paste rectangle, and output image size.
struct DvppCropAndPastePara {
  DvppCropInfo crop_info;
  DvppRoiArea paste_area;
  uint32_t output_width = 0;
  uint32_t output_height = 0;
};
// Drives an Ascend DVPP preprocessing pipeline: JPEG decode followed by one of
// resize, crop, or crop-and-paste (exactly one of the to_*_flag_ members may
// be set — see the comment below). Resources are tied to an aclrtStream
// supplied via InitResource() and released in Finalize()/DestroyResource().
class DvppProcess {
 public:
  DvppProcess();
  ~DvppProcess();
  // Binds the process to an ACL runtime stream and acquires DVPP channel
  // resources. Must be called before any Init*Para()/Process() call —
  // presumably; confirm against the .cc (not visible here).
  Status InitResource(aclrtStream stream);
  // Releases all DVPP/ACL resources acquired by this object.
  void Finalize();
  Status InitJpegDecodePara(const DvppDecodePara &decode_para);  // jpeg decode + (resize | crop)
  Status InitResizePara(const DvppResizePara &resize_para);      // jpeg decode + resize
  Status InitCropPara(const DvppCropPara &crop_para);            // jpeg decode + crop
  Status InitCropAndPastePara(const DvppCropAndPastePara &crop_and_paste_para);  // jpeg decode + crop&paste
  // Configures the whole pipeline from a JSON config string/file.
  Status InitWithJsonConfig(const std::string &json_config);
  // output device buffer will be destroy by DvppProcess itself.
  Status Process(const void *pic_buffer, size_t pic_buffer_size, void **output_device_buffer, size_t *output_size);
  // Batch variant: processes several pictures into one contiguous device buffer.
  Status Process(const std::vector<const void *> &pic_buffer_list, const std::vector<size_t> &pic_buffer_size_list,
                 void **output_device_buffer, size_t *output_size);
  // True once a pipeline configuration has been loaded.
  bool HasLoaded() const { return loaded_flag_; }
 private:
  bool loaded_flag_ = false;
  uint32_t pic_width_ = 0;
  uint32_t pic_height_ = 0;
  // Configuration captured by the Init*Para() calls above.
  DvppDecodePara decode_para_;
  DvppResizePara resize_para_;
  DvppCropPara crop_para_;
  DvppCropAndPastePara crop_and_paste_para_;
  // only one of the resize or crop flag can be true
  bool to_resize_flag_ = false;
  bool to_crop_flag_ = false;
  bool to_crop_and_paste_flag_ = false;
  // Device-side buffers and descriptors for the decode stage.
  void *input_pic_dev_buffer_ = nullptr;
  uint32_t input_pic_buffer_size_ = 0;
  uint32_t decode_output_buffer_size_ = 0;
  void *decode_output_buffer_dev_ = nullptr;
  acldvppPicDesc *decode_output_desc_ = nullptr;
  // VPC (resize/crop/paste) configuration and output buffers.
  acldvppResizeConfig *resize_config_ = nullptr;
  acldvppRoiConfig *crop_area_ = nullptr;
  acldvppRoiConfig *paste_area_ = nullptr;
  acldvppPicDesc *vpc_output_desc_ = nullptr;
  void *vpc_output_buffer_dev_ = nullptr;  // vpc_output_buffer_size_ length
  uint32_t vpc_output_buffer_size_ = 0;
  void *batch_vpc_output_buffer_dev_ = nullptr;  // batch_size_ * vpc_output_buffer_size_ length
  uint32_t batch_size_ = 0;
  aclrtStream stream_ = nullptr;
  acldvppChannelDesc *dvpp_channel_desc_ = nullptr;
  // Internal helpers — alignment and buffer sizing for DVPP stride rules.
  uint32_t AlignmentHelper(uint32_t org_size, uint32_t alignment) const;
  uint32_t GetImageBufferSize(uint32_t stride_width, uint32_t stride_height, acldvppPixelFormat pixel_format) const;
  Status GetPicDescStride(uint32_t width, uint32_t height, uint32_t *stride_width, uint32_t *stride_height);
  Status GetPicDescStrideDecode(uint32_t width, uint32_t height, uint32_t *stride_width, uint32_t *stride_height);
  // Copies the host picture buffer to device memory (name likely a typo for
  // "InitInputBuffer" — kept as-is; the definition lives in the .cc file).
  Status InputInputBuffer(const void *pic_buffer, size_t pic_buffer_size);
  Status InitDecodeOutputDesc(uint32_t image_width,
                              uint32_t image_height);  // decode_output_desc_, decode_output_buffer_dev_
  // ROI validation/adjustment for crop and paste rectangles.
  Status CheckRoiAreaWidthHeight(uint32_t width, uint32_t height);
  Status CheckAndAdjustRoiArea(DvppRoiArea *area);
  Status UpdateCropArea(uint32_t image_width, uint32_t image_height);
  Status CheckResizeImageInfo(uint32_t image_width, uint32_t image_height) const;
  void DestroyDecodeDesc();
  Status InitVpcOutputDesc(uint32_t output_width, uint32_t output_height,
                           acldvppPixelFormat pixel_format);  // vpc_output_desc_, vpc_output_buffer_dev_batch_
  Status InitRoiAreaConfig(const DvppRoiArea &init_para, acldvppRoiConfig **roi_area);
  Status InitCommonCropPara(uint32_t out_width, uint32_t out_height, DvppCropInfo *crop_info);
  Status InitResizeOutputDesc();        // vpc_output_desc_, vpc_output_buffer_dev_, resize_config
  Status InitCropOutputDesc();          // vpc_output_desc_, vpc_output_buffer_dev_, crop_area_
  Status InitCropAndPasteOutputDesc();  // vpc_output_desc_, vpc_output_buffer_dev_, crop_area_, paste_area_
  void DestroyVpcOutputDesc();
  // Per-stage execution on the bound stream.
  Status ProcessDecode();
  Status ProcessResize();
  Status ProcessCrop();
  Status ProcessCropAndPaste();
  void DestroyResource();
  // Parses JPEG headers to obtain the source image dimensions.
  Status GetJpegWidthHeight(const void *pic_buffer, size_t pic_buffer_size, uint32_t *image_width,
                            uint32_t *image_height);
};
} // namespace mindspore::api
#endif // MINDSPORE_CCSRC_CXXAPI_SESSION_ACL_DVPP_PROCESS_H

File diff suppressed because it is too large Load Diff

@ -0,0 +1,51 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_CCSRC_CXXAPI_SESSION_ACL_MODEL_CONVERTER_H
#define MINDSPORE_CCSRC_CXXAPI_SESSION_ACL_MODEL_CONVERTER_H
#include <vector>
#include <string>
#include <map>
#include <memory>
#include "include/api/types.h"
#include "include/api/status.h"
#include "mindspore/core/ir/func_graph.h"
#include "transform/graph_ir/types.h"
#include "external/ge/ge_ir_build.h"
#include "cxx_api/model/acl/acl_model_options.h"
namespace mindspore::api {
// Converts serialized models (MindIR or Ascend IR) into an ACL-loadable
// offline-model buffer, routing through FuncGraph -> AIR (GE DfGraph) ->
// compiled model. Build behavior is tuned by an optional AclModelOptions.
class ModelConverter {
 public:
  ModelConverter() : options_(nullptr) {}
  // Converts a serialized MindIR model into an ACL model buffer.
  Buffer LoadMindIR(const Buffer &model_data);
  // Converts a serialized Ascend IR (AIR) model into an ACL model buffer.
  Buffer LoadAscendIR(const Buffer &model_data);
  // Non-owning pointer: caller keeps `options` alive while this object is used.
  void set_options(AclModelOptions *options) { options_ = options; }
  // Reads an entire file into a Buffer.
  static Buffer ReadFile(const std::string &file);
  // Registers all operators needed for conversion — presumably with the GE/ACL
  // runtime; confirm against the .cc (definition not visible here).
  static void RegAllOp();
 private:
  // MindIR bytes -> in-memory FuncGraph.
  std::shared_ptr<FuncGraph> ConvertMindIrToFuncGraph(const Buffer &model_data);
  // FuncGraph -> GE DfGraph (AIR representation).
  transform::DfGraphPtr ConvertFuncGraphToAIR(const FuncGraphPtr &anf_graph);
  // DfGraph + ACL build options -> compiled offline-model bytes.
  Buffer BuildAirModel(const transform::DfGraphPtr &graph, const std::map<std::string, std::string> &acl_options);
  AclModelOptions *options_;  // not owned; may be nullptr (defaults used)
};
} // namespace mindspore::api
#endif // MINDSPORE_CCSRC_CXXAPI_SESSION_ACL_MODEL_CONVERTER_H

File diff suppressed because it is too large Load Diff

Some files were not shown because too many files have changed in this diff Show More

Loading…
Cancel
Save