commit 7e3da978c8
@@ -0,0 +1,21 @@
cmake_minimum_required(VERSION 3.14)
project(QuickStartCpp)

if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU" AND CMAKE_CXX_COMPILER_VERSION VERSION_LESS 7.3.0)
    message(FATAL_ERROR "GCC version ${CMAKE_CXX_COMPILER_VERSION} must not be less than 7.3.0")
endif()

include_directories(${CMAKE_CURRENT_SOURCE_DIR})
file(GLOB_RECURSE QUICK_START_CXX ${CMAKE_CURRENT_SOURCE_DIR}/*.cc)

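# Import the prebuilt libmindspore-lite.a as an IMPORTED static target; it is linked below with --whole-archive so the linker keeps all of its object files.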
add_library(mindspore-lite STATIC IMPORTED)
set_target_properties(mindspore-lite PROPERTIES IMPORTED_LOCATION
    ${CMAKE_CURRENT_SOURCE_DIR}/lib/libmindspore-lite.a)

add_executable(mindspore_quick_start_cpp ${QUICK_START_CXX})

target_link_libraries(
    mindspore_quick_start_cpp
    -Wl,--whole-archive mindspore-lite -Wl,--no-whole-archive
    pthread
)
@@ -0,0 +1,43 @@
#!/bin/bash
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================

BASEPATH=$(cd "$(dirname $0)"; pwd)
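# Parse the MindSpore Lite version from include/version.h; it selects the release package to download.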
get_version() {
    VERSION_MAJOR=$(grep "const int ms_version_major =" ${BASEPATH}/../../include/version.h | tr -dc "[0-9]")
    VERSION_MINOR=$(grep "const int ms_version_minor =" ${BASEPATH}/../../include/version.h | tr -dc "[0-9]")
    VERSION_REVISION=$(grep "const int ms_version_revision =" ${BASEPATH}/../../include/version.h | tr -dc "[0-9]")
    VERSION_STR=${VERSION_MAJOR}.${VERSION_MINOR}.${VERSION_REVISION}
}
get_version
MODEL_DOWNLOAD_URL="https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_openimage_lite/1.1/mobilenetv2.ms"
MINDSPORE_LITE_DOWNLOAD_URL="https://ms-release.obs.cn-north-4.myhuaweicloud.com/${VERSION_STR}/MindSpore/lite/release/linux/mindspore-lite-${VERSION_STR}-inference-linux-x64.tar.gz"

mkdir -p build
mkdir -p lib
mkdir -p model
if [ ! -e ${BASEPATH}/model/mobilenetv2.ms ]; then
    wget -c -O ${BASEPATH}/model/mobilenetv2.ms --no-check-certificate ${MODEL_DOWNLOAD_URL}
fi
if [ ! -e ${BASEPATH}/build/mindspore-lite-${VERSION_STR}-inference-linux-x64.tar.gz ]; then
    wget -c -O ${BASEPATH}/build/mindspore-lite-${VERSION_STR}-inference-linux-x64.tar.gz --no-check-certificate ${MINDSPORE_LITE_DOWNLOAD_URL}
fi
tar xzvf ${BASEPATH}/build/mindspore-lite-${VERSION_STR}-inference-linux-x64.tar.gz -C ${BASEPATH}/build/
cp -r ${BASEPATH}/build/mindspore-lite-${VERSION_STR}-inference-linux-x64-avx/lib/libmindspore-lite.a ${BASEPATH}/lib
cp -r ${BASEPATH}/build/mindspore-lite-${VERSION_STR}-inference-linux-x64-avx/include ${BASEPATH}/

cd ${BASEPATH}/build
cmake ${BASEPATH}
make
@@ -0,0 +1,179 @@
/**
 * Copyright 2021 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <algorithm>
#include <random>
#include <iostream>
#include <fstream>
#include <cstring>
#include <cstdlib>  // malloc/free
#include <memory>   // std::unique_ptr
#include <vector>   // std::vector
#include "include/errorcode.h"
#include "include/model.h"
#include "include/context.h"
#include "include/lite_session.h"

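// Read an entire file into a newly allocated buffer; the caller takes ownership of the returned memory.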
char *ReadFile(const char *file, size_t *size) {
  if (file == nullptr) {
    std::cerr << "file is nullptr." << std::endl;
    return nullptr;
  }

  std::ifstream ifs(file);
  if (!ifs.good()) {
    std::cerr << "file: " << file << " does not exist." << std::endl;
    return nullptr;
  }

  if (!ifs.is_open()) {
    std::cerr << "file: " << file << " open failed." << std::endl;
    return nullptr;
  }

  ifs.seekg(0, std::ios::end);
  *size = ifs.tellg();
  std::unique_ptr<char[]> buf(new (std::nothrow) char[*size]);
  if (buf == nullptr) {
    std::cerr << "malloc buf failed, file: " << file << std::endl;
    ifs.close();
    return nullptr;
  }

  ifs.seekg(0, std::ios::beg);
  ifs.read(buf.get(), *size);
  ifs.close();

  return buf.release();
}

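// Fill a raw buffer with size / sizeof(T) values drawn from the given distribution.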
template <typename T, typename Distribution>
void GenerateRandomData(int size, void *data, Distribution distribution) {
  std::mt19937 random_engine;
  int elements_num = size / sizeof(T);
  (void)std::generate_n(static_cast<T *>(data), elements_num,
                        [&]() { return static_cast<T>(distribution(random_engine)); });
}

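// Populate every input tensor with random float data so the demo can run without a real dataset.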
int GenerateInputDataWithRandom(std::vector<mindspore::tensor::MSTensor *> inputs) {
  for (auto tensor : inputs) {
    auto input_data = tensor->MutableData();
    if (input_data == nullptr) {
      std::cerr << "MallocData for inTensor failed." << std::endl;
      return -1;
    }
    void *random_data = malloc(tensor->Size());
    if (random_data == nullptr) {
      std::cerr << "Malloc random data buffer failed." << std::endl;
      return -1;
    }
    GenerateRandomData<float>(tensor->Size(), random_data, std::uniform_real_distribution<float>(0.1f, 1.0f));
    // Copy data to input tensor, then release the temporary buffer.
    memcpy(input_data, random_data, tensor->Size());
    free(random_data);
  }
  return mindspore::lite::RET_OK;
}

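// Run one inference pass: feed random inputs, execute the graph, and print the first values of each output tensor.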
int Run(mindspore::session::LiteSession *session) {
  auto inputs = session->GetInputs();
  auto ret = GenerateInputDataWithRandom(inputs);
  if (ret != mindspore::lite::RET_OK) {
    std::cerr << "Generate Random Input Data failed." << std::endl;
    return ret;
  }

  ret = session->RunGraph();
  if (ret != mindspore::lite::RET_OK) {
    std::cerr << "Inference error " << ret << std::endl;
    return ret;
  }

  auto out_tensors = session->GetOutputs();
  for (auto tensor : out_tensors) {
    std::cout << "tensor name is:" << tensor.first << " tensor size is:" << tensor.second->Size()
              << " tensor elements num is:" << tensor.second->ElementsNum() << std::endl;
    auto out_data = reinterpret_cast<float *>(tensor.second->MutableData());
    std::cout << "output data is:";
    for (int i = 0; i < tensor.second->ElementsNum() && i <= 50; i++) {
      std::cout << out_data[i] << " ";
    }
    std::cout << std::endl;
  }
  return mindspore::lite::RET_OK;
}

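// Create a LiteSession with a default context and compile the imported model into it.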
mindspore::session::LiteSession *Compile(mindspore::lite::Model *model) {
  // Create and init context.
  auto context = std::make_shared<mindspore::lite::Context>();
  if (context == nullptr) {
    std::cerr << "New context failed while running." << std::endl;
    return nullptr;
  }

  // Create the session.
  mindspore::session::LiteSession *session = mindspore::session::LiteSession::CreateSession(context.get());
  if (session == nullptr) {
    std::cerr << "CreateSession failed while running." << std::endl;
    return nullptr;
  }

  // Compile graph.
  auto ret = session->CompileGraph(model);
  if (ret != mindspore::lite::RET_OK) {
    std::cerr << "Compile failed while running." << std::endl;
    return nullptr;
  }

  // Note: once model->Free() is called, the model cannot be compiled again.
  if (model != nullptr) {
    model->Free();
  }
  return session;
}

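// End-to-end flow: read the .ms file, import and compile the model, run inference, then release the model and session.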
int CompileAndRun(int argc, const char **argv) {
  if (argc < 2) {
    std::cerr << "Usage: ./mindspore_quick_start_cpp ../model/mobilenetv2.ms\n";
    return -1;
  }
  // Read model file.
  auto model_path = argv[1];
  size_t size = 0;
  char *model_buf = ReadFile(model_path, &size);
  if (model_buf == nullptr) {
    std::cerr << "Read model file failed." << std::endl;
    return -1;
  }
  // Load the .ms model.
  auto model = mindspore::lite::Model::Import(model_buf, size);
  delete[] model_buf;
  if (model == nullptr) {
    std::cerr << "Import model file failed." << std::endl;
    return -1;
  }
  // Compile MindSpore Lite model.
  auto session = Compile(model);
  if (session == nullptr) {
    std::cerr << "Create session failed." << std::endl;
    return -1;
  }
  // Run inference.
  auto ret = Run(session);
  if (ret != mindspore::lite::RET_OK) {
    std::cerr << "MindSpore Lite run failed." << std::endl;
    return -1;
  }
  // Delete the model.
  delete model;
  // Delete the session.
  delete session;
  return mindspore::lite::RET_OK;
}

int main(int argc, const char **argv) { return CompileAndRun(argc, argv); }
@@ -0,0 +1,48 @@
cmake_minimum_required(VERSION 3.14)
project(RuntimeCpp)

if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU" AND CMAKE_CXX_COMPILER_VERSION VERSION_LESS 7.3.0)
    message(FATAL_ERROR "GCC version ${CMAKE_CXX_COMPILER_VERSION} must not be less than 7.3.0")
endif()
link_directories(${CMAKE_CURRENT_SOURCE_DIR}/lib)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++17")

include_directories(${CMAKE_CURRENT_SOURCE_DIR})
file(GLOB_RECURSE RUNTIME_CPP ${CMAKE_CURRENT_SOURCE_DIR}/*.cc)

add_executable(runtime_cpp ${RUNTIME_CPP})
find_library(log-lib log)
target_link_libraries(
    runtime_cpp
    -Wl,--whole-archive mindspore-lite -Wl,--no-whole-archive
    hiai
    hiai_ir
    hiai_ir_build
    ${log-lib}
)

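# Install the demo executable, the HiAI shared libraries, libc++_shared.so from the NDK, and the model into build/tmp, then package them with CPack as a .tar.gz archive.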
set(CMAKE_INSTALL_PREFIX ${CMAKE_CURRENT_SOURCE_DIR}/build/tmp)

install(TARGETS runtime_cpp
        DESTINATION exe)

install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/lib/libhiai.so
        DESTINATION lib)
install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/lib/libhiai_ir.so
        DESTINATION lib)
install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/lib/libhiai_ir_build.so
        DESTINATION lib)
install(FILES
        ${ANDROID_NDK}/toolchains/llvm/prebuilt/linux-x86_64/sysroot/usr/lib/aarch64-linux-android/libc++_shared.so
        DESTINATION lib)

install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/model/mobilenetv2.ms
        DESTINATION model)

set(CPACK_GENERATOR "TGZ")
set(CPACK_PACKAGE_FILE_NAME "runtime_cpp_demo")
set(CPACK_PACKAGE_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/output)

include(CPack)
@@ -0,0 +1,49 @@
#!/bin/bash
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================

BASEPATH=$(
    cd "$(dirname $0)"
    pwd
)
get_version() {
    VERSION_MAJOR=$(grep "const int ms_version_major =" ${BASEPATH}/../../include/version.h | tr -dc "[0-9]")
    VERSION_MINOR=$(grep "const int ms_version_minor =" ${BASEPATH}/../../include/version.h | tr -dc "[0-9]")
    VERSION_REVISION=$(grep "const int ms_version_revision =" ${BASEPATH}/../../include/version.h | tr -dc "[0-9]")
    VERSION_STR=${VERSION_MAJOR}.${VERSION_MINOR}.${VERSION_REVISION}
}
get_version
MODEL_DOWNLOAD_URL="https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_openimage_lite/1.1/mobilenetv2.ms"
MINDSPORE_LITE_DOWNLOAD_URL="https://ms-release.obs.cn-north-4.myhuaweicloud.com/${VERSION_STR}/MindSpore/lite/release/android/mindspore-lite-${VERSION_STR}-inference-android.tar.gz"

mkdir -p build
mkdir -p lib
mkdir -p model
if [ ! -e ${BASEPATH}/model/mobilenetv2.ms ]; then
    wget -c -O ${BASEPATH}/model/mobilenetv2.ms --no-check-certificate ${MODEL_DOWNLOAD_URL}
fi
if [ ! -e ${BASEPATH}/build/mindspore-lite-${VERSION_STR}-inference-android.tar.gz ]; then
    wget -c -O ${BASEPATH}/build/mindspore-lite-${VERSION_STR}-inference-android.tar.gz --no-check-certificate ${MINDSPORE_LITE_DOWNLOAD_URL}
fi
tar xzvf ${BASEPATH}/build/mindspore-lite-${VERSION_STR}-inference-android.tar.gz -C ${BASEPATH}/build/
cp -r ${BASEPATH}/build/mindspore-lite-${VERSION_STR}-inference-android/lib/aarch64/libmindspore-lite.a ${BASEPATH}/lib
cp -r ${BASEPATH}/build/mindspore-lite-${VERSION_STR}-inference-android/third_party/hiai_ddk/lib/aarch64/*.so ${BASEPATH}/lib
cp -r ${BASEPATH}/build/mindspore-lite-${VERSION_STR}-inference-android/include ${BASEPATH}/

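# Cross-compile for Android (arm64-v8a) with the NDK toolchain; ANDROID_NDK must point to the NDK root.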
cd ${BASEPATH}/build
cmake -DCMAKE_TOOLCHAIN_FILE="${ANDROID_NDK}/build/cmake/android.toolchain.cmake" -DANDROID_NATIVE_API_LEVEL="19" \
      -DANDROID_NDK="${ANDROID_NDK}" -DANDROID_ABI="arm64-v8a" -DANDROID_STL="c++_shared" ${BASEPATH}

make && make install && make package