|
|
|
@ -150,16 +150,16 @@ if (WITH_ANAKIN AND WITH_MKL)
|
|
|
|
|
SRCS
|
|
|
|
|
${PADDLE_BINARY_DIR}/paddle/fluid/inference/api/libinference_anakin_api* # compiled anakin api
|
|
|
|
|
${ANAKIN_INSTALL_DIR} # anakin release
|
|
|
|
|
DSTS ${dst_dir}/inference/anakin ${FLUID_INSTALL_DIR}/third_party/install/anakin)
|
|
|
|
|
DSTS ${FLUID_INSTALL_DIR}/third_party/install/anakin ${FLUID_INSTALL_DIR}/third_party/install/anakin)
|
|
|
|
|
list(APPEND inference_deps anakin_inference_lib)
|
|
|
|
|
endif()
|
|
|
|
|
|
|
|
|
|
set(module "inference")
# Package the inference module: public headers, the compiled
# libpaddle_fluid library, and the two standalone API headers.
# NOTE: copy() pairs SRCS with DSTS positionally, so the four SRCS
# entries below must match exactly four DSTS entries.
copy(inference_lib DEPS ${inference_deps}
    SRCS ${src_dir}/${module}/*.h ${PADDLE_BINARY_DIR}/paddle/fluid/inference/libpaddle_fluid.*
         ${src_dir}/${module}/api/paddle_inference_api.h
         ${PADDLE_BINARY_DIR}/paddle/fluid/inference/api/paddle_inference_pass.h
    DSTS ${dst_dir}/${module} ${dst_dir}/${module} ${dst_dir}/${module} ${dst_dir}/${module}
)
|
|
|
|
|
|
|
|
|
|
set(module "platform")
|
|
|
|
@ -188,18 +188,38 @@ copy(cmake_cache
|
|
|
|
|
# Aggregate target producing the complete fluid library, suitable for
# both training and inference deployments.
add_custom_target(fluid_lib_dist DEPENDS ${fluid_lib_dist_dep})
|
|
# The commands below assemble the inference-only fluid library.
# third_party and CMakeCache.txt keep the same relative position they
# have under ${FLUID_INSTALL_DIR}.
copy(third_party DEPS fluid_lib_dist
    SRCS ${FLUID_INSTALL_DIR}/third_party ${FLUID_INSTALL_DIR}/CMakeCache.txt
    DSTS ${FLUID_INFERENCE_INSTALL_DIR} ${FLUID_INFERENCE_INSTALL_DIR}
)
|
|
|
|
|
|
|
|
|
|
# The inference-only package needs nothing beyond libpaddle_fluid.so/.a
# and the paddle_inference_api.h header.
copy(inference_api_lib DEPS fluid_lib_dist
    SRCS ${FLUID_INSTALL_DIR}/paddle/fluid/inference/libpaddle_fluid.*
         ${FLUID_INSTALL_DIR}/paddle/fluid/inference/paddle_inference_api.h
    DSTS ${FLUID_INFERENCE_INSTALL_DIR}/paddle/lib ${FLUID_INFERENCE_INSTALL_DIR}/paddle/include
)
|
|
|
|
|
|
|
|
|
|
# Aggregate target: builds the inference-only distribution tree under
# ${FLUID_INFERENCE_INSTALL_DIR} by triggering the two copy targets above.
add_custom_target(inference_lib_dist DEPENDS third_party inference_api_lib)
|
|
|
|
|
|
|
|
|
|
# paddle fluid version
# NOTE(review): the inline version.txt generation that previously lived here
# duplicated the version() helper defined below, and was missing the
# WITH_MKLDNN line that the helper writes. The version() call on
# FLUID_INSTALL_DIR/version.txt produces the same file, so the duplicate
# inline block is removed to avoid writing version.txt twice with
# inconsistent contents.
|
|
|
|
|
# version(<version_file>)
# Writes build metadata into <version_file>: the HEAD git commit id, the
# WITH_MKL / WITH_MKLDNN / WITH_GPU switches, and — when WITH_GPU is ON —
# the CUDA and CUDNN versions. The file is overwritten on every configure.
function(version version_file)
    # Ask git for the current commit hash; OUTPUT_STRIP_TRAILING_WHITESPACE
    # keeps the value clean should the command emit a trailing newline.
    execute_process(
        COMMAND ${GIT_EXECUTABLE} log --pretty=format:%H -1
        WORKING_DIRECTORY ${PADDLE_SOURCE_DIR}
        OUTPUT_VARIABLE PADDLE_GIT_COMMIT
        OUTPUT_STRIP_TRAILING_WHITESPACE)
    # Quote the path so install prefixes containing spaces do not split
    # into multiple arguments.
    file(WRITE "${version_file}"
        "GIT COMMIT ID: ${PADDLE_GIT_COMMIT}\n"
        "WITH_MKL: ${WITH_MKL}\n"
        "WITH_MKLDNN: ${WITH_MKLDNN}\n"
        "WITH_GPU: ${WITH_GPU}\n")
    if(WITH_GPU)
        file(APPEND "${version_file}"
            "CUDA version: ${CUDA_VERSION}\n"
            "CUDNN version: v${CUDNN_MAJOR_VERSION}\n")
    endif()
endfunction()
|
|
|
|
|
# Stamp both distribution trees with build metadata.
version(${FLUID_INSTALL_DIR}/version.txt)
version(${FLUID_INFERENCE_INSTALL_DIR}/version.txt)
|
|
|
|
|