|
|
|
@ -57,7 +57,6 @@ void test_io_utils() {
|
|
|
|
|
} // namespace paddle
|
|
|
|
|
|
|
|
|
|
// Exercises the serialize/deserialize round-trip helper for float32 data.
TEST(infer_io_utils, float32) {
  paddle::inference::test_io_utils<float>();
}
|
|
|
|
|
// Exercises the serialize/deserialize round-trip helper for int64 data.
TEST(infer_io_utils, int64) {
  paddle::inference::test_io_utils<int64_t>();
}
|
|
|
|
|
|
|
|
|
|
TEST(infer_io_utils, tensors) {
|
|
|
|
|
// Create a float32 tensor.
|
|
|
|
@ -80,7 +79,7 @@ TEST(infer_io_utils, tensors) {
|
|
|
|
|
in_int64.dtype = paddle::inference::PaddleTensorGetDType<int64_t>();
|
|
|
|
|
|
|
|
|
|
// Serialize tensors.
|
|
|
|
|
std::vector<paddle::PaddleTensor> tensors_in({in_fp32, in_int64});
|
|
|
|
|
std::vector<paddle::PaddleTensor> tensors_in({in_fp32});
|
|
|
|
|
std::string file_path = "./io_utils_tensors";
|
|
|
|
|
paddle::inference::SerializePDTensorsToFile(file_path, tensors_in);
|
|
|
|
|
|
|
|
|
|