diff --git a/mindspore/ccsrc/minddata/dataset/core/tensor_row.h b/mindspore/ccsrc/minddata/dataset/core/tensor_row.h
index 0b3b81183c..df20e8e4a9 100644
--- a/mindspore/ccsrc/minddata/dataset/core/tensor_row.h
+++ b/mindspore/ccsrc/minddata/dataset/core/tensor_row.h
@@ -72,6 +72,98 @@ class TensorRow {
   // Destructor
   ~TensorRow() = default;
 
+  /// Convert a vector of primitive types to a TensorRow consisting of n single data Tensors.
+  /// \tparam `T`
+  /// \param[in] o input vector
+  /// \param[out] output TensorRow
+  template <typename T>
+  static Status ConvertToTensorRow(const std::vector<T> &o, TensorRow *output) {
+    DataType data_type = DataType::FromCType<T>();
+    if (data_type == DataType::DE_UNKNOWN) {
+      RETURN_STATUS_UNEXPECTED("ConvertToTensorRow: Data type was not recognized.");
+    }
+    if (data_type == DataType::DE_STRING) {
+      RETURN_STATUS_UNEXPECTED("ConvertToTensorRow: Data type string is not supported.");
+    }
+
+    for (int i = 0; i < o.size(); i++) {
+      std::shared_ptr<Tensor> tensor;
+      Tensor::CreateEmpty(TensorShape({1}), data_type, &tensor);
+      tensor->SetItemAt({0}, o[i]);
+      output->push_back(tensor);
+    }
+    return Status::OK();
+  }
+
+  /// Convert a single primitive type to a TensorRow consisting of one single data Tensor.
+  /// \tparam `T`
+  /// \param[in] o input
+  /// \param[out] output TensorRow
+  template <typename T>
+  static Status ConvertToTensorRow(const T &o, TensorRow *output) {
+    DataType data_type = DataType::FromCType<T>();
+    if (data_type == DataType::DE_UNKNOWN) {
+      RETURN_STATUS_UNEXPECTED("ConvertToTensorRow: Data type was not recognized.");
+    }
+    if (data_type == DataType::DE_STRING) {
+      RETURN_STATUS_UNEXPECTED("ConvertToTensorRow: Data type string is not supported.");
+    }
+    std::shared_ptr<Tensor> tensor;
+    Tensor::CreateEmpty(TensorShape({1}), data_type, &tensor);
+    tensor->SetItemAt({0}, o);
+    output->push_back(tensor);
+    return Status::OK();
+  }
+
+  /// Return the value in a TensorRow consisting of a single data Tensor.
+  /// \tparam `T`
+  /// \param[in] input TensorRow
+  /// \param[out] o the primitive variable
+  template <typename T>
+  static Status ConvertFromTensorRow(const TensorRow &input, T *o) {
+    DataType data_type = DataType::FromCType<T>();
+    if (data_type == DataType::DE_UNKNOWN) {
+      RETURN_STATUS_UNEXPECTED("ConvertFromTensorRow: Data type was not recognized.");
+    }
+    if (data_type == DataType::DE_STRING) {
+      RETURN_STATUS_UNEXPECTED("ConvertFromTensorRow: Data type string is not supported.");
+    }
+    if (input.size() != 1) {
+      RETURN_STATUS_UNEXPECTED("ConvertFromTensorRow: The input TensorRow must contain exactly one tensor.");
+    }
+    if (input.at(0)->type() != data_type) {
+      RETURN_STATUS_UNEXPECTED("ConvertFromTensorRow: The output type doesn't match the input tensor type.");
+    }
+    if (input.at(0)->shape() != TensorShape({1})) {
+      RETURN_STATUS_UNEXPECTED("ConvertFromTensorRow: The input tensors must have a shape of {1}.");
+    }
+    return input.at(0)->GetItemAt<T>(o, {0});
+  }
+
+  /// Convert a TensorRow consisting of n single data tensors to a vector of size n.
+  /// \tparam `T`
+  /// \param[in] input TensorRow consisting of n single data tensors
+  /// \param[out] o output vector of primitive values
+  template <typename T>
+  static Status ConvertFromTensorRow(const TensorRow &input, std::vector<T> *o) {
+    DataType data_type = DataType::FromCType<T>();
+    if (data_type == DataType::DE_UNKNOWN) {
+      RETURN_STATUS_UNEXPECTED("ConvertFromTensorRow: Data type was not recognized.");
+    }
+    if (data_type == DataType::DE_STRING) {
+      RETURN_STATUS_UNEXPECTED("ConvertFromTensorRow: Data type string is not supported.");
+    }
+    for (int i = 0; i < input.size(); i++) {
+      if (input.at(i)->shape() != TensorShape({1})) {
+        RETURN_STATUS_UNEXPECTED("ConvertFromTensorRow: The input tensors must have a shape of {1}.");
+      }
+      T item;
+      RETURN_IF_NOT_OK(input.at(i)->GetItemAt<T>(&item, {0}));
+      o->push_back(item);
+    }
+    return Status::OK();
+  }
+
   // Functions to fetch/set id/vector
   row_id_type getId() const { return id_; }
diff --git a/tests/ut/cpp/dataset/CMakeLists.txt b/tests/ut/cpp/dataset/CMakeLists.txt
index 63d08e3163..ccd08d51cf 100644
--- a/tests/ut/cpp/dataset/CMakeLists.txt
+++ b/tests/ut/cpp/dataset/CMakeLists.txt
@@ -71,6 +71,7 @@ SET(DE_UT_SRCS
     status_test.cc
     task_manager_test.cc
     tensor_test.cc
+    tensor_row_test.cc
     tensor_string_test.cc
     tensorshape_test.cc
     tfReader_op_test.cc
diff --git a/tests/ut/cpp/dataset/tensor_row_test.cc b/tests/ut/cpp/dataset/tensor_row_test.cc
new file mode 100644
index 0000000000..877ffdfc3c
--- /dev/null
+++ b/tests/ut/cpp/dataset/tensor_row_test.cc
@@ -0,0 +1,257 @@
+/**
+ * Copyright 2020 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "minddata/dataset/core/client.h"
+#include "common/common.h"
+#include "gtest/gtest.h"
+#include "minddata/dataset/core/tensor.h"
+#include "minddata/dataset/core/tensor_row.h"
+
+using namespace mindspore::dataset;
+
+namespace py = pybind11;
+
+class MindDataTestTensorRowDE : public UT::Common {
+ public:
+  MindDataTestTensorRowDE() {}
+  void SetUp() { GlobalInit(); }
+};
+
+TEST_F(MindDataTestTensorRowDE, ConvertToTensorRowBoolTest) {
+  Status s;
+
+  TensorRow bool_output;
+  bool bool_value = true;
+  s = TensorRow::ConvertToTensorRow(bool_value, &bool_output);
+  ASSERT_EQ(s, Status::OK());
+  TensorRow expected_bool;
+  std::shared_ptr<Tensor> expected_tensor;
+  Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_BOOL), &expected_tensor);
+  expected_tensor->SetItemAt({0}, bool_value);
+  expected_bool.push_back(expected_tensor);
+  ASSERT_EQ(*(bool_output.at(0)) == *(expected_bool.at(0)), true);
+}
+
+TEST_F(MindDataTestTensorRowDE, ConvertToTensorRowIntTest) {
+  Status s;
+  TensorRow int_output;
+  int32_t int_value = 12;
+  TensorRow expected_int;
+  s = TensorRow::ConvertToTensorRow(int_value, &int_output);
+  ASSERT_EQ(s, Status::OK());
+  std::shared_ptr<Tensor> expected_tensor;
+  Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_INT32), &expected_tensor);
+  expected_tensor->SetItemAt({0}, int_value);
+  expected_int.push_back(expected_tensor);
+  ASSERT_EQ(*(int_output.at(0)) == *(expected_int.at(0)), true);
+}
+
+TEST_F(MindDataTestTensorRowDE, ConvertToTensorRowFloatTest) {
+  Status s;
+  TensorRow float_output;
+  float float_value = 12.57;
+  TensorRow expected_float;
+  s = TensorRow::ConvertToTensorRow(float_value, &float_output);
+  ASSERT_EQ(s, Status::OK());
+  std::shared_ptr<Tensor> expected_tensor;
+  Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_FLOAT32), &expected_tensor);
+  expected_tensor->SetItemAt({0}, float_value);
+  expected_float.push_back(expected_tensor);
+  ASSERT_EQ(*(float_output.at(0)) == *(expected_float.at(0)), true);
+}
+
+TEST_F(MindDataTestTensorRowDE, ConvertToTensorRowBoolVectorTest) {
+  Status s;
+  TensorRow bool_output;
+  std::vector<bool> bool_value = {true, false};
+  s = TensorRow::ConvertToTensorRow(bool_value, &bool_output);
+  ASSERT_EQ(s, Status::OK());
+  TensorRow expected_bool;
+  std::shared_ptr<Tensor> expected_tensor, expected_tensor2;
+  Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_BOOL), &expected_tensor);
+  expected_tensor->SetItemAt({0}, bool_value[0]);
+  Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_BOOL), &expected_tensor2);
+  expected_tensor2->SetItemAt({0}, bool_value[1]);
+  expected_bool.push_back(expected_tensor);
+  expected_bool.push_back(expected_tensor2);
+  ASSERT_EQ(*(bool_output.at(0)) == *(expected_bool.at(0)), true);
+  ASSERT_EQ(*(bool_output.at(1)) == *(expected_bool.at(1)), true);
+}
+
+TEST_F(MindDataTestTensorRowDE, ConvertToTensorRowIntVectorTest) {
+  Status s;
+  TensorRow int_output;
+  std::vector<uint64_t> int_value = {12, 16};
+  TensorRow expected_int;
+  s = TensorRow::ConvertToTensorRow(int_value, &int_output);
+  ASSERT_EQ(s, Status::OK());
+  std::shared_ptr<Tensor> expected_tensor, expected_tensor2;
+  Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_UINT64), &expected_tensor);
+  expected_tensor->SetItemAt({0}, int_value[0]);
+  Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_UINT64), &expected_tensor2);
+  expected_tensor2->SetItemAt({0}, int_value[1]);
+  expected_int.push_back(expected_tensor);
+  expected_int.push_back(expected_tensor2);
+  ASSERT_EQ(*(int_output.at(0)) == *(expected_int.at(0)), true);
+  ASSERT_EQ(*(int_output.at(1)) == *(expected_int.at(1)), true);
+}
+
+TEST_F(MindDataTestTensorRowDE, ConvertToTensorRowFloatVectorTest) {
+  Status s;
+  TensorRow float_output;
+  std::vector<double> float_value = {12.57, 0.264};
+  TensorRow expected_float;
+  s = TensorRow::ConvertToTensorRow(float_value, &float_output);
+  ASSERT_EQ(s, Status::OK());
+  std::shared_ptr<Tensor> expected_tensor, expected_tensor2;
+  Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_FLOAT64), &expected_tensor);
+  expected_tensor->SetItemAt({0}, float_value[0]);
+  Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_FLOAT64), &expected_tensor2);
+  expected_tensor2->SetItemAt({0}, float_value[1]);
+  expected_float.push_back(expected_tensor);
+  expected_float.push_back(expected_tensor2);
+  ASSERT_EQ(*(float_output.at(0)) == *(expected_float.at(0)), true);
+  ASSERT_EQ(*(float_output.at(1)) == *(expected_float.at(1)), true);
+}
+
+TEST_F(MindDataTestTensorRowDE, ConvertFromTensorRowBoolTest) {
+  Status s;
+  bool bool_value = true;
+  bool result;
+  TensorRow input_tensor_row;
+  std::shared_ptr<Tensor> input_tensor;
+  Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_BOOL), &input_tensor);
+  input_tensor->SetItemAt({0}, bool_value);
+  input_tensor_row.push_back(input_tensor);
+  s = TensorRow::ConvertFromTensorRow(input_tensor_row, &result);
+  ASSERT_EQ(s, Status::OK());
+  ASSERT_EQ(bool_value, result);
+}
+
+TEST_F(MindDataTestTensorRowDE, ConvertFromTensorRowIntTest) {
+  Status s;
+  int32_t int_value = 12;
+  int32_t result;
+  TensorRow input_tensor_row;
+  std::shared_ptr<Tensor> input_tensor;
+  Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_INT32), &input_tensor);
+  input_tensor->SetItemAt({0}, int_value);
+  input_tensor_row.push_back(input_tensor);
+  s = TensorRow::ConvertFromTensorRow(input_tensor_row, &result);
+  ASSERT_EQ(s, Status::OK());
+  ASSERT_EQ(int_value, result);
+}
+
+TEST_F(MindDataTestTensorRowDE, ConvertFromTensorRowFloatTest) {
+  Status s;
+  float float_value = 12.57;
+  float result;
+  TensorRow input_tensor_row;
+  std::shared_ptr<Tensor> input_tensor;
+  Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_FLOAT32), &input_tensor);
+  input_tensor->SetItemAt({0}, float_value);
+  input_tensor_row.push_back(input_tensor);
+  s = TensorRow::ConvertFromTensorRow(input_tensor_row, &result);
+  ASSERT_EQ(s, Status::OK());
+  ASSERT_EQ(float_value, result);
+}
+
+TEST_F(MindDataTestTensorRowDE, ConvertFromTensorRowBoolVectorTest) {
+  Status s;
+  std::vector<bool> bool_value = {true, false};
+  std::vector<bool> result;
+  TensorRow input_tensor_row;
+  std::shared_ptr<Tensor> input_tensor1, input_tensor2;
+  Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_BOOL), &input_tensor1);
+  input_tensor1->SetItemAt({0}, bool_value[0]);
+  Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_BOOL), &input_tensor2);
+  input_tensor2->SetItemAt({0}, bool_value[1]);
+  input_tensor_row.push_back(input_tensor1);
+  input_tensor_row.push_back(input_tensor2);
+  s = TensorRow::ConvertFromTensorRow(input_tensor_row, &result);
+  ASSERT_EQ(s, Status::OK());
+  ASSERT_EQ(result, bool_value);
+}
+
+TEST_F(MindDataTestTensorRowDE, ConvertFromTensorRowIntVectorTest) {
+  Status s;
+  std::vector<uint64_t> int_value = {12, 16};
+  std::vector<uint64_t> result;
+  TensorRow input_tensor_row;
+  std::shared_ptr<Tensor> input_tensor1, input_tensor2;
+  Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_UINT64), &input_tensor1);
+  input_tensor1->SetItemAt({0}, int_value[0]);
+  Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_UINT64), &input_tensor2);
+  input_tensor2->SetItemAt({0}, int_value[1]);
+  input_tensor_row.push_back(input_tensor1);
+  input_tensor_row.push_back(input_tensor2);
+  s = TensorRow::ConvertFromTensorRow(input_tensor_row, &result);
+  ASSERT_EQ(s, Status::OK());
+  ASSERT_EQ(result, int_value);
+}
+
+TEST_F(MindDataTestTensorRowDE, ConvertFromTensorRowFloatVectorTest) {
+  Status s;
+  std::vector<double> float_value = {12.57, 0.264};
+  std::vector<double> result;
+  TensorRow input_tensor_row;
+  std::shared_ptr<Tensor> input_tensor1, input_tensor2;
+  Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_FLOAT64), &input_tensor1);
+  input_tensor1->SetItemAt({0}, float_value[0]);
+  Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_FLOAT64), &input_tensor2);
+  input_tensor2->SetItemAt({0}, float_value[1]);
+  input_tensor_row.push_back(input_tensor1);
+  input_tensor_row.push_back(input_tensor2);
+  s = TensorRow::ConvertFromTensorRow(input_tensor_row, &result);
+  ASSERT_EQ(s, Status::OK());
+  ASSERT_EQ(result, float_value);
+}
+
+TEST_F(MindDataTestTensorRowDE, ConvertToTensorRowInvalidDataTest) {
+  TensorRow output;
+  std::string string_input = "Bye";
+  ASSERT_FALSE(TensorRow::ConvertToTensorRow(string_input, &output).IsOk());
+  std::vector<std::string> string_vector_input = {"Hello"};
+  ASSERT_FALSE(TensorRow::ConvertToTensorRow(string_vector_input, &output).IsOk());
+}
+
+TEST_F(MindDataTestTensorRowDE, ConvertFromTensorRowTypeMismatchTest) {
+  TensorRow input_tensor_row;
+  std::shared_ptr<Tensor> input_tensor1;
+  Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_BOOL), &input_tensor1);
+  input_tensor1->SetItemAt({0}, false);
+  input_tensor_row.push_back(input_tensor1);
+  double output;
+  ASSERT_FALSE(TensorRow::ConvertFromTensorRow(input_tensor_row, &output).IsOk());
+  std::vector<double> output_vector;
+  ASSERT_FALSE(TensorRow::ConvertFromTensorRow(input_tensor_row, &output_vector).IsOk());
+}
+
+TEST_F(MindDataTestTensorRowDE, ConvertFromTensorRowInvalidShapeTest) {
+  TensorRow input_tensor_row;
+  std::shared_ptr<Tensor> input_tensor1;
+  Tensor::CreateEmpty(TensorShape({2}), DataType(DataType::DE_FLOAT64), &input_tensor1);
+  input_tensor_row.push_back(input_tensor1);
+  std::vector<double> output;
+  ASSERT_FALSE(TensorRow::ConvertFromTensorRow(input_tensor_row, &output).IsOk());
+  std::vector<double> output_vector;
+  ASSERT_FALSE(TensorRow::ConvertFromTensorRow(input_tensor_row, &output_vector).IsOk());
+}
+
+TEST_F(MindDataTestTensorRowDE, ConvertFromTensorRowEmptyInputTest) {
+  TensorRow input_tensor_row;
+  double output;
+  ASSERT_FALSE(TensorRow::ConvertFromTensorRow(input_tensor_row, &output).IsOk());
+}
\ No newline at end of file
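
Reviewer note (not part of the patch): a minimal usage sketch of the two helpers added in tensor_row.h above, showing a round trip of a std::vector<double> through a TensorRow. It assumes only the headers, namespaces, and Status/TensorRow API already used in the test file; the main() wrapper and variable names are illustrative.

// Illustrative sketch only. Each vector element becomes its own shape-{1}
// DE_FLOAT64 tensor in the row, and the element type must match exactly
// when converting back; string element types are rejected by design.
#include <iostream>
#include <vector>

#include "minddata/dataset/core/tensor_row.h"

using mindspore::dataset::Status;
using mindspore::dataset::TensorRow;

int main() {
  std::vector<double> values = {1.5, 2.25, 3.0};

  // Vector -> TensorRow: one single-element tensor per value.
  TensorRow row;
  Status rc = TensorRow::ConvertToTensorRow(values, &row);
  if (!rc.IsOk()) {
    std::cerr << "ConvertToTensorRow failed" << std::endl;
    return 1;
  }

  // TensorRow -> vector: every tensor must have shape {1}.
  std::vector<double> round_trip;
  rc = TensorRow::ConvertFromTensorRow(row, &round_trip);
  if (!rc.IsOk()) {
    std::cerr << "ConvertFromTensorRow failed" << std::endl;
    return 1;
  }

  for (double v : round_trip) {
    std::cout << v << " ";  // prints 1.5 2.25 3
  }
  std::cout << std::endl;
  return 0;
}

The scalar overloads behave the same way but require the row to contain exactly one shape-{1} tensor whose DataType matches the requested C type.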