From 624511337c2e01679451d5982ea4f7d81c721b4c Mon Sep 17 00:00:00 2001
From: luoyang
Date: Tue, 6 Apr 2021 19:40:10 +0800
Subject: [PATCH] MindData Codespell correction

---
 .../engine/opt/optional/tensor_op_fusion_pass.h  |  2 +-
 .../engine/opt/post/generator_node_pass.h        | 10 +++++-----
 .../dataset/engine/opt/pre/deep_copy_pass.h      |  4 ++--
 .../dataset/engine/opt/pre/epoch_ctrl_pass.h     | 12 ++++++------
 .../engine/opt/pre/input_validation_pass.h       |  2 +-
 .../dataset/engine/opt/pre/node_removal_pass.h   | 10 +++++-----
 .../dataset/kernels/image/bounding_box.h         |  6 +++---
 .../kernels/image/bounding_box_augment_op.h      |  2 +-
 .../image/soft_dvpp/utils/external_soft_dp.h     |  8 ++++----
 .../kernels/image/soft_dvpp/utils/soft_jpegd.cc  | 16 ++++++++--------
 .../kernels/image/soft_dvpp/utils/soft_jpegd.h   |  6 +++---
 .../kernels/image/soft_dvpp/utils/soft_vpc.cc    |  4 ++--
 .../image/soft_dvpp/utils/yuv_scaler_para_set.cc | 16 ++++++++--------
 .../image/soft_dvpp/utils/yuv_scaler_para_set.h  |  4 ++--
 .../dataset/text/kernels/basic_tokenizer_op.h    |  2 +-
 .../ccsrc/minddata/dataset/util/numa_interface.h |  2 +-
 .../minddata/mindrecord/include/shard_column.h   |  2 +-
 .../minddata/mindrecord/include/shard_header.h   |  2 +-
 .../ccsrc/minddata/mindrecord/io/shard_reader.cc |  2 +-
 .../minddata/mindrecord/meta/shard_column.cc     |  6 +++---
 tests/ut/cpp/dataset/common/bboxop_common.h      |  2 +-
 tests/ut/cpp/mindrecord/ut_shard_writer_test.cc  | 10 +++++-----
 .../python/dataset/test_generator_reset_pass.py  |  6 +++---
 tests/ut/python/dataset/test_pyfunc.py           | 12 ++++++------
 tests/ut/python/dataset/test_random_rotation.py  |  2 +-
 tests/ut/python/dataset/test_take.py             |  4 ++--
 tests/ut/python/mindrecord/skip_test_issue.py    |  2 +-
 .../mindrecord/test_cifar100_to_mindrecord.py    |  2 +-
 .../mindrecord/test_mindrecord_exception.py      | 14 +++++++-------
 29 files changed, 86 insertions(+), 86 deletions(-)

diff --git a/mindspore/ccsrc/minddata/dataset/engine/opt/optional/tensor_op_fusion_pass.h b/mindspore/ccsrc/minddata/dataset/engine/opt/optional/tensor_op_fusion_pass.h
index 4f1d2074b8..75c6541ac6 100644
--- a/mindspore/ccsrc/minddata/dataset/engine/opt/optional/tensor_op_fusion_pass.h
+++ b/mindspore/ccsrc/minddata/dataset/engine/opt/optional/tensor_op_fusion_pass.h
@@ -28,7 +28,7 @@ namespace dataset {
 class TensorOpFusionPass : public IRNodePass {
   /// \brief Identifies and fuses tensor ops within MapOp
   /// \param[in] node The node being visited
-  /// \param[inout] *modified indicates whether the node has been visited
+  /// \param[in, out] *modified indicates whether the node has been visited
   /// \return Status The status code returned
   Status Visit(std::shared_ptr node, bool *const modified) override;
 };
diff --git a/mindspore/ccsrc/minddata/dataset/engine/opt/post/generator_node_pass.h b/mindspore/ccsrc/minddata/dataset/engine/opt/post/generator_node_pass.h
index 141719113a..9d2adcc209 100644
--- a/mindspore/ccsrc/minddata/dataset/engine/opt/post/generator_node_pass.h
+++ b/mindspore/ccsrc/minddata/dataset/engine/opt/post/generator_node_pass.h
@@ -38,31 +38,31 @@ class GeneratorNodePass : public IRNodePass {

   /// \brief Record the starting point to collect the Generator node
   /// \param[in] node The node being visited
-  /// \param[inout] modified Indicator if the node was changed at all
+  /// \param[in, out] modified Indicator if the node was changed at all
   /// \return Status The status code returned
   Status Visit(std::shared_ptr node, bool *const modified) override;

   /// \brief Record the starting point to collect the Generator node
   /// \param[in] node The node being visited
-  /// \param[inout] modified Indicator if the node was changed at all
+  /// \param[in, out] modified Indicator if the node was changed at all
   /// \return Status The status code returned
   Status Visit(std::shared_ptr node, bool *const modified) override;

   /// \brief Add the Generator node to the set
   /// \param[in] node The node being visited
-  /// \param[inout] modified Indicator if the node was changed at all
+  /// \param[in, out] modified Indicator if the node was changed at all
   /// \return Status The status code returned
   Status Visit(std::shared_ptr node, bool *const modified) override;

   /// \brief Add the Generator node(s) from the set to this Repeat node for run-time processing
   /// \param[in] node The node being visited
-  /// \param[inout] modified Indicator if the node was changed at all
+  /// \param[in, out] modified Indicator if the node was changed at all
   /// \return Status The status code returned
   Status VisitAfter(std::shared_ptr node, bool *const modified) override;

   /// \brief Add the Generator node(s) from the set to this EpochCtrl node for run-time processing
   /// \param[in] node The node being visited
-  /// \param[inout] modified Indicator if the node was changed at all
+  /// \param[in, out] modified Indicator if the node was changed at all
   /// \return Status The status code returned
   Status VisitAfter(std::shared_ptr node, bool *const modified) override;

diff --git a/mindspore/ccsrc/minddata/dataset/engine/opt/pre/deep_copy_pass.h b/mindspore/ccsrc/minddata/dataset/engine/opt/pre/deep_copy_pass.h
index 87d9e21ee4..7cd68e577a 100644
--- a/mindspore/ccsrc/minddata/dataset/engine/opt/pre/deep_copy_pass.h
+++ b/mindspore/ccsrc/minddata/dataset/engine/opt/pre/deep_copy_pass.h
@@ -37,13 +37,13 @@ class DeepCopyPass : public IRNodePass {

   /// \brief Clone a new copy of the node
   /// \param[in] node The node being visited
-  /// \param[inout] *modified indicates whether the node has been visited
+  /// \param[in, out] *modified indicates whether the node has been visited
   /// \return Status code
   Status Visit(std::shared_ptr node, bool *const modified) override;

   /// \brief Reset parent after walking its sub tree.
   /// \param[in] node The node being visited
-  /// \param[inout] *modified indicates whether the node has been visited
+  /// \param[in, out] *modified indicates whether the node has been visited
   /// \return Status code
   Status VisitAfter(std::shared_ptr node, bool *const modified) override;

diff --git a/mindspore/ccsrc/minddata/dataset/engine/opt/pre/epoch_ctrl_pass.h b/mindspore/ccsrc/minddata/dataset/engine/opt/pre/epoch_ctrl_pass.h
index 71f59f648d..9284cb7591 100644
--- a/mindspore/ccsrc/minddata/dataset/engine/opt/pre/epoch_ctrl_pass.h
+++ b/mindspore/ccsrc/minddata/dataset/engine/opt/pre/epoch_ctrl_pass.h
@@ -44,27 +44,27 @@ class EpochCtrlPass : public IRTreePass {

   /// \brief Performs finder work for BuildVocabNode that has special rules about epoch control injection.
   /// \param[in] node The node being visited
-  /// \param[inout] modified Indicator if the node was changed at all
+  /// \param[in, out] modified Indicator if the node was changed at all
   /// \return Status The status code returned
   Status Visit(std::shared_ptr node, bool *const modified) override;

   /// \brief Performs finder work for BuildVocabNode that has special rules about epoch control injection.
   /// \param[in] node The node being visited
-  /// \param[inout] modified Indicator if the node was changed at all
+  /// \param[in, out] modified Indicator if the node was changed at all
   /// \return Status The status code returned
   Status Visit(std::shared_ptr node, bool *const modified) override;

 #ifndef ENABLE_ANDROID
   /// \brief Performs finder work for BuildSentenceVocabNode that has special rules about epoch control injection.
   /// \param[in] node The node being visited
-  /// \param[inout] modified Indicator if the node was changed at all
+  /// \param[in, out] modified Indicator if the node was changed at all
   /// \return Status The status code returned
   Status Visit(std::shared_ptr node, bool *const modified) override;
 #endif

   /// \brief Register the TransferNode for further action.
   /// \param[in] node The node being visited
-  /// \param[inout] modified Indicator if the node was changed at all
+  /// \param[in, out] modified Indicator if the node was changed at all
   /// \return Status The status code returned
   Status VisitAfter(std::shared_ptr node, bool *const modified) override;
@@ -87,8 +87,8 @@ class EpochCtrlPass : public IRTreePass {
   ~EpochCtrlPass() = default;

   /// \brief Runs an injection pass to inject in operators needed at the pre pass stage
-  /// \param[inout] tree The tree to operate on.
-  /// \param[inout] Indicate of the tree was modified.
+  /// \param[in, out] tree The tree to operate on.
+  /// \param[in, out] Indicate of the tree was modified.
   /// \return Status The status code returned
   Status RunOnTree(std::shared_ptr root_ir, bool *const modified) override;
 };
diff --git a/mindspore/ccsrc/minddata/dataset/engine/opt/pre/input_validation_pass.h b/mindspore/ccsrc/minddata/dataset/engine/opt/pre/input_validation_pass.h
index bfb5f72f1c..a016906fc0 100644
--- a/mindspore/ccsrc/minddata/dataset/engine/opt/pre/input_validation_pass.h
+++ b/mindspore/ccsrc/minddata/dataset/engine/opt/pre/input_validation_pass.h
@@ -29,7 +29,7 @@ namespace dataset {
 class InputValidationPass : public IRNodePass {
   /// \brief Runs a validation pass to check input parameters
   /// \param[in] node The node being visited
-  /// \param[inout] *modified indicates whether the node has been visited
+  /// \param[in, out] *modified indicates whether the node has been visited
   /// \return Status code
   Status Visit(std::shared_ptr node, bool *const modified) override;
 };
diff --git a/mindspore/ccsrc/minddata/dataset/engine/opt/pre/node_removal_pass.h b/mindspore/ccsrc/minddata/dataset/engine/opt/pre/node_removal_pass.h
index 492cab1f6c..cff5a1b8fa 100644
--- a/mindspore/ccsrc/minddata/dataset/engine/opt/pre/node_removal_pass.h
+++ b/mindspore/ccsrc/minddata/dataset/engine/opt/pre/node_removal_pass.h
@@ -44,19 +44,19 @@ class NodeRemovalPass : public IRTreePass {

   /// \brief Perform RepeatNode removal check
   /// \param[in] node The node being visited
-  /// \param[inout] modified Indicator if the node was changed at all
+  /// \param[in, out] modified Indicator if the node was changed at all
   /// \return Status The status code returned
   Status Visit(std::shared_ptr node, bool *const modified) override;

   /// \brief Perform SkipNode removal check
   /// \param[in] node The node being visited
-  /// \param[inout] modified Indicator if the node was changed at all
+  /// \param[in, out] modified Indicator if the node was changed at all
   /// \return Status The status code returned
   Status Visit(std::shared_ptr node, bool *const modified) override;

   /// \brief Perform TakeNode removal check
   /// \param[in] node The node being visited
-  /// \param[inout] modified Indicator if the node was changed at all
+  /// \param[in, out] modified Indicator if the node was changed at all
   /// \return Status The status code returned
   Status Visit(std::shared_ptr node, bool *const modified) override;
@@ -76,8 +76,8 @@ class NodeRemovalPass : public IRTreePass {
   ~NodeRemovalPass() = default;

   /// \brief Runs a removal_nodes pass first to find out which nodes to remove, then removes them.
-  /// \param[inout] tree The tree to operate on.
-  /// \param[inout] Indicate of the tree was modified.
+  /// \param[in, out] tree The tree to operate on.
+  /// \param[in, out] Indicate of the tree was modified.
   /// \return Status The status code returned
   Status RunOnTree(std::shared_ptr root_ir, bool *const modified) override;
 };
diff --git a/mindspore/ccsrc/minddata/dataset/kernels/image/bounding_box.h b/mindspore/ccsrc/minddata/dataset/kernels/image/bounding_box.h
index 9525922a03..0fbc88821d 100644
--- a/mindspore/ccsrc/minddata/dataset/kernels/image/bounding_box.h
+++ b/mindspore/ccsrc/minddata/dataset/kernels/image/bounding_box.h
@@ -98,14 +98,14 @@ class BoundingBox {
   /// \brief Updates bounding boxes with required Top and Left padding
   /// \note Top and Left padding amounts required to adjust bboxs min X,Y values according to padding 'push'
   ///     Top/Left since images 0,0 coordinate is taken from top left
-  /// \param bboxList: A tensor contaning bounding box tensors
+  /// \param bboxList: A tensor containing bounding box tensors
   /// \param bboxCount: total Number of bounding boxes - required within caller function to run update loop
   /// \param pad_top: Total amount of padding applied to image top
   /// \param pad_left: Total amount of padding applied to image left side
   static Status PadBBoxes(const TensorPtr *bbox_list, size_t bbox_count, int32_t pad_top, int32_t pad_left);

   /// \brief Updates and checks bounding boxes for new cropped region of image
-  /// \param bbox_list: A tensor contaning bounding box tensors
+  /// \param bbox_list: A tensor containing bounding box tensors
   /// \param bbox_count: total Number of bounding boxes - required within caller function to run update loop
   /// \param CB_Xmin: Image's CropBox Xmin coordinate
   /// \param CB_Xmin: Image's CropBox Ymin coordinate
@@ -116,7 +116,7 @@ class BoundingBox {
   /// \brief Updates bounding boxes for an Image Resize Operation - Takes in set of valid BBoxes
   ///     For e.g those that remain after a crop
-  /// \param bbox_list: A tensor contaning bounding box tensors
+  /// \param bbox_list: A tensor containing bounding box tensors
   /// \param bbox_count: total Number of bounding boxes - required within caller function to run update loop
   /// \param target_width: required width of image post resize
   /// \param target_height: required height of image post resize
diff --git a/mindspore/ccsrc/minddata/dataset/kernels/image/bounding_box_augment_op.h b/mindspore/ccsrc/minddata/dataset/kernels/image/bounding_box_augment_op.h
index c992c9196e..ba6d05e7ff 100644
--- a/mindspore/ccsrc/minddata/dataset/kernels/image/bounding_box_augment_op.h
+++ b/mindspore/ccsrc/minddata/dataset/kernels/image/bounding_box_augment_op.h
@@ -35,7 +35,7 @@ class BoundingBoxAugmentOp : public TensorOp {
   static const float kDefRatio;

   // Constructor for BoundingBoxAugmentOp
-  // @param std::shared_ptr transform transform: C++ opration to apply on select bounding boxes
+  // @param std::shared_ptr transform transform: C++ operation to apply on select bounding boxes
   // @param float ratio: ratio of bounding boxes to have the transform applied on
   BoundingBoxAugmentOp(std::shared_ptr transform, float ratio);
diff --git a/mindspore/ccsrc/minddata/dataset/kernels/image/soft_dvpp/utils/external_soft_dp.h b/mindspore/ccsrc/minddata/dataset/kernels/image/soft_dvpp/utils/external_soft_dp.h
index 88099bb7e8..b703eb35cc 100644
--- a/mindspore/ccsrc/minddata/dataset/kernels/image/soft_dvpp/utils/external_soft_dp.h
+++ b/mindspore/ccsrc/minddata/dataset/kernels/image/soft_dvpp/utils/external_soft_dp.h
@@ -33,10 +33,10 @@ struct SoftDpProcsessInfo {
 };

 struct SoftDpCropInfo {
-  uint32_t left;   // crop left boundry
-  uint32_t right;  // crop right boundry
-  uint32_t up;     // crop up boundry
-  uint32_t down;   // crop down boundry
+  uint32_t left;   // crop left boundary
+  uint32_t right;  // crop right boundary
+  uint32_t up;     // crop up boundary
+  uint32_t down;   // crop down boundary
 };

 /*
diff --git a/mindspore/ccsrc/minddata/dataset/kernels/image/soft_dvpp/utils/soft_jpegd.cc b/mindspore/ccsrc/minddata/dataset/kernels/image/soft_dvpp/utils/soft_jpegd.cc
index c0516be037..acc8cacdf0 100644
--- a/mindspore/ccsrc/minddata/dataset/kernels/image/soft_dvpp/utils/soft_jpegd.cc
+++ b/mindspore/ccsrc/minddata/dataset/kernels/image/soft_dvpp/utils/soft_jpegd.cc
@@ -190,11 +190,11 @@ uint32_t SoftJpegd::ConfigVpcInputData(struct VpcInfo *vpc_input_info, int32_t *
 }

 /*
- * @brief : destory libjpeg source
+ * @brief : destroy libjpeg source
  * @param [in] struct jpeg_decompress_struct &libjpeg_handler : libjpeg handle.
  * @param [in] tjhandle &handle : tjhandle.
  */
-void DestoryLibjpegSource(struct jpeg_decompress_struct *libjpeg_handler, const tjhandle &handle) {
+void DestroyLibjpegSource(struct jpeg_decompress_struct *libjpeg_handler, const tjhandle &handle) {
   (void)tjDestroy(handle);
   jpeg_destroy_decompress(libjpeg_handler);
 }
@@ -211,7 +211,7 @@ uint32_t SoftJpegd::JpegdSoftwareDecodeProcess(struct VpcInfo *vpc_input_info,
   int32_t prepare_decode_res = PrepareDecode(&libjpeg_handler, vpc_input_info, soft_dp_process_info);
   if (prepare_decode_res != decodeSucc) {
     JPEGD_LOGE("prepare decode failed!");
-    DestoryLibjpegSource(&libjpeg_handler, handle);
+    DestroyLibjpegSource(&libjpeg_handler, handle);
     return decodeErr;
   }

@@ -220,14 +220,14 @@ uint32_t SoftJpegd::JpegdSoftwareDecodeProcess(struct VpcInfo *vpc_input_info,
                                                 &height, &sub_sample, &color_spase);
   if (decode_header_res != decodeSucc) {
     JPEGD_LOGE("Decompress header failed, width = %d, height = %d.", width, height);
-    DestoryLibjpegSource(&libjpeg_handler, handle);
+    DestroyLibjpegSource(&libjpeg_handler, handle);
     return decodeErr;
   }

   int32_t alloc_out_buf_res = AllocOutputBuffer(vpc_input_info, &width, &height, &sub_sample);
   if (alloc_out_buf_res != decodeSucc) {
     JPEGD_LOGE("alloc output buffer failed!");
-    DestoryLibjpegSource(&libjpeg_handler, handle);
+    DestroyLibjpegSource(&libjpeg_handler, handle);
     return decodeErr;
   }

@@ -239,15 +239,15 @@ uint32_t SoftJpegd::JpegdSoftwareDecodeProcess(struct VpcInfo *vpc_input_info,
                std::this_thread::get_id());
     delete[] soft_decode_out_buf_;
     soft_decode_out_buf_ = nullptr;
-    DestoryLibjpegSource(&libjpeg_handler, handle);
+    DestroyLibjpegSource(&libjpeg_handler, handle);
     return decodeErr;
   }

   int32_t config_vpc_res = ConfigVpcInputData(vpc_input_info, &width, &height);
   if (config_vpc_res != decodeSucc) {
-    DestoryLibjpegSource(&libjpeg_handler, handle);
+    DestroyLibjpegSource(&libjpeg_handler, handle);
     return decodeErr;
   }
-  DestoryLibjpegSource(&libjpeg_handler, handle);
+  DestroyLibjpegSource(&libjpeg_handler, handle);
   return decodeSucc;
 }
diff --git a/mindspore/ccsrc/minddata/dataset/kernels/image/soft_dvpp/utils/soft_jpegd.h b/mindspore/ccsrc/minddata/dataset/kernels/image/soft_dvpp/utils/soft_jpegd.h
index 0503187755..e50f64ff55 100644
--- a/mindspore/ccsrc/minddata/dataset/kernels/image/soft_dvpp/utils/soft_jpegd.h
+++ b/mindspore/ccsrc/minddata/dataset/kernels/image/soft_dvpp/utils/soft_jpegd.h
@@ -35,7 +35,7 @@ class SoftJpegd {
    * @brief : decode interface
    * @param [in] VpcInfo& vpc_input_info : vpc input information
    * @param [in] SoftDpProcsessInfo& soft_dp_process_info : softDp process info
-   * @return : decodeSucc:decode success, decodeErr:decode failed.
+   * @return : decodeSucc:decode success, decodeErr:decode failed.
    */
   uint32_t JpegdSoftwareDecodeProcess(struct VpcInfo *vpc_input_info,
                                       struct SoftDpProcsessInfo *soft_dp_process_info);

@@ -50,7 +50,7 @@ class SoftJpegd {
    * @param [in] int32_t& sub_sample : level of chrominance subsampling in the image
    * @param [in] int32_t& color_spase : pointer to an integer variable that will receive one of the JPEG
    *             constants, indicating the colorspace of the JPEG image.
-   * @return : decodeSucc:alloc output buf success, decodeErr:alloc output buf failed.
+   * @return : decodeSucc:alloc output buf success, decodeErr:alloc output buf failed.
    */
   uint32_t AllocOutputBuffer(struct VpcInfo *vpc_input_info, int32_t *width, int32_t *height, int32_t *sub_sample);

@@ -59,7 +59,7 @@ class SoftJpegd {
    * @param [in] VpcInfo& vpc_input_info : vpc input information
    * @param [in] int32_t& width : output width
    * @param [in] int32_t& height : output height
-   * @return : decodeSucc:config output buf succes, decodeErr:config output buf failed.
+   * @return : decodeSucc:config output buf success, decodeErr:config output buf failed.
    */
   uint32_t ConfigVpcInputData(struct VpcInfo *vpc_input_info, int32_t *width, int32_t *height);
 };
diff --git a/mindspore/ccsrc/minddata/dataset/kernels/image/soft_dvpp/utils/soft_vpc.cc b/mindspore/ccsrc/minddata/dataset/kernels/image/soft_dvpp/utils/soft_vpc.cc
index 041803a624..1a67a30e08 100755
--- a/mindspore/ccsrc/minddata/dataset/kernels/image/soft_dvpp/utils/soft_vpc.cc
+++ b/mindspore/ccsrc/minddata/dataset/kernels/image/soft_dvpp/utils/soft_vpc.cc
@@ -375,11 +375,11 @@ void SoftVpc::ChipPreProcess() {
   YuvWPara *yuv_scaler_paraset = YuvScalerParaSet::GetInstance();
   YuvScalerPara *scale = yuv_scaler_paraset->scale;

-  int32_t index = GetScalerParamterIndex(horizon_coeff_, yuv_scaler_paraset);
+  int32_t index = GetScalerParameterIndex(horizon_coeff_, yuv_scaler_paraset);
   y_horizon_tap_ = scale[index].taps_6;
   uv_horizon_tap_ = scale[index].taps_4;

-  index = GetScalerParamterIndex(vertical_coeff_, yuv_scaler_paraset);
+  index = GetScalerParameterIndex(vertical_coeff_, yuv_scaler_paraset);
   vertical_tap_ = (half_line_mode_) ? scale[index].taps_6 : scale[index].taps_4;
 }
diff --git a/mindspore/ccsrc/minddata/dataset/kernels/image/soft_dvpp/utils/yuv_scaler_para_set.cc b/mindspore/ccsrc/minddata/dataset/kernels/image/soft_dvpp/utils/yuv_scaler_para_set.cc
index 4e32c0cd32..6ea5e34be3 100644
--- a/mindspore/ccsrc/minddata/dataset/kernels/image/soft_dvpp/utils/yuv_scaler_para_set.cc
+++ b/mindspore/ccsrc/minddata/dataset/kernels/image/soft_dvpp/utils/yuv_scaler_para_set.cc
@@ -249,25 +249,25 @@ YuvWPara *YuvScalerParaSet::GetInstance(std::string *paraset_name, uint32_t yuv_
 }

 // Searching for the index number of the filtering parameter by using the dichotomy
-int32_t GetScalerParamterIndex(uint32_t paramter, YuvWPara *paramterset) {
-  int32_t count = paramterset->real_count;
+int32_t GetScalerParameterIndex(uint32_t parameter, YuvWPara *parameterset) {
+  int32_t count = parameterset->real_count;
   int32_t left = 0;
   int32_t right = count - 1;
-  YuvScalerPara *scaler = paramterset->scale;
+  YuvScalerPara *scaler = parameterset->scale;
   int32_t index = 0;

-  if (paramter <= scalerRadio1Time) {
+  if (parameter <= scalerRadio1Time) {
     index = 0;
   } else {
-    paramter = paramter >> paramterInterval;
+    parameter = parameter >> parameterInterval;
     while (left <= right) {
       index = (left + right) / 2;  // 2-point search
-      if (paramter > scaler[index].range.start && paramter <= scaler[index].range.end) {
+      if (parameter > scaler[index].range.start && parameter <= scaler[index].range.end) {
         break;
       }

-      if (paramter > scaler[index].range.end) {
+      if (parameter > scaler[index].range.end) {
         left = index + 1;
-      } else if (paramter <= scaler[index].range.start) {
+      } else if (parameter <= scaler[index].range.start) {
         right = index - 1;
       }
     }
diff --git a/mindspore/ccsrc/minddata/dataset/kernels/image/soft_dvpp/utils/yuv_scaler_para_set.h b/mindspore/ccsrc/minddata/dataset/kernels/image/soft_dvpp/utils/yuv_scaler_para_set.h
index 95c01edc24..aa2f0b71dc 100644
--- a/mindspore/ccsrc/minddata/dataset/kernels/image/soft_dvpp/utils/yuv_scaler_para_set.h
+++ b/mindspore/ccsrc/minddata/dataset/kernels/image/soft_dvpp/utils/yuv_scaler_para_set.h
@@ -25,7 +25,7 @@ const uint32_t maxFileCount = 10;
 const uint32_t kMaxParaCount = 2048;
 const uint32_t kScalerCoffNb4 = 32;
 const uint32_t kScalerCoffNb6 = 48;
-const uint32_t paramterInterval = 6;  // scaler Number of bits between each group of filtering coefficients
+const uint32_t parameterInterval = 6;  // scaler Number of bits between each group of filtering coefficients
 const uint32_t scalerRadio1Time = 0X10000;

 struct ScalerCoefficientIndex {
@@ -83,7 +83,7 @@ class YuvScalerParaSet {
   static pthread_mutex_t g_mutex_;
 };

-int32_t GetScalerParamterIndex(uint32_t paramter, YuvWPara *paramterset);
+int32_t GetScalerParameterIndex(uint32_t parameter, YuvWPara *parameterset);

 constexpr YuvWPara YUV_W_PARA = {
   1024,
diff --git a/mindspore/ccsrc/minddata/dataset/text/kernels/basic_tokenizer_op.h b/mindspore/ccsrc/minddata/dataset/text/kernels/basic_tokenizer_op.h
index 39b4ec521d..1414d47b8b 100644
--- a/mindspore/ccsrc/minddata/dataset/text/kernels/basic_tokenizer_op.h
+++ b/mindspore/ccsrc/minddata/dataset/text/kernels/basic_tokenizer_op.h
@@ -49,7 +49,7 @@ class BasicTokenizerOp : public TensorOp {
  protected:
   Status CaseFoldWithoutUnusedWords(const std::string_view &text, const std::unordered_set &unused_words,
-                                    std::string *outupt);
+                                    std::string *output);
   Status CaseFoldWithoutUnusedWords(const std::shared_ptr &input, std::shared_ptr *output);

   std::string Name() const override { return kBasicTokenizerOp; }
diff --git a/mindspore/ccsrc/minddata/dataset/util/numa_interface.h b/mindspore/ccsrc/minddata/dataset/util/numa_interface.h
index cac400bce5..daa3c0f058 100644
--- a/mindspore/ccsrc/minddata/dataset/util/numa_interface.h
+++ b/mindspore/ccsrc/minddata/dataset/util/numa_interface.h
@@ -25,7 +25,7 @@ struct bitmask {
   uint64_t *maskp;
 };

-// Now we seperate the link from _c_dataengine with numa,
+// Now we separate the link from _c_dataengine with numa,
 // and we use dlopen("libnuma") instead. This function will
 // return a handle which you can do NumaBind and ReleaseLibrary.
 void *GetNumaAdapterHandle();
diff --git a/mindspore/ccsrc/minddata/mindrecord/include/shard_column.h b/mindspore/ccsrc/minddata/mindrecord/include/shard_column.h
index dc790de7a8..50054d01bf 100644
--- a/mindspore/ccsrc/minddata/mindrecord/include/shard_column.h
+++ b/mindspore/ccsrc/minddata/mindrecord/include/shard_column.h
@@ -105,7 +105,7 @@ class __attribute__((visibility("default"))) ShardColumn {
                           std::unique_ptr *data_ptr, uint64_t *n_bytes);

  private:
-  /// \brief intialization
+  /// \brief initialization
   void Init(const json &schema_json, bool compress_integer = true);

   /// \brief get float value from json
diff --git a/mindspore/ccsrc/minddata/mindrecord/include/shard_header.h b/mindspore/ccsrc/minddata/mindrecord/include/shard_header.h
index 5bd4a81118..fd3aa9d2d8 100644
--- a/mindspore/ccsrc/minddata/mindrecord/include/shard_header.h
+++ b/mindspore/ccsrc/minddata/mindrecord/include/shard_header.h
@@ -142,7 +142,7 @@ class __attribute__((visibility("default"))) ShardHeader {

   /// \brief get the headers from all the shard data
   /// \param[in] the shard data real path
-  /// \param[in] the headers which readed from the shard data
+  /// \param[in] the headers which read from the shard data
   /// \return SUCCESS/FAILED
   MSRStatus GetHeaders(const vector &real_addresses, std::vector &headers);
diff --git a/mindspore/ccsrc/minddata/mindrecord/io/shard_reader.cc b/mindspore/ccsrc/minddata/mindrecord/io/shard_reader.cc
index 4c4840a66d..98c2bbc5e0 100644
--- a/mindspore/ccsrc/minddata/mindrecord/io/shard_reader.cc
+++ b/mindspore/ccsrc/minddata/mindrecord/io/shard_reader.cc
@@ -163,7 +163,7 @@ MSRStatus ShardReader::Init(const std::vector &file_paths, bool loa
   auto disk_size = page_size_ * row_group_summary.size();
   auto compression_size = shard_header_->GetCompressionSize();
   total_blob_size_ = disk_size + compression_size;
-  MS_LOG(INFO) << "Blob data size, on disk: " << disk_size << " , addtional uncompression: " << compression_size
+  MS_LOG(INFO) << "Blob data size, on disk: " << disk_size << " , additional uncompression: " << compression_size
                << " , Total: " << total_blob_size_;

   MS_LOG(INFO) << "Get meta from mindrecord file & index file successfully.";
diff --git a/mindspore/ccsrc/minddata/mindrecord/meta/shard_column.cc b/mindspore/ccsrc/minddata/mindrecord/meta/shard_column.cc
index a34634fd1d..a3df213234 100644
--- a/mindspore/ccsrc/minddata/mindrecord/meta/shard_column.cc
+++ b/mindspore/ccsrc/minddata/mindrecord/meta/shard_column.cc
@@ -326,9 +326,9 @@ std::vector ShardColumn::CompressBlob(const std::vector &blob,
     auto dst_blob_slice = CompressInt(blob_slice, int_type);
     // Get new column size
     auto new_blob_size = UIntToBytesBig(dst_blob_slice.size(), kInt64Type);
-    // Append new colmn size
+    // Append new column size
     dst_blob.insert(dst_blob.end(), new_blob_size.begin(), new_blob_size.end());
-    // Append new colmn data
+    // Append new column data
     dst_blob.insert(dst_blob.end(), dst_blob_slice.begin(), dst_blob_slice.end());
     i_src += kInt64Len + num_bytes;
   }
@@ -344,7 +344,7 @@ vector ShardColumn::CompressInt(const vector &src_bytes, const
   // Calculate bitmap size (bytes)
   uint64_t bitmap_size = (src_n_int + kNumDataOfByte - 1) / kNumDataOfByte;

-  // Initilize destination blob, more space than needed, will be resized
+  // Initialize destination blob, more space than needed, will be resized
   vector dst_bytes(kBytesOfColumnLen + bitmap_size + src_bytes.size(), 0);

   // Write number of elements to destination blob
diff --git a/tests/ut/cpp/dataset/common/bboxop_common.h b/tests/ut/cpp/dataset/common/bboxop_common.h
index 243908e7a3..52a42216bc 100644
--- a/tests/ut/cpp/dataset/common/bboxop_common.h
+++ b/tests/ut/cpp/dataset/common/bboxop_common.h
@@ -55,7 +55,7 @@ class BBoxOpCommon : public CVOpCommon {

   /// \brief Load BBox data from an XML file into a Tensor
   /// \param[in] path path to XML bbox data file
-  /// \param[inout] target_BBox pointer to a Tensor to load
+  /// \param[in, out] target_BBox pointer to a Tensor to load
   /// \return True if file loaded successfully, false if error -> logged to STD out
   bool LoadAnnotationFile(const std::string &path, std::shared_ptr *target_BBox);
diff --git a/tests/ut/cpp/mindrecord/ut_shard_writer_test.cc b/tests/ut/cpp/mindrecord/ut_shard_writer_test.cc
index ce60874a79..031c62f917 100644
--- a/tests/ut/cpp/mindrecord/ut_shard_writer_test.cc
+++ b/tests/ut/cpp/mindrecord/ut_shard_writer_test.cc
@@ -104,7 +104,7 @@ TEST_F(TestShardWriter, TestShardWriterShiftRawPage) {
   LoadData(input_path1, json_buffer1, kMaxNum);
   MS_LOG(INFO) << "Load Meta Data Already.";

-  // get files' pathes stored in vector image_filenames
+  // get files' paths stored in vector image_filenames
   mindrecord::GetAbsoluteFiles(path_dir, image_filenames);  // get all files whose path within path_dir
   MS_LOG(INFO) << "Only process 10 file names:";
   image_filenames.resize(kMaxNum);
@@ -236,7 +236,7 @@ TEST_F(TestShardWriter, TestShardWriterTrial) {
   LoadData(input_path1, json_buffer1, kMaxNum);
   MS_LOG(INFO) << "Load Meta Data Already.";

-  // get files' pathes stored in vector image_filenames
+  // get files' paths stored in vector image_filenames
   mindrecord::GetAbsoluteFiles(path_dir, image_filenames);  // get all files whose path within path_dir
   MS_LOG(INFO) << "Only process 10 file names:";
   image_filenames.resize(kMaxNum);
@@ -375,7 +375,7 @@ TEST_F(TestShardWriter, TestShardWriterTrialNoFields) {
   LoadData(input_path1, json_buffer1, kMaxNum);
   MS_LOG(INFO) << "Load Meta Data Already.";

-  // get files' pathes stored in vector image_filenames
+  // get files' paths stored in vector image_filenames
   mindrecord::GetAbsoluteFiles(path_dir, image_filenames);  // get all files whose path within path_dir
   MS_LOG(INFO) << "Only process 10 file names:";
   image_filenames.resize(kMaxNum);
@@ -509,7 +509,7 @@ TEST_F(TestShardWriter, DataCheck) {
   LoadData(input_path1, json_buffer1, kMaxNum);
   MS_LOG(INFO) << "Load Meta Data Already.";

-  // get files' pathes stored in vector image_filenames
+  // get files' paths stored in vector image_filenames
   mindrecord::GetAbsoluteFiles(path_dir, image_filenames);  // get all files whose path within path_dir
   MS_LOG(INFO) << "Only process 10 file names:";
   image_filenames.resize(kMaxNum);
@@ -610,7 +610,7 @@ TEST_F(TestShardWriter, AllRawDataWrong) {
   LoadData(input_path1, json_buffer1, kMaxNum);
   MS_LOG(INFO) << "Load Meta Data Already.";

-  // get files' pathes stored in vector image_filenames
+  // get files' paths stored in vector image_filenames
   mindrecord::GetAbsoluteFiles(path_dir, image_filenames);  // get all files whose path within path_dir
   MS_LOG(INFO) << "Only process 10 file names:";
   image_filenames.resize(kMaxNum);
diff --git a/tests/ut/python/dataset/test_generator_reset_pass.py b/tests/ut/python/dataset/test_generator_reset_pass.py
index 3facaf5804..9ee6da1505 100644
--- a/tests/ut/python/dataset/test_generator_reset_pass.py
+++ b/tests/ut/python/dataset/test_generator_reset_pass.py
@@ -34,7 +34,7 @@ def generator_22to24():


 def test_simple_repeat():
-    # Since numer of epoch is 1, the GeneratorPass logic will not add the reset logic.
+    # Since number of epoch is 1, the GeneratorPass logic will not add the reset logic.
     logger.info("test_simple_repeat")
     # apply dataset operations
     data1 = ds.GeneratorDataset(generator_1to2, ["data"])
@@ -59,7 +59,7 @@ def test_generator_reset_1():
     branch1 = data1.repeat(4)
     data2 = ds.GeneratorDataset(generator_10to12, ["data"])
     branch2 = data2.repeat(2)
-    branch2 = branch2.take(10)  # Meaningless opearation, just want to insert an op in between
+    branch2 = branch2.take(10)  # Meaningless operation, just want to insert an op in between
     data3 = ds.GeneratorDataset(generator_22to24, ["data"])
     branch3 = data3.repeat(3)
     branch3 = branch3.skip(1)  # Skip the first row
@@ -86,7 +86,7 @@ def test_generator_reset_2():
     branch1 = data1.repeat(3)
     data2 = ds.GeneratorDataset(generator_10to12, ["data"])
     branch2 = data2.repeat(2)
-    branch2 = branch2.take(10)  # Meaningless opearation, just want to insert an op in between
+    branch2 = branch2.take(10)  # Meaningless operation, just want to insert an op in between
     data3 = ds.GeneratorDataset(generator_22to24, ["data"])
     branch3 = data3.skip(2)  # Skip the first row
diff --git a/tests/ut/python/dataset/test_pyfunc.py b/tests/ut/python/dataset/test_pyfunc.py
index 19b19f3179..ec2ccdf71b 100644
--- a/tests/ut/python/dataset/test_pyfunc.py
+++ b/tests/ut/python/dataset/test_pyfunc.py
@@ -276,8 +276,8 @@ def test_pyfunc_implicit_compose():
         i = i + 4


-def test_pyfunc_execption():
-    logger.info("Test PyFunc Execption Throw: lambda x : raise Execption()")
+def test_pyfunc_exception():
+    logger.info("Test PyFunc Exception Throw: lambda x : raise Exception()")

     def pyfunc(x):
         raise Exception("Pyfunc Throw")
@@ -292,8 +292,8 @@ def test_pyfunc_execption():
     assert "Pyfunc Throw" in str(info.value)


-def skip_test_pyfunc_execption_multiprocess():
-    logger.info("Test Multiprocess PyFunc Execption Throw: lambda x : raise Execption()")
+def skip_test_pyfunc_exception_multiprocess():
+    logger.info("Test Multiprocess PyFunc Exception Throw: lambda x : raise Exception()")

     def pyfunc(x):
         raise Exception("MP Pyfunc Throw")
@@ -320,5 +320,5 @@ if __name__ == "__main__":
     test_case_8()
     test_case_9()
     test_pyfunc_implicit_compose()
-    test_pyfunc_execption()
-    skip_test_pyfunc_execption_multiprocess()
+    test_pyfunc_exception()
+    skip_test_pyfunc_exception_multiprocess()
diff --git a/tests/ut/python/dataset/test_random_rotation.py b/tests/ut/python/dataset/test_random_rotation.py
index 92a1782c77..2a037b452e 100644
--- a/tests/ut/python/dataset/test_random_rotation.py
+++ b/tests/ut/python/dataset/test_random_rotation.py
@@ -154,7 +154,7 @@ def test_random_rotation_md5():
     original_seed = config_get_set_seed(5)
     original_num_parallel_workers = config_get_set_num_parallel_workers(1)

-    # Fisrt dataset
+    # First dataset
     data1 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
     decode_op = c_vision.Decode()
     resize_op = c_vision.RandomRotation((0, 90),
diff --git a/tests/ut/python/dataset/test_take.py b/tests/ut/python/dataset/test_take.py
index 07ec95466b..3754aba0f8 100644
--- a/tests/ut/python/dataset/test_take.py
+++ b/tests/ut/python/dataset/test_take.py
@@ -305,7 +305,7 @@ def test_take_16():

 def test_take_17():
     """
-    Test take: take first, then do fiter operation
+    Test take: take first, then do filter operation
     """
     logger.info("test_take_17")
     data1 = ds.GeneratorDataset(generator_10, ["data"])
@@ -322,7 +322,7 @@ def test_take_17():

 def test_take_18():
     """
-    Test take: take first, then do fiter, skip, batch and repeat operation
+    Test take: take first, then do filter, skip, batch and repeat operation
     """
     logger.info("test_take_18")
     data1 = ds.GeneratorDataset(generator_10, ["data"])
diff --git a/tests/ut/python/mindrecord/skip_test_issue.py b/tests/ut/python/mindrecord/skip_test_issue.py
index 1ec10b005c..52f8ae170f 100644
--- a/tests/ut/python/mindrecord/skip_test_issue.py
+++ b/tests/ut/python/mindrecord/skip_test_issue.py
@@ -547,7 +547,7 @@ def skip_test_issue_155():
     count = 0
     for _ in reader.get_next():
         count += 1
-    assert count == 10000, "Failed to read mutiple writed data."
+    assert count == 10000, "Failed to read multiple writed data."


 def test_issue_124():
diff --git a/tests/ut/python/mindrecord/test_cifar100_to_mindrecord.py b/tests/ut/python/mindrecord/test_cifar100_to_mindrecord.py
index 5cf778c889..37d13f0c2b 100644
--- a/tests/ut/python/mindrecord/test_cifar100_to_mindrecord.py
+++ b/tests/ut/python/mindrecord/test_cifar100_to_mindrecord.py
@@ -46,7 +46,7 @@ def test_cifar100_to_mindrecord_without_index_fields(fixture_file):
     """test transform cifar100 dataset to mindrecord without index fields."""
     cifar100_transformer = Cifar100ToMR(CIFAR100_DIR, MINDRECORD_FILE)
     ret = cifar100_transformer.transform()
-    assert ret == SUCCESS, "Failed to tranform from cifar100 to mindrecord"
+    assert ret == SUCCESS, "Failed to transform from cifar100 to mindrecord"
     assert os.path.exists(MINDRECORD_FILE)
     assert os.path.exists(MINDRECORD_FILE + "_test")
     read()
diff --git a/tests/ut/python/mindrecord/test_mindrecord_exception.py b/tests/ut/python/mindrecord/test_mindrecord_exception.py
index e37d9692a4..2a6bce9d10 100644
--- a/tests/ut/python/mindrecord/test_mindrecord_exception.py
+++ b/tests/ut/python/mindrecord/test_mindrecord_exception.py
@@ -591,27 +591,27 @@ def test_write_with_invalid_data():
     writer.write_raw_data(data)
     writer.commit()

-    # field: label => lable
+    # field: label => labels
     with pytest.raises(Exception, match="Failed to write dataset"):
         remove_one_file(mindrecord_file_name)
         remove_one_file(mindrecord_file_name + ".db")

-        data = [{"file_name": "001.jpg", "lable": 43, "score": 0.8, "mask": np.array([3, 6, 9], dtype=np.int64),
+        data = [{"file_name": "001.jpg", "labels": 43, "score": 0.8, "mask": np.array([3, 6, 9], dtype=np.int64),
                  "segments": np.array([[5.0, 1.6], [65.2, 8.3]], dtype=np.float32),
                  "data": bytes("image bytes abc", encoding='UTF-8')},
-                {"file_name": "002.jpg", "lable": 91, "score": 5.4, "mask": np.array([1, 4, 7], dtype=np.int64),
+                {"file_name": "002.jpg", "labels": 91, "score": 5.4, "mask": np.array([1, 4, 7], dtype=np.int64),
                  "segments": np.array([[5.1, 9.1], [2.0, 65.4]], dtype=np.float32),
                  "data": bytes("image bytes def", encoding='UTF-8')},
-                {"file_name": "003.jpg", "lable": 61, "score": 6.4, "mask": np.array([7, 6, 3], dtype=np.int64),
+                {"file_name": "003.jpg", "labels": 61, "score": 6.4, "mask": np.array([7, 6, 3], dtype=np.int64),
                  "segments": np.array([[0.0, 5.6], [3.0, 16.3]], dtype=np.float32),
                  "data": bytes("image bytes ghi", encoding='UTF-8')},
-                {"file_name": "004.jpg", "lable": 29, "score": 8.1, "mask": np.array([2, 8, 0], dtype=np.int64),
+                {"file_name": "004.jpg", "labels": 29, "score": 8.1, "mask": np.array([2, 8, 0], dtype=np.int64),
                  "segments": np.array([[5.9, 7.2], [4.0, 89.0]], dtype=np.float32),
                  "data": bytes("image bytes jkl", encoding='UTF-8')},
-                {"file_name": "005.jpg", "lable": 78, "score": 7.7, "mask": np.array([3, 1, 2], dtype=np.int64),
+                {"file_name": "005.jpg", "labels": 78, "score": 7.7, "mask": np.array([3, 1, 2], dtype=np.int64),
                  "segments": np.array([[0.6, 8.1], [5.3, 49.3]], dtype=np.float32),
                  "data": bytes("image bytes mno", encoding='UTF-8')},
-                {"file_name": "006.jpg", "lable": 37, "score": 9.4, "mask": np.array([7, 6, 7], dtype=np.int64),
+                {"file_name": "006.jpg", "labels": 37, "score": 9.4, "mask": np.array([7, 6, 7], dtype=np.int64),
                  "segments": np.array([[4.2, 6.3], [8.9, 81.8]], dtype=np.float32),
                  "data": bytes("image bytes pqr", encoding='UTF-8')}
                 ]