!8663 Modify alarm information

From: @shenwei41
Reviewed-by: @pandoublefeng,@heleiwang
Signed-off-by: @pandoublefeng
pull/8663/MERGE
mindspore-ci-bot, 4 years ago, committed by Gitee
commit 17acf2bcaa
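
Note on the change pattern: each hunk below adds an explicitly defaulted destructor to a public class in the dataset C++ API, presumably to clear static-check alarms about classes that declare constructors but no destructor. A minimal, self-contained sketch of that pattern (the class and member names here are illustrative, not taken from the diff):

#include <string>
#include <utility>

// Illustrative only: the recurring change in this commit is the explicitly
// defaulted destructor; everything else here is a stand-in so the sketch compiles.
class ExampleOption {
 public:
  explicit ExampleOption(std::string name) : name_(std::move(name)) {}
  ExampleOption(const ExampleOption &other) = default;
  ~ExampleOption() = default;  // the kind of line this commit adds throughout

 private:
  std::string name_;
};

One side effect worth knowing: a user-declared destructor, even a defaulted one, suppresses the implicitly generated move operations, so a class that previously relied on implicit moves falls back to copies.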

@@ -46,6 +46,8 @@ class SliceOption {
explicit SliceOption(Slice slice) : slice_(slice) {}
SliceOption(SliceOption const &slice) = default;
~SliceOption() = default;
// only one of the following will be valid
// given indices to slice the Tensor.
std::vector<dsize_t> indices_ = {};

@@ -400,6 +400,7 @@ class ServerStopRequest : public BaseRequest {
explicit ServerStopRequest(int32_t qID) : BaseRequest(RequestType::kStopService) {
rq_.add_buf_data(std::to_string(qID));
}
~ServerStopRequest() = default;
Status PostReply() override;
};

@@ -32,6 +32,8 @@ class PythonIteratorConsumer : public IteratorConsumer {
/// Constructor which will call the base class default constructor.
/// \param num_epochs number of epochs. Default to -1 (infinite epochs).
explicit PythonIteratorConsumer(int32_t num_epochs = -1) : IteratorConsumer(num_epochs) {}
~PythonIteratorConsumer() = default;
/// Returns the next row in a vector format
/// \param[out] out std::vector of Tensors
/// \return Status error code

@@ -58,6 +58,8 @@ class IteratorConsumer : public TreeConsumer {
/// \param num_epochs number of epochs. Default to -1 (infinite epochs).
explicit IteratorConsumer(int32_t num_epochs = -1) : TreeConsumer(), num_epochs_(num_epochs) {}
~IteratorConsumer() = default;
Status Init(std::shared_ptr<DatasetNode> d) override;
/// Returns the next row in a vector format
@@ -90,6 +92,8 @@ class SaveToDisk : public TreeConsumer {
explicit SaveToDisk(std::string dataset_path, int32_t num_files = 1, std::string dataset_type = "mindrecord")
: TreeConsumer(), dataset_path_(dataset_path), num_files_(num_files), dataset_type_(dataset_type) {}
~SaveToDisk() = default;
/// \brief Parameters validation
/// \return Status Status::OK() if all the parameters are valid
Status ValidateParams();
@@ -130,6 +134,8 @@ class ToDevice : public TreeConsumer {
explicit ToDevice(bool send_epoch_end, int32_t num_epochs = -1)
: TreeConsumer(), send_epoch_end_(send_epoch_end), num_epochs_(num_epochs) {}
~ToDevice() = default;
Status Init(std::shared_ptr<DatasetNode> d) override;
/// Send the data to device
@@ -159,6 +165,7 @@ class ToDevice : public TreeConsumer {
class TreeGetters : public TreeConsumer {
public:
TreeGetters();
~TreeGetters() = default;
Status Init(std::shared_ptr<DatasetNode> d) override;
Status GetDatasetSize(int64_t *size);
Status GetOutputTypes(std::vector<DataType> *types);
@@ -184,6 +191,8 @@ class BuildVocabConsumer : public TreeConsumer {
/// BuildVocabConsumer Constructor which will call the base class default constructor.
BuildVocabConsumer() = default;
~BuildVocabConsumer() = default;
Status Init(std::shared_ptr<DatasetNode> d) override;
/// Start consuming

@@ -424,6 +424,7 @@ class SchemaObj {
class BatchDataset : public Dataset {
public:
BatchDataset(std::shared_ptr<Dataset> input, int32_t batch_size, bool drop_remainder = false);
~BatchDataset() = default;
};
#ifndef ENABLE_ANDROID
@@ -435,17 +436,20 @@ class BucketBatchByLengthDataset : public Dataset {
std::function<TensorRow(TensorRow)> element_length_function = nullptr,
const std::map<std::string, std::pair<TensorShape, std::shared_ptr<Tensor>>> &pad_info = {},
bool pad_to_bucket_boundary = false, bool drop_remainder = false);
~BucketBatchByLengthDataset() = default;
};
class ConcatDataset : public Dataset {
public:
explicit ConcatDataset(const std::vector<std::shared_ptr<Dataset>> &input);
~ConcatDataset() = default;
};
class FilterDataset : public Dataset {
public:
FilterDataset(std::shared_ptr<Dataset> input, std::function<TensorRow(TensorRow)> predicate,
std::vector<std::string> input_columns);
~FilterDataset() = default;
};
#endif
@@ -455,11 +459,13 @@ class MapDataset : public Dataset {
std::vector<std::string> input_columns, std::vector<std::string> output_columns,
const std::vector<std::string> &project_columns, const std::shared_ptr<DatasetCache> &cache,
std::vector<std::shared_ptr<DSCallback>> callbacks);
~MapDataset() = default;
};
class ProjectDataset : public Dataset {
public:
ProjectDataset(std::shared_ptr<Dataset> input, const std::vector<std::string> &columns);
~ProjectDataset() = default;
};
#ifndef ENABLE_ANDROID
@@ -473,27 +479,32 @@ class RenameDataset : public Dataset {
class RepeatDataset : public Dataset {
public:
RepeatDataset(std::shared_ptr<Dataset> input, int32_t count);
~RepeatDataset() = default;
};
class ShuffleDataset : public Dataset {
public:
ShuffleDataset(std::shared_ptr<Dataset> input, int32_t buffer_size);
~ShuffleDataset() = default;
};
#ifndef ENABLE_ANDROID
class SkipDataset : public Dataset {
public:
SkipDataset(std::shared_ptr<Dataset> input, int32_t count);
~SkipDataset() = default;
};
class TakeDataset : public Dataset {
public:
TakeDataset(std::shared_ptr<Dataset> input, int32_t count);
~TakeDataset() = default;
};
class ZipDataset : public Dataset {
public:
explicit ZipDataset(const std::vector<std::shared_ptr<Dataset>> &inputs);
~ZipDataset() = default;
};
#endif
/// \brief Function to create a SchemaObj
@@ -507,6 +518,7 @@ class AlbumDataset : public Dataset {
const std::vector<std::string> &column_names = {}, bool decode = false,
const std::shared_ptr<SamplerObj> &sampler = RandomSampler(),
const std::shared_ptr<DatasetCache> &cache = nullptr);
~AlbumDataset() = default;
};
/// \brief Function to create an AlbumDataset
@@ -533,6 +545,7 @@ class CelebADataset : public Dataset {
const std::shared_ptr<SamplerObj> &sampler = RandomSampler(), bool decode = false,
const std::set<std::string> &extensions = {},
const std::shared_ptr<DatasetCache> &cache = nullptr);
~CelebADataset() = default;
};
/// \brief Function to create a CelebADataset
@@ -557,6 +570,7 @@ class Cifar10Dataset : public Dataset {
explicit Cifar10Dataset(const std::string &dataset_dir, const std::string &usage = "all",
const std::shared_ptr<SamplerObj> &sampler = RandomSampler(),
const std::shared_ptr<DatasetCache> &cache = nullptr);
~Cifar10Dataset() = default;
};
/// \brief Function to create a Cifar10 Dataset
@@ -577,6 +591,7 @@ class Cifar100Dataset : public Dataset {
explicit Cifar100Dataset(const std::string &dataset_dir, const std::string &usage = "all",
const std::shared_ptr<SamplerObj> &sampler = RandomSampler(),
const std::shared_ptr<DatasetCache> &cache = nullptr);
~Cifar100Dataset() = default;
};
/// \brief Function to create a Cifar100 Dataset
@@ -598,6 +613,7 @@ class CLUEDataset : public Dataset {
const std::string &usage = "train", int64_t num_samples = 0,
ShuffleMode shuffle = ShuffleMode::kGlobal, int32_t num_shards = 1, int32_t shard_id = 0,
const std::shared_ptr<DatasetCache> &cache = nullptr);
~CLUEDataset() = default;
};
/// \brief Function to create a CLUEDataset
@@ -629,6 +645,7 @@ class CocoDataset : public Dataset {
CocoDataset(const std::string &dataset_dir, const std::string &annotation_file, const std::string &task = "Detection",
const bool &decode = false, const std::shared_ptr<SamplerObj> &sampler = RandomSampler(),
const std::shared_ptr<DatasetCache> &cache = nullptr);
~CocoDataset() = default;
};
/// \brief Function to create a CocoDataset
@@ -661,6 +678,7 @@ class CSVDataset : public Dataset {
const std::vector<std::string> &column_names = {}, int64_t num_samples = 0,
ShuffleMode shuffle = ShuffleMode::kGlobal, int32_t num_shards = 1, int32_t shard_id = 0,
const std::shared_ptr<DatasetCache> &cache = nullptr);
~CSVDataset() = default;
};
/// \brief Function to create a CSVDataset
@@ -698,6 +716,7 @@ class ImageFolderDataset : public Dataset {
const std::set<std::string> &extensions = {},
const std::map<std::string, int32_t> &class_indexing = {},
const std::shared_ptr<DatasetCache> &cache = nullptr);
~ImageFolderDataset() = default;
};
/// \brief Function to create an ImageFolderDataset
@@ -725,6 +744,7 @@ class ManifestDataset : public Dataset {
const std::shared_ptr<SamplerObj> &sampler = RandomSampler(),
const std::map<std::string, int32_t> &class_indexing = {}, bool decode = false,
const std::shared_ptr<DatasetCache> &cache = nullptr);
~ManifestDataset() = default;
};
/// \brief Function to create a ManifestDataset
@@ -753,6 +773,7 @@ class MindDataDataset : public Dataset {
const std::vector<std::string> &columns_list = {},
const std::shared_ptr<SamplerObj> &sampler = RandomSampler(),
nlohmann::json padded_sample = nullptr, int64_t num_padded = 0);
~MindDataDataset() = default;
};
/// \brief Function to create a MindDataDataset
@@ -789,6 +810,7 @@ class MnistDataset : public Dataset {
explicit MnistDataset(const std::string &dataset_dir, const std::string &usage = "all",
const std::shared_ptr<SamplerObj> &sampler = RandomSampler(),
const std::shared_ptr<DatasetCache> &cache = nullptr);
~MnistDataset() = default;
};
/// \brief Function to create a MnistDataset
@@ -819,6 +841,8 @@ class RandomDataDataset : public Dataset {
RandomDataDataset(const int32_t &total_rows, std::string schema_path, const std::vector<std::string> &columns_list,
std::shared_ptr<DatasetCache> cache);
~RandomDataDataset() = default;
};
/// \brief Function to create a RandomDataset
@@ -849,6 +873,7 @@ class TextFileDataset : public Dataset {
explicit TextFileDataset(const std::vector<std::string> &dataset_files, int64_t num_samples = 0,
ShuffleMode shuffle = ShuffleMode::kGlobal, int32_t num_shards = 1, int32_t shard_id = 0,
const std::shared_ptr<DatasetCache> &cache = nullptr);
~TextFileDataset() = default;
};
/// \brief Function to create a TextFileDataset
@@ -883,6 +908,8 @@ class TFRecordDataset : public Dataset {
TFRecordDataset(const std::vector<std::string> &dataset_files, std::shared_ptr<SchemaObj> schema,
const std::vector<std::string> &columns_list, int64_t num_samples, ShuffleMode shuffle,
int32_t num_shards, int32_t shard_id, bool shard_equal_rows, std::shared_ptr<DatasetCache> cache);
~TFRecordDataset() = default;
};
/// \brief Function to create a TFRecordDataset
@@ -941,6 +968,7 @@ class VOCDataset : public Dataset {
const std::string &usage = "train", const std::map<std::string, int32_t> &class_indexing = {},
bool decode = false, const std::shared_ptr<SamplerObj> &sampler = RandomSampler(),
const std::shared_ptr<DatasetCache> &cache = nullptr);
~VOCDataset() = default;
};
/// \brief Function to create a VOCDataset
