Fix some non-minddata typos

Signed-off-by: alex-yuyue <yue.yu1@huawei.com>
pull/9689/head
alex-yuyue 4 years ago
parent 934005f390
commit 5250b327ae

@ -199,7 +199,7 @@ Status ReduceMethod::InferForwardCommunication() {
return SUCCESS;
}
ForwardOp CreatReduceMeanForwardOp(const std::vector<Group> &forward_group, const TypePtr &dtype) {
ForwardOp CreateReduceMeanForwardOp(const std::vector<Group> &forward_group, const TypePtr &dtype) {
// Create AllReduceSum op
Operator op0 = CreateAllReduceOp(REDUCE_OP_SUM, forward_group[0].name());
std::string group_name = forward_group[0].name();
@ -275,7 +275,7 @@ Status ReduceMeanInfo::InferForwardCommunication() {
}
auto element_type = outputs_dtype_->cast<mindspore::TensorTypePtr>()->element();
forward_op_ = CreatReduceMeanForwardOp(forward_group, element_type);
forward_op_ = CreateReduceMeanForwardOp(forward_group, element_type);
}
return SUCCESS;

@ -413,7 +413,7 @@ def data_to_mindrecord_byte_image(dataset="coco", is_training=True, prefix="fast
def create_fasterrcnn_dataset(mindrecord_file, batch_size=2, device_num=1, rank_id=0, is_training=True,
num_parallel_workers=8):
"""Creatr FasterRcnn dataset with MindDataset."""
"""Create FasterRcnn dataset with MindDataset."""
cv2.setNumThreads(0)
de.config.set_prefetch_size(8)
ds = de.MindDataset(mindrecord_file, columns_list=["image", "annotation"], num_shards=device_num, shard_id=rank_id,

@ -390,7 +390,7 @@ def data_to_mindrecord_byte_image(dataset="coco", is_training=True, prefix="ssd.
def create_ssd_dataset(mindrecord_file, batch_size=32, repeat_num=10, device_num=1, rank=0,
is_training=True, num_parallel_workers=4, use_multiprocessing=True):
"""Creatr SSD dataset with MindDataset."""
"""Create SSD dataset with MindDataset."""
ds = de.MindDataset(mindrecord_file, columns_list=["img_id", "image", "annotation"], num_shards=device_num,
shard_id=rank, num_parallel_workers=num_parallel_workers, shuffle=is_training)
decode = C.Decode()

@ -291,7 +291,7 @@ def data_to_mindrecord_byte_image(image_dir, anno_path, mindrecord_dir, prefix,
def create_yolo_dataset(mindrecord_dir, batch_size=32, repeat_num=1, device_num=1, rank=0,
is_training=True, num_parallel_workers=8):
"""Creatr YOLOv3 dataset with MindDataset."""
"""Create YOLOv3 dataset with MindDataset."""
ds = de.MindDataset(mindrecord_dir, columns_list=["image", "annotation"], num_shards=device_num, shard_id=rank,
num_parallel_workers=num_parallel_workers, shuffle=is_training)
decode = C.Decode()

@ -43,7 +43,7 @@ def lstm_create_dataset(data_home, batch_size, repeat_num=1, training=True):
def _convert_to_mindrecord(data_home, features, labels, weight_np=None, training=True):
"""
convert imdb dataset to mindrecoed dataset
convert imdb dataset to mindrecord dataset
"""
if weight_np is not None:
np.savetxt(os.path.join(data_home, 'weight.txt'), weight_np)
@ -76,7 +76,7 @@ def _convert_to_mindrecord(data_home, features, labels, weight_np=None, training
def convert_to_mindrecord(embed_size, aclimdb_path, preprocess_path, glove_path):
"""
convert imdb dataset to mindrecoed dataset
convert imdb dataset to mindrecord dataset
"""
parser = ImdbParser(aclimdb_path, glove_path, embed_size)
parser.parse()

@ -394,7 +394,7 @@ def data_to_mindrecord_byte_image(dataset="coco", is_training=True, prefix="ssd.
def create_ssd_dataset(mindrecord_file, batch_size=32, repeat_num=10, device_num=1, rank=0,
is_training=True, num_parallel_workers=4):
"""Creatr SSD dataset with MindDataset."""
"""Create SSD dataset with MindDataset."""
ds = de.MindDataset(mindrecord_file, columns_list=["img_id", "image", "annotation"], num_shards=device_num,
shard_id=rank, num_parallel_workers=num_parallel_workers, shuffle=is_training)
decode = C.Decode()

@ -294,7 +294,7 @@ def data_to_mindrecord_byte_image(image_dir, anno_path, mindrecord_dir, prefix="
def create_yolo_dataset(mindrecord_dir, batch_size=32, repeat_num=10, device_num=1, rank=0,
is_training=True, num_parallel_workers=8):
"""Creatr YOLOv3 dataset with MindDataset."""
"""Create YOLOv3 dataset with MindDataset."""
ds = de.MindDataset(mindrecord_dir, columns_list=["image", "annotation"], num_shards=device_num, shard_id=rank,
num_parallel_workers=num_parallel_workers, shuffle=False)
decode = C.Decode()

Loading…
Cancel
Save