parent fcdc88cca9
commit ce03ce5af2
@@ -0,0 +1,108 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""train_multinpu."""


import os
import sys
from mindspore import Model, context
from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, TimeMonitor
from mindspore.train import ParallelMode
from mindspore.communication.management import get_rank, get_group_size, init
from mindspore.parallel import _cost_model_context as cost_model_context
from mindspore.nn.wrap.cell_wrapper import VirtualDatasetCellTriple

from src.wide_and_deep import PredictWithSigmoid, TrainStepWrap, NetWithLossClass, WideDeepModel
from src.callbacks import LossCallBack, EvalCallBack
from src.datasets import create_dataset
from src.metrics import AUCMetric
from src.config import WideDeepConfig

sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend", save_graphs=True)
context.set_auto_parallel_context(parallel_mode=ParallelMode.AUTO_PARALLEL, mirror_mean=True)
cost_model_context.set_cost_model_context(multi_subgraphs=True)
init()


def get_WideDeep_net(config):
    WideDeep_net = WideDeepModel(config)
    loss_net = NetWithLossClass(WideDeep_net, config)
    loss_net = VirtualDatasetCellTriple(loss_net)
    train_net = TrainStepWrap(loss_net)
    eval_net = PredictWithSigmoid(WideDeep_net)
    eval_net = VirtualDatasetCellTriple(eval_net)
    return train_net, eval_net


class ModelBuilder():
    """
    ModelBuilder
    """
    def __init__(self):
        pass

    def get_hook(self):
        pass

    def get_train_hook(self):
        hooks = []
        callback = LossCallBack()
        hooks.append(callback)
        if int(os.getenv('DEVICE_ID')) == 0:
            pass
        return hooks

    def get_net(self, config):
        return get_WideDeep_net(config)


def test_train_eval():
    """
    test_train_eval
    """
    config = WideDeepConfig()
    data_path = config.data_path
    batch_size = config.batch_size
    epochs = config.epochs
    print("epochs is {}".format(epochs))
    ds_train = create_dataset(data_path, train_mode=True, epochs=epochs,
                              batch_size=batch_size, rank_id=get_rank(), rank_size=get_group_size())
    ds_eval = create_dataset(data_path, train_mode=False, epochs=epochs + 1,
                             batch_size=batch_size, rank_id=get_rank(), rank_size=get_group_size())
    print("ds_train.size: {}".format(ds_train.get_dataset_size()))
    print("ds_eval.size: {}".format(ds_eval.get_dataset_size()))

    net_builder = ModelBuilder()

    train_net, eval_net = net_builder.get_net(config)
    train_net.set_train()
    auc_metric = AUCMetric()

    model = Model(train_net, eval_network=eval_net, metrics={"auc": auc_metric})

    eval_callback = EvalCallBack(model, ds_eval, auc_metric, config)

    callback = LossCallBack(config=config)
    ckptconfig = CheckpointConfig(save_checkpoint_steps=ds_train.get_dataset_size(), keep_checkpoint_max=5)
    ckpoint_cb = ModelCheckpoint(prefix='widedeep_train',
                                 directory=config.ckpt_path, config=ckptconfig)
    model.train(epochs, ds_train,
                callbacks=[TimeMonitor(ds_train.get_dataset_size()), eval_callback, callback, ckpoint_cb])


if __name__ == "__main__":
    test_train_eval()
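For reference, a minimal usage sketch of the builder defined above, outside the pytest entry point. The module name train_multinpu is an assumption based on the docstring; the Ascend launch environment (DEVICE_ID, rank table) is assumed to be set up already.

# A minimal sketch, assuming this file is importable as train_multinpu.
from src.config import WideDeepConfig
from train_multinpu import ModelBuilder  # hypothetical module name

config = WideDeepConfig()
train_net, eval_net = ModelBuilder().get_net(config)
train_net.set_train()  # switches the cells into training-phase behaviour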
@@ -0,0 +1,22 @@
#!/bin/bash
# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
LOCAL_HIAI=/usr/local/Ascend
export TBE_IMPL_PATH=${LOCAL_HIAI}/runtime/ops/op_impl/built-in/ai_core/tbe/impl/:${TBE_IMPL_PATH}
export LD_LIBRARY_PATH=${LOCAL_HIAI}/runtime/lib64/:${LOCAL_HIAI}/add-ons/:${LD_LIBRARY_PATH}
export PATH=${LOCAL_HIAI}/runtime/ccec_compiler/bin/:${PATH}
export PYTHONPATH=${LOCAL_HIAI}/runtime/ops/op_impl/built-in/ai_core/tbe/:${PYTHONPATH}
export DEVICE_MEMORY_CAPACITY=1073741824000
export NOT_FULLY_USE_DEVICES=off
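These exports must be in the environment of the training process itself. A hedged Python sketch of one way to source this env.sh before spawning training; the bash -c pattern and the script paths are assumptions, not part of this commit.

# Sketch: source env.sh in a subshell so the launched training process
# inherits the Ascend paths exported above.
import subprocess

cmd = "source ./env.sh && python train_multinpu.py"  # paths are assumptions
subprocess.run(["bash", "-c", cmd], check=True)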
@@ -0,0 +1,92 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" config. """
import argparse


def argparse_init():
    """
    argparse_init
    """
    parser = argparse.ArgumentParser(description='WideDeep')
    parser.add_argument("--data_path", type=str, default="./test_raw_data/")
    parser.add_argument("--epochs", type=int, default=15)
    parser.add_argument("--batch_size", type=int, default=16000)
    parser.add_argument("--eval_batch_size", type=int, default=16000)
    parser.add_argument("--field_size", type=int, default=39)
    parser.add_argument("--vocab_size", type=int, default=184965)
    parser.add_argument("--emb_dim", type=int, default=80)
    parser.add_argument("--deep_layer_dim", type=int, nargs='+', default=[1024, 512, 256, 128])
    parser.add_argument("--deep_layer_act", type=str, default='relu')
    parser.add_argument("--keep_prob", type=float, default=1.0)

    parser.add_argument("--output_path", type=str, default="./output/")
    parser.add_argument("--ckpt_path", type=str, default="./checkpoints/")
    parser.add_argument("--eval_file_name", type=str, default="eval.log")
    parser.add_argument("--loss_file_name", type=str, default="loss.log")
    return parser


class WideDeepConfig():
    """
    WideDeepConfig
    """
    def __init__(self):
        self.data_path = "/home/workspace/mindspore_dataset/criteo_data/mindrecord"
        self.epochs = 1
        self.batch_size = 16000
        self.eval_batch_size = 16000
        self.field_size = 39
        self.vocab_size = 184968
        self.emb_dim = 64
        self.deep_layer_dim = [1024, 512, 256, 128]
        self.deep_layer_act = 'relu'
        self.weight_bias_init = ['normal', 'normal']
        self.emb_init = 'normal'
        self.init_args = [-0.01, 0.01]
        self.dropout_flag = False
        self.keep_prob = 1.0
        self.l2_coef = 8e-5

        self.output_path = "./output"
        self.eval_file_name = "eval.log"
        self.loss_file_name = "loss.log"
        self.ckpt_path = "./checkpoints/"

    def argparse_init(self):
        """
        argparse_init
        """
        parser = argparse_init()
        args, _ = parser.parse_known_args()
        self.data_path = args.data_path
        self.epochs = args.epochs
        self.batch_size = args.batch_size
        self.eval_batch_size = args.eval_batch_size
        self.field_size = args.field_size
        self.vocab_size = args.vocab_size
        self.emb_dim = args.emb_dim
        self.deep_layer_dim = args.deep_layer_dim
        self.deep_layer_act = args.deep_layer_act
        self.keep_prob = args.keep_prob
        self.weight_bias_init = ['normal', 'normal']
        self.emb_init = 'normal'
        self.init_args = [-0.01, 0.01]
        self.dropout_flag = False
        self.l2_coef = 8e-5

        self.output_path = args.output_path
        self.eval_file_name = args.eval_file_name
        self.loss_file_name = args.loss_file_name
        self.ckpt_path = args.ckpt_path
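A short usage sketch of the config class above: defaults come from __init__, and argparse_init() lets known CLI flags override them while ignoring unrecognized ones (it uses parse_known_args).

# Sketch: construct the defaults, then let flags such as --epochs or
# --batch_size override them; unknown flags are silently ignored.
from src.config import WideDeepConfig

config = WideDeepConfig()
config.argparse_init()
print(config.epochs, config.batch_size, config.deep_layer_dim)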
@@ -0,0 +1,116 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""datasets."""


import os
from enum import Enum
import numpy as np
import mindspore.dataset.engine as de
import mindspore.common.dtype as mstype


class DataType(Enum):
    """
    Enumerate supported dataset formats.
    """
    MINDRECORD = 1
    TFRECORD = 2
    H5 = 3


def _get_tf_dataset(data_dir, train_mode=True, epochs=1, batch_size=1000,
                    line_per_sample=1000, rank_size=None, rank_id=None):
    """
    get_tf_dataset
    """
    dataset_files = []
    file_prefix_name = 'train' if train_mode else 'test'
    shuffle = train_mode
    for (dirpath, _, filenames) in os.walk(data_dir):
        for filename in filenames:
            if file_prefix_name in filename and "tfrecord" in filename:
                dataset_files.append(os.path.join(dirpath, filename))
    schema = de.Schema()
    schema.add_column('feat_ids', de_type=mstype.int32)
    schema.add_column('feat_vals', de_type=mstype.float32)
    schema.add_column('label', de_type=mstype.float32)
    if rank_size is not None and rank_id is not None:
        ds = de.TFRecordDataset(dataset_files=dataset_files, shuffle=shuffle, schema=schema, num_parallel_workers=8,
                                num_shards=rank_size, shard_id=rank_id, shard_equal_rows=True)
    else:
        ds = de.TFRecordDataset(dataset_files=dataset_files, shuffle=shuffle, schema=schema, num_parallel_workers=8)
    ds = ds.batch(int(batch_size / line_per_sample),
                  drop_remainder=True)
    ds = ds.map(operations=(lambda x, y, z: (
        np.array(x).flatten().reshape(batch_size, 39),
        np.array(y).flatten().reshape(batch_size, 39),
        np.array(z).flatten().reshape(batch_size, 1))),
                input_columns=['feat_ids', 'feat_vals', 'label'],
                columns_order=['feat_ids', 'feat_vals', 'label'], num_parallel_workers=8)
    # if train_mode:
    ds = ds.repeat(epochs)
    return ds


def _get_mindrecord_dataset(directory, train_mode=True, epochs=1, batch_size=1000,
                            line_per_sample=1000, rank_size=None, rank_id=None):
    """
    Get dataset in mindrecord format.

    Args:
        directory (str): Dataset directory.
        train_mode (bool): Whether the dataset is used for training or evaluation (default=True).
        epochs (int): Dataset epoch size (default=1).
        batch_size (int): Dataset batch size (default=1000).
        line_per_sample (int): The number of samples per line (default=1000).
        rank_size (int): The number of devices, not necessary for a single device (default=None).
        rank_id (int): Id of the device, not necessary for a single device (default=None).

    Returns:
        Dataset.
    """
    file_prefix_name = 'train_input_part.mindrecord' if train_mode else 'test_input_part.mindrecord'
    file_suffix_name = '00' if train_mode else '0'
    shuffle = train_mode

    if rank_size is not None and rank_id is not None:
        ds = de.MindDataset(os.path.join(directory, file_prefix_name + file_suffix_name),
                            columns_list=['feat_ids', 'feat_vals', 'label'],
                            num_shards=rank_size, shard_id=rank_id, shuffle=shuffle,
                            num_parallel_workers=8)
    else:
        ds = de.MindDataset(os.path.join(directory, file_prefix_name + file_suffix_name),
                            columns_list=['feat_ids', 'feat_vals', 'label'],
                            shuffle=shuffle, num_parallel_workers=8)
    ds = ds.batch(int(batch_size / line_per_sample), drop_remainder=True)
    ds = ds.map(operations=(lambda x, y, z: (np.array(x).flatten().reshape(batch_size, 39),
                                             np.array(y).flatten().reshape(batch_size, 39),
                                             np.array(z).flatten().reshape(batch_size, 1))),
                input_columns=['feat_ids', 'feat_vals', 'label'],
                columns_order=['feat_ids', 'feat_vals', 'label'],
                num_parallel_workers=8)
    ds = ds.repeat(epochs)
    return ds


def create_dataset(data_dir, train_mode=True, epochs=1, batch_size=1000,
                   data_type=DataType.TFRECORD, line_per_sample=1000, rank_size=None, rank_id=None):
    """
    create_dataset
    """
    if data_type == DataType.TFRECORD:
        return _get_tf_dataset(data_dir, train_mode, epochs, batch_size,
                               line_per_sample, rank_size=rank_size, rank_id=rank_id)
    return _get_mindrecord_dataset(data_dir, train_mode, epochs,
                                   batch_size, line_per_sample,
                                   rank_size, rank_id)
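The batch(int(batch_size / line_per_sample)) plus flatten-and-reshape step works because each stored row packs line_per_sample samples. A small numpy check of that arithmetic, using the defaults this commit trains with:

# Each stored row holds line_per_sample samples of field_size features, so
# batching batch_size // line_per_sample rows and flattening yields exactly
# (batch_size, field_size) per feature column.
import numpy as np

batch_size, line_per_sample, field_size = 16000, 1000, 39
rows = np.zeros((batch_size // line_per_sample, line_per_sample, field_size))
assert rows.flatten().reshape(batch_size, field_size).shape == (16000, 39)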
@@ -0,0 +1,108 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""train_multinpu."""


import os
import sys
from mindspore import Model, context
from mindspore.train.callback import TimeMonitor
from mindspore.train import ParallelMode
from mindspore.communication.management import get_rank, get_group_size, init
from mindspore.parallel import _cost_model_context as cost_model_context
from mindspore.nn.wrap.cell_wrapper import VirtualDatasetCellTriple

from src.wide_and_deep import PredictWithSigmoid, TrainStepWrap, NetWithLossClass, WideDeepModel
from src.callbacks import LossCallBack, EvalCallBack
from src.datasets import create_dataset, DataType
from src.metrics import AUCMetric
from src.config import WideDeepConfig

sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend", save_graphs=True)
context.set_auto_parallel_context(parallel_mode=ParallelMode.SEMI_AUTO_PARALLEL, mirror_mean=True)
cost_model_context.set_cost_model_context(multi_subgraphs=True)
init()


def get_WideDeep_net(config):
    WideDeep_net = WideDeepModel(config)
    loss_net = NetWithLossClass(WideDeep_net, config)
    loss_net = VirtualDatasetCellTriple(loss_net)
    train_net = TrainStepWrap(loss_net)
    eval_net = PredictWithSigmoid(WideDeep_net)
    eval_net = VirtualDatasetCellTriple(eval_net)
    return train_net, eval_net


class ModelBuilder():
    """
    ModelBuilder
    """
    def __init__(self):
        pass

    def get_hook(self):
        pass

    def get_train_hook(self):
        hooks = []
        callback = LossCallBack()
        hooks.append(callback)
        if int(os.getenv('DEVICE_ID')) == 0:
            pass
        return hooks

    def get_net(self, config):
        return get_WideDeep_net(config)


def test_train_eval():
    """
    test_train_eval
    """
    config = WideDeepConfig()
    data_path = config.data_path
    batch_size = config.batch_size
    epochs = config.epochs
    print("epochs is {}".format(epochs))
    ds_train = create_dataset(data_path, train_mode=True, epochs=epochs, batch_size=batch_size,
                              data_type=DataType.MINDRECORD, rank_id=get_rank(), rank_size=get_group_size())
    ds_eval = create_dataset(data_path, train_mode=False, epochs=epochs + 1, batch_size=batch_size,
                             data_type=DataType.MINDRECORD, rank_id=get_rank(), rank_size=get_group_size())
    print("ds_train.size: {}".format(ds_train.get_dataset_size()))
    print("ds_eval.size: {}".format(ds_eval.get_dataset_size()))

    net_builder = ModelBuilder()

    train_net, eval_net = net_builder.get_net(config)
    train_net.set_train()
    auc_metric = AUCMetric()

    model = Model(train_net, eval_network=eval_net, metrics={"auc": auc_metric})

    eval_callback = EvalCallBack(model, ds_eval, auc_metric, config)

    callback = LossCallBack(config=config)
    context.set_auto_parallel_context(strategy_ckpt_save_file="./strategy_train.ckpt")
    model.train(epochs, ds_train,
                callbacks=[TimeMonitor(ds_train.get_dataset_size()), eval_callback, callback])
    eval_values = list(eval_callback.eval_values)
    assert eval_values[0] > 0.78


if __name__ == "__main__":
    test_train_eval()
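The strategy_ckpt_save_file call above persists the searched sharding strategy to ./strategy_train.ckpt. A hedged sketch of how a later run could reuse it instead of searching again; strategy_ckpt_load_file is the matching MindSpore auto-parallel option, and the file name is taken from this script.

# Sketch: load the sharding strategy saved by the run above.
from mindspore import context

context.set_auto_parallel_context(strategy_ckpt_load_file="./strategy_train.ckpt")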
File diff suppressed because it is too large
@@ -0,0 +1,65 @@
#!/bin/bash
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
set -e
BASE_PATH=$(cd "$(dirname $0)"; pwd)
CONFIG_PATH=/home/workspace/mindspore_config
export DEVICE_NUM=8
export RANK_SIZE=$DEVICE_NUM
unset SLOG_PRINT_TO_STDOUT
export MINDSPORE_HCCL_CONFIG_PATH=$CONFIG_PATH/hccl/rank_table_${DEVICE_NUM}p.json
CODE_DIR="./"
if [ -d ${BASE_PATH}/../../../../model_zoo/wide_and_deep ]; then
    CODE_DIR=${BASE_PATH}/../../../../model_zoo/wide_and_deep
elif [ -d ${BASE_PATH}/../../model_zoo/wide_and_deep ]; then
    CODE_DIR=${BASE_PATH}/../../model_zoo/wide_and_deep
else
    echo "[ERROR] code dir is not found"
fi
echo $CODE_DIR
rm -rf ${BASE_PATH}/wide_and_deep
cp -r ${CODE_DIR} ${BASE_PATH}/wide_and_deep
cp -f ${BASE_PATH}/python_file_for_ci/train_and_test_multinpu_ci.py ${BASE_PATH}/wide_and_deep/train_and_test_multinpu_ci.py
cp -f ${BASE_PATH}/python_file_for_ci/__init__.py ${BASE_PATH}/wide_and_deep/__init__.py
cp -f ${BASE_PATH}/python_file_for_ci/config.py ${BASE_PATH}/wide_and_deep/src/config.py
cp -f ${BASE_PATH}/python_file_for_ci/datasets.py ${BASE_PATH}/wide_and_deep/src/datasets.py
cp -f ${BASE_PATH}/python_file_for_ci/wide_and_deep.py ${BASE_PATH}/wide_and_deep/src/wide_and_deep.py
source ${BASE_PATH}/env.sh
export PYTHONPATH=${BASE_PATH}/wide_and_deep/:$PYTHONPATH
process_pid=()
for((i=0; i<$DEVICE_NUM; i++)); do
    rm -rf ${BASE_PATH}/wide_and_deep_auto_parallel${i}
    mkdir ${BASE_PATH}/wide_and_deep_auto_parallel${i}
    cd ${BASE_PATH}/wide_and_deep_auto_parallel${i}
    export RANK_ID=${i}
    export DEVICE_ID=${i}
    echo "start training for device $i"
    env > env$i.log
    pytest -s -v ../wide_and_deep/train_and_test_multinpu_ci.py > train_and_test_multinpu_ci$i.log 2>&1 &
    process_pid[${i}]=`echo $!`
done

for((i=0; i<${DEVICE_NUM}; i++)); do
    wait ${process_pid[i]}
    status=`echo $?`
    if [ "${status}" != "0" ]; then
        echo "[ERROR] test wide_and_deep semi auto parallel failed. status: ${status}"
        exit 1
    else
        echo "[INFO] test wide_and_deep semi auto parallel success."
    fi
done

exit 0
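For readers more comfortable in Python, a hedged equivalent of the per-device launch-and-wait loop above; the directory layout and device count mirror the script, but this sketch is not part of the commit.

# Sketch of the launch loop: one pytest process per device, each with its
# own RANK_ID/DEVICE_ID and working directory, then wait on all of them.
import os
import subprocess

procs = []
for i in range(int(os.environ.get("DEVICE_NUM", "8"))):
    workdir = f"wide_and_deep_auto_parallel{i}"
    os.makedirs(workdir, exist_ok=True)
    env = dict(os.environ, RANK_ID=str(i), DEVICE_ID=str(i))
    procs.append(subprocess.Popen(
        ["pytest", "-s", "-v", "../wide_and_deep/train_and_test_multinpu_ci.py"],
        cwd=workdir, env=env))
assert all(p.wait() == 0 for p in procs)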
@@ -0,0 +1,27 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import os
import pytest


@pytest.mark.level0
@pytest.mark.platform_x86_ascend_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.env_single
def test_wide_and_deep():
    sh_path = os.path.split(os.path.realpath(__file__))[0]
    ret = os.system(f"sh {sh_path}/run_wide_and_deep_auto_parallel.sh")
    os.system(f"grep -E 'ERROR|error' {sh_path}/wide_and_deep_auto_parallel*/train*log -C 3")
    assert ret == 0
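One note on the assertion: on POSIX, os.system returns the raw wait status, so ret == 0 means the launcher script exited cleanly. A hedged alternative with clearer failure reporting, shown as an assumption rather than what the test actually uses:

# Sketch: subprocess.run with check=True raises CalledProcessError on a
# nonzero exit status, which reads more clearly than a raw wait status.
import subprocess

subprocess.run(["sh", "run_wide_and_deep_auto_parallel.sh"], check=True)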
@@ -0,0 +1,114 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""train_multinpu."""


import os
import sys
import numpy as np
from mindspore import Model, context
from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, TimeMonitor
from mindspore.train import ParallelMode
from mindspore.communication.management import get_rank, get_group_size, init

from src.wide_and_deep import PredictWithSigmoid, TrainStepWrap, NetWithLossClass, WideDeepModel
from src.callbacks import LossCallBack, EvalCallBack
from src.datasets import create_dataset
from src.metrics import AUCMetric
from src.config import WideDeepConfig

sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend", save_graphs=True)
context.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL, mirror_mean=True)
init()


def get_WideDeep_net(config):
    WideDeep_net = WideDeepModel(config)
    loss_net = NetWithLossClass(WideDeep_net, config)
    train_net = TrainStepWrap(loss_net)
    eval_net = PredictWithSigmoid(WideDeep_net)
    return train_net, eval_net


class ModelBuilder():
    """
    ModelBuilder
    """
    def __init__(self):
        pass

    def get_hook(self):
        pass

    def get_train_hook(self):
        hooks = []
        callback = LossCallBack()
        hooks.append(callback)
        if int(os.getenv('DEVICE_ID')) == 0:
            pass
        return hooks

    def get_net(self, config):
        return get_WideDeep_net(config)


def test_train_eval():
    """
    test_train_eval
    """
    np.random.seed(1000)
    config = WideDeepConfig()
    data_path = config.data_path
    batch_size = config.batch_size
    epochs = config.epochs
    print("epochs is {}".format(epochs))
    ds_train = create_dataset(data_path, train_mode=True, epochs=epochs,
                              batch_size=batch_size, rank_id=get_rank(), rank_size=get_group_size())
    ds_eval = create_dataset(data_path, train_mode=False, epochs=epochs + 1,
                             batch_size=batch_size, rank_id=get_rank(), rank_size=get_group_size())
    print("ds_train.size: {}".format(ds_train.get_dataset_size()))
    print("ds_eval.size: {}".format(ds_eval.get_dataset_size()))

    net_builder = ModelBuilder()

    train_net, eval_net = net_builder.get_net(config)
    train_net.set_train()
    auc_metric = AUCMetric()

    model = Model(train_net, eval_network=eval_net, metrics={"auc": auc_metric})

    eval_callback = EvalCallBack(model, ds_eval, auc_metric, config)

    callback = LossCallBack(config=config)
    ckptconfig = CheckpointConfig(save_checkpoint_steps=ds_train.get_dataset_size(), keep_checkpoint_max=5)
    ckpoint_cb = ModelCheckpoint(prefix='widedeep_train',
                                 directory=config.ckpt_path, config=ckptconfig)
    out = model.eval(ds_eval)
    print("=====" * 5 + "model.eval() initialized: {}".format(out))
    model.train(epochs, ds_train,
                callbacks=[TimeMonitor(ds_train.get_dataset_size()), eval_callback, callback, ckpoint_cb])
    expect_out0 = [0.792634, 0.799862, 0.803324]
    expect_out6 = [0.796580, 0.803908, 0.807262]
    if get_rank() == 0:
        assert np.allclose(eval_callback.eval_values, expect_out0)
    if get_rank() == 6:
        assert np.allclose(eval_callback.eval_values, expect_out6)


if __name__ == "__main__":
    test_train_eval()
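The np.allclose checks above compare each rank's recorded AUC trajectory against golden values; with numpy's defaults (rtol=1e-05, atol=1e-08) that demands agreement to roughly five decimal places. A quick illustration:

# np.allclose defaults to rtol=1e-05, atol=1e-08, so a drift of ~1e-7
# passes while a drift of ~7e-5 (the fourth decimal place) fails.
import numpy as np

assert np.allclose([0.792634], [0.792634 + 1e-7])
assert not np.allclose([0.792634], [0.7927])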