parent cffe2c94fe
commit 1d9d3b123c
@@ -0,0 +1,22 @@
#!/bin/bash
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
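# Ascend (HiAI) runtime environment: TBE operator implementation paths, the ccec
# compiler, runtime libraries, and device memory settings used by the test run.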
LOCAL_HIAI=/usr/local/HiAI
export TBE_IMPL_PATH=${LOCAL_HIAI}/runtime/ops/op_impl/built-in/ai_core/tbe/impl/
export LD_LIBRARY_PATH=${LOCAL_HIAI}/runtime/lib64/:${LD_LIBRARY_PATH}
export PATH=${LOCAL_HIAI}/runtime/ccec_compiler/bin/:${PATH}
export PYTHONPATH=${LOCAL_HIAI}/runtime/ops/op_impl/built-in/ai_core/tbe/:${PYTHONPATH}
export DEVICE_MEMORY_CAPACITY=1073741824000
export NOT_FULLY_USE_DEVICES=off
@@ -0,0 +1,61 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================

import os

import numpy as np

import mindspore.communication.management as distributedTool
import mindspore.nn as nn
from mindspore import context
from mindspore.nn.metrics import Accuracy
from mindspore.train import Model
from mindspore.train.callback import LossMonitor, TimeMonitor
from model_zoo.official.cv.lenet.src.dataset import create_dataset
from model_zoo.official.cv.lenet.src.lenet import LeNet5

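# Module-level defaults; rank_id and device_num are overwritten in setup_module()
# once distributed communication is initialized. DEVICE_ID is set by the launcher.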
np.set_printoptions(threshold=np.inf)
device_num = 2
device_id = int(os.getenv('DEVICE_ID'))
rank_id = 0


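# pytest module hooks: setup_module initializes distributed communication (HCCL on
# Ascend), reads the rank id and group size, and configures the parallel context
# with parameter broadcast; teardown_module releases the communication group.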
def setup_module():
    global device_num
    global rank_id
    context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
    context.set_context(device_id=device_id)
    distributedTool.init()
    rank_id = distributedTool.get_rank()
    device_num = distributedTool.get_group_size()
    context.set_auto_parallel_context(device_num=device_num, global_rank=device_id, parameter_broadcast=True)


def teardown_module():
    distributedTool.release()


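# Trains LeNet-5 for one epoch on the MNIST training set; each launched process
# runs this with its own DEVICE_ID, and parameter_broadcast keeps the initial
# parameters consistent across devices.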
def test_all_trains():
    ds_train = create_dataset(os.path.join('/home/workspace/mindspore_dataset/mnist', "train"), 32, 1)

    network = LeNet5(10)
    net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean")
    net_opt = nn.Momentum(network.trainable_params(), 0.01, 0.9)
    time_cb = TimeMonitor(data_size=ds_train.get_dataset_size())

    model = Model(network, net_loss, net_opt, metrics={"Accuracy": Accuracy()})

    print("============== Starting Training ==============")
    model.train(1, ds_train, callbacks=[time_cb, LossMonitor()])
@@ -0,0 +1,53 @@
#!/bin/bash
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
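# Configure an 8-device run: source the HiAI runtime environment (env.sh) and
# point MINDSPORE_HCCL_CONFIG_PATH at the rank table for DEVICE_NUM devices.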
set -e
BASE_PATH=$(
    cd "$(dirname $0)"
    pwd
)
CONFIG_PATH=/home/workspace/mindspore_config
export DEVICE_NUM=8
export RANK_SIZE=$DEVICE_NUM
source ${BASE_PATH}/env.sh
unset SLOG_PRINT_TO_STDOUT
export MINDSPORE_HCCL_CONFIG_PATH=$CONFIG_PATH/hccl/rank_table_${DEVICE_NUM}p.json

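# Launch one pytest process per device, each in its own working directory with its
# own RANK_ID/DEVICE_ID; record each PID so results can be collected below.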
process_pid=()
for ((i = 0; i < $DEVICE_NUM; i++)); do
    rm -rf ${BASE_PATH}/lenet_broadcast${i}
    mkdir ${BASE_PATH}/lenet_broadcast${i}
    cp -r ${BASE_PATH}/lenet_broadcast_auto_parallel.py ${BASE_PATH}/lenet_broadcast${i}/
    cd ${BASE_PATH}/lenet_broadcast${i}
    export RANK_ID=${i}
    export DEVICE_ID=${i}
    echo "start training for device $i"
    env >env$i.log
    pytest -s -v lenet_broadcast_auto_parallel.py >test_lenet_auto_parallel_broadcast_8p_log$i.log 2>&1 &
    process_pid[${i}]=$(echo $!)
done

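# Wait for every launched process and fail the whole run if any of them exited non-zero.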
for ((i = 0; i < ${DEVICE_NUM}; i++)); do
    wait ${process_pid[i]}
    status=$(echo $?)
    if [ "${status}" != "0" ]; then
        echo "[ERROR] test_broadcast_auto_parallel failed. status: ${status}"
        exit 1
    else
        echo "[INFO] test_broadcast_auto_parallel success."
    fi
done

exit 0
@@ -0,0 +1,27 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import os

import pytest


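# CI entry point: invokes the launcher script and asserts that it exits cleanly;
# the actual training runs in the per-device processes the script spawns.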
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_single
def test_broadcast_auto_parallel():
    sh_path = os.path.split(os.path.realpath(__file__))[0]
    ret = os.system(f"sh {sh_path}/run_broadcast_auto_parallel.sh")
    assert ret == 0