Fine tuning launch.py (#17223)
parent 841553e13f
commit 6a1df46991
File diff suppressed because it is too large
@@ -0,0 +1,35 @@
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os


def train():
    # paddle.distributed.launch injects these variables into each trainer process.
    selected_gpus = os.getenv("FLAGS_selected_gpus")
    trainer_id = int(os.getenv("PADDLE_TRAINER_ID"))
    worker_endpoints_env = os.getenv("PADDLE_TRAINER_ENDPOINTS")
    current_endpoint = os.getenv("PADDLE_CURRENT_ENDPOINT")
    worker_endpoints = worker_endpoints_env.split(",")
    trainers_num = len(worker_endpoints)

    name = "selected_gpus:{} worker_endpoints:{} trainers_num:{} current_endpoint:{} trainer_id:{}"\
        .format(selected_gpus, worker_endpoints, trainers_num, current_endpoint, trainer_id)

    print(name)
    # Append rather than truncate: all trainers write to the same file, and
    # mode "w" would let the last writer clobber the other trainers' lines,
    # which breaks the grep checks in the test script below.
    with open("multi_process.check.log", "a") as f:
        f.write(name + "\n")


if __name__ == '__main__':
    train()
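For quick local debugging, the probe can also be driven without the launcher by faking the variables that `paddle.distributed.launch` would inject. A minimal sketch (not part of the PR), assuming the file above is saved as `multi_process.py` as the test script below implies:

```python
# Hypothetical single-process smoke test: fake the environment that
# paddle.distributed.launch would inject, then call train() directly.
# The endpoint values mirror the expected strings in the test script.
import os

os.environ["FLAGS_selected_gpus"] = "0"
os.environ["PADDLE_TRAINER_ID"] = "0"
os.environ["PADDLE_TRAINER_ENDPOINTS"] = "127.0.0.1:6170,127.0.0.1:6171"
os.environ["PADDLE_CURRENT_ENDPOINT"] = "127.0.0.1:6170"

from multi_process import train  # assumes the file is saved as multi_process.py

train()
# prints: selected_gpus:0 worker_endpoints:['127.0.0.1:6170', '127.0.0.1:6171']
#         trainers_num:2 current_endpoint:127.0.0.1:6170 trainer_id:0
```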
@@ -0,0 +1,30 @@
#!/bin/bash
set -e

# use default values
python -m paddle.distributed.launch multi_process.py

# use specified values
cluster_node_ips="127.0.0.1"
node_ip="127.0.0.1"

distributed_args="--cluster_node_ips ${cluster_node_ips} --node_ip ${node_ip} --selected_gpus=0,1"
python -m paddle.distributed.launch ${distributed_args} multi_process.py
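The expected strings below follow from how endpoints pair up with `--selected_gpus=0,1`: one port per selected GPU. A hedged sketch of that mapping, assuming the launcher assigns consecutive ports counting up from the base port 6170 seen in the expected output (the launcher's actual port logic may differ):

```python
# Hedged sketch: how two worker endpoints line up with --selected_gpus=0,1,
# assuming consecutive ports from a base port of 6170.
node_ip = "127.0.0.1"
base_port = 6170
selected_gpus = [0, 1]

worker_endpoints = ["{}:{}".format(node_ip, base_port + i)
                    for i in range(len(selected_gpus))]
print(worker_endpoints)  # ['127.0.0.1:6170', '127.0.0.1:6171']
```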
str1="selected_gpus:0 worker_endpoints:['127.0.0.1:6170', '127.0.0.1:6171'] trainers_num:2 current_endpoint:127.0.0.1:6170 trainer_id:0"
|
||||
str2="selected_gpus:1 worker_endpoints:['127.0.0.1:6170', '127.0.0.1:6171'] trainers_num:2 current_endpoint:127.0.0.1:6171 trainer_id:1"
|
||||
file="multi_process.check.log"
|
||||
|
||||
if ! grep -q "$str1" "$file"; then
|
||||
echo "find trainer 0"
|
||||
else
|
||||
echo "not find trainer 0"
|
||||
exit -1
|
||||
fi
|
||||
|
||||
if ! grep -q "$str2" "$file"; then
|
||||
echo "find trainer 1"
|
||||
else
|
||||
echo "not find trainer 0"
|
||||
exit -1
|
||||
fi
|
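The same verification as a hedged Python sketch, in case the check ever moves into a unit test; the expected strings and log file name mirror the script above:

```python
# Hedged Python equivalent of the shell grep checks above.
expected = [
    "selected_gpus:0 worker_endpoints:['127.0.0.1:6170', '127.0.0.1:6171'] "
    "trainers_num:2 current_endpoint:127.0.0.1:6170 trainer_id:0",
    "selected_gpus:1 worker_endpoints:['127.0.0.1:6170', '127.0.0.1:6171'] "
    "trainers_num:2 current_endpoint:127.0.0.1:6171 trainer_id:1",
]

with open("multi_process.check.log") as f:
    log = f.read()

for line in expected:
    assert line in log, "missing trainer line: " + line
print("both trainer lines found")
```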