Remove psutil dependency. (#26841)

Branch: revert-26856-strategy_example2
gongweibao authored 5 years ago, committed by GitHub
parent 0627a319b0
commit 4e1c8f3e6e

@@ -15,7 +15,8 @@ from multiprocessing import Pool, Process
 import os
 import socket
 from contextlib import closing
-import psutil
+import time
+import sys


 def launch_func(func, env_dict):
@@ -25,19 +26,36 @@ def launch_func(func, env_dict):
     return proc


-def wait(procs, timeout=None):
-    # wait
-    decents = []
+def wait(procs, timeout=30):
+    error = False
+    begin = time.time()
+    while True:
+        alive = False
+        for p in procs:
+            p.join(timeout=10)
+            if p.exitcode is None:
+                alive = True
+                continue
+            elif p.exitcode != 0:
+                error = True
+                break
+
+        if not alive:
+            break
+
+        if error:
+            break
+
+        if timeout is not None and time.time() - begin >= timeout:
+            error = True
+            break

     for p in procs:
-        for child in psutil.Process(p.pid).children(recursive=True):
-            decents.append(child)
-    gone, alive = psutil.wait_procs(decents, timeout=timeout)
-    for p in alive:
-        p.kill()
-    for p in gone:
-        if p.returncode != 0:
-            sys.exit(1)
+        if p.is_alive():
+            p.terminate()
+
+    if error:
+        sys.exit(1)


 def _find_free_port(port_set):
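For context, the replacement wait() above relies only on the standard-library multiprocessing API: it repeatedly join()s each Process with a short timeout, treats a non-zero exitcode as a failure, enforces an overall deadline, and finally terminate()s whatever is still alive, instead of collecting descendant processes with psutil. Below is a minimal, self-contained sketch of that pattern; the wait_all/_worker names, the timings, and the two-worker demo are illustrative only and are not part of the commit.

import sys
import time
from multiprocessing import Process


def _worker(seconds, code):
    # Hypothetical worker used only for this demo: sleep, then exit with `code`.
    time.sleep(seconds)
    sys.exit(code)


def wait_all(procs, timeout=30):
    # Join every process with a short per-pass timeout and an overall deadline,
    # mirroring the join/exitcode/terminate pattern of the new wait() above.
    error = False
    begin = time.time()
    while True:
        alive = False
        for p in procs:
            p.join(timeout=1)            # the test file uses join(timeout=10)
            if p.exitcode is None:       # still running
                alive = True
            elif p.exitcode != 0:        # exited with a failure
                error = True
        if not alive or error:
            break
        if timeout is not None and time.time() - begin >= timeout:
            error = True                 # overall deadline exceeded
            break

    # No psutil: terminate whatever is still alive instead of walking
    # the descendant-process tree.
    for p in procs:
        if p.is_alive():
            p.terminate()
    return not error


if __name__ == "__main__":
    procs = [Process(target=_worker, args=(0.2, 0)),   # finishes cleanly
             Process(target=_worker, args=(60, 0))]    # will hit the deadline
    for p in procs:
        p.start()
    print("all workers succeeded:", wait_all(procs, timeout=2))

One trade-off worth noting: Process.terminate() only signals the direct child, so grandchildren spawned by a worker are not reaped the way psutil's recursive children() walk did.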

@@ -190,7 +190,7 @@ class TestFleetGraphExecutionMetaOptimizer(unittest.TestCase):
             avg_cost = paddle.fluid.layers.mean(x=cost)

             strategy = paddle.distributed.fleet.DistributedStrategy()
-            optimizer = paddle.optimizer.SGD(learning_rate=0.01)
+            optimizer = paddle.fluid.optimizer.SGD(learning_rate=0.01)
             optimizer = fleet.distributed_optimizer(
                 optimizer, strategy=strategy)
             optimizer.minimize(avg_cost)
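Note: this hunk only swaps the 2.0-style paddle.optimizer.SGD for the legacy fluid-era paddle.fluid.optimizer.SGD before it is wrapped by fleet.distributed_optimizer; the surrounding strategy setup and minimize(avg_cost) call are unchanged. The commit message does not state the motivation, so any compatibility reading is an assumption.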

@@ -21,4 +21,3 @@ objgraph
 astor
 pathlib
 netifaces
-psutil
