|
|
|
@ -300,7 +300,7 @@ class DistributeTranspiler(object):
|
|
|
|
|
input_deps = grad_name_to_send_dummy_out.values()
|
|
|
|
|
program.global_block().append_op(
|
|
|
|
|
type="send_barrier",
|
|
|
|
|
inputs={"X": input_deps},
|
|
|
|
|
inputs={"X": list(input_deps)},
|
|
|
|
|
outputs={"Out": send_barrier_out},
|
|
|
|
|
attrs={
|
|
|
|
|
"endpoints": pserver_endpoints,
|
|
|
|
@ -401,7 +401,7 @@ class DistributeTranspiler(object):
|
|
|
|
|
|
|
|
|
|
Args:
|
|
|
|
|
recv_vars (list): Variable list to recv for current trainer_id
|
|
|
|
|
eplist (list): A list of strings indicating
|
|
|
|
|
eplist (list): A list of strings indicating
|
|
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
|
Program: trainer side startup program.
|
|
|
|
@ -455,7 +455,7 @@ class DistributeTranspiler(object):
|
|
|
|
|
if len(splited_var) <= 1:
|
|
|
|
|
continue
|
|
|
|
|
# NOTE: if enable memory optimization, origin vars maybe removed.
|
|
|
|
|
if startup_program.global_block().vars.has_key(varname):
|
|
|
|
|
if varname in startup_program.global_block().vars:
|
|
|
|
|
orig_param = startup_program.global_block().vars[varname]
|
|
|
|
|
else:
|
|
|
|
|
origin_param_var = self.origin_program.global_block().vars[
|
|
|
|
@ -690,7 +690,7 @@ class DistributeTranspiler(object):
|
|
|
|
|
|
|
|
|
|
Args:
|
|
|
|
|
endpoint (str): current pserver endpoint.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
|
tuple: (main_program, startup_program), of type "Program"
|
|
|
|
|
"""
|
|
|
|
@ -713,7 +713,7 @@ class DistributeTranspiler(object):
|
|
|
|
|
endpoint (str): current pserver endpoint.
|
|
|
|
|
pserver_program (Program): deprecated, call get_pserver_program first.
|
|
|
|
|
startup_program (Program): deprecated, should pass startup_program
|
|
|
|
|
                when initializing
|
|
|
|
|
                when initializing
|
|
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
|
Program: parameter server side startup program.
|
|
|
|
|