Merge pull request #13104 from jacquesqiao/fix-sparse-grad-merge

fix sparse grad merge on pserver

Before this change, DistributeTranspiler only appended the scale op (1.0 / trainer_num) to the pserver's optimize block when the merged gradient was not SELECTED_ROWS, so merged sparse gradients were summed across trainers but never averaged. The guard is removed and the scale is now applied unconditionally after the sum.
Qiao Longfei authored 7 years ago; committed via GitHub
commit 91422138d2

@@ -438,7 +438,7 @@ class TestLocalLookupTable(TestDistLookupTableBase):
         # 2 optimize for table adam
         # NOTE: if param is not selected rows, the grad will scaled to grad / trainer_num
         self.assertEqual([op.type for op in pserver1.blocks[2].ops],
-                         ["sum", "adam", "scale", "scale"])
+                         ["sum", "scale", "adam", "scale", "scale"])
         trainer, _ = self.get_trainer()
         self.assertEqual(len(trainer.blocks), 1)
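Context for the test change: once the SELECTED_ROWS guard is removed (second hunk below), the transpiler always inserts a scale op right after the sum that merges per-trainer gradients, so the table's adam block now reads sum, scale, adam, scale, scale instead of sum, adam, scale, scale. A hedged sketch, assuming the pre-2.0 fluid.DistributeTranspiler API this test file exercises, of how such an op list can be inspected; the network, endpoint string, and trainer count are illustrative, not the test's actual setup:

    import paddle.fluid as fluid

    # Build a minimal network with an Adam optimizer so the transpiler
    # has something to distribute.
    x = fluid.layers.data(name="x", shape=[13], dtype="float32")
    y = fluid.layers.data(name="y", shape=[1], dtype="float32")
    pred = fluid.layers.fc(input=x, size=1)
    loss = fluid.layers.mean(
        fluid.layers.square_error_cost(input=pred, label=y))
    fluid.optimizer.Adam(learning_rate=0.001).minimize(loss)

    # Transpile for two trainers and one pserver, then list the op
    # types in each block of the pserver program.
    t = fluid.DistributeTranspiler()
    t.transpile(trainer_id=0, pservers="127.0.0.1:6174", trainers=2)
    pserver_prog = t.get_pserver_program("127.0.0.1:6174")
    for block in pserver_prog.blocks:
        print([op.type for op in block.ops])

The exact op lists depend on the network; the point of the assertion above is that after this fix a block merging a sparse (SELECTED_ROWS) gradient gains the extra "scale" between "sum" and the optimizer op.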

@@ -1390,13 +1390,11 @@ class DistributeTranspiler(object):
                 inputs={"X": vars2merge},
                 outputs={"Out": merged_var},
                 attrs={"use_mkldnn": False})
-            # TODO(panyx0718): What if it's SELECTED_ROWS.
-            if not merged_var.type == core.VarDesc.VarType.SELECTED_ROWS:
-                optimize_block.append_op(
-                    type="scale",
-                    inputs={"X": merged_var},
-                    outputs={"Out": merged_var},
-                    attrs={"scale": 1.0 / float(self.trainer_num)})
+            optimize_block.append_op(
+                type="scale",
+                inputs={"X": merged_var},
+                outputs={"Out": merged_var},
+                attrs={"scale": 1.0 / float(self.trainer_num)})
         return merged_var
 
     def _append_pserver_ops(self, optimize_block, opt_op, endpoint,
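The behavioral change, restated: sum merges the gradients pushed by all trainers, and scale then divides by trainer_num; before this patch the scale was skipped whenever merged_var was SELECTED_ROWS, so sparse gradients reached the optimizer trainer_num times too large. A minimal sketch in plain Python of the sum-then-scale semantics on a sparse, rows-plus-values gradient; the names SparseGrad and merge_gradients are hypothetical, not Paddle API:

    from collections import defaultdict

    class SparseGrad:
        """Maps row index -> value, mimicking a SELECTED_ROWS gradient
        where only the touched rows carry data."""
        def __init__(self, rows, values):
            self.data = dict(zip(rows, values))

    def merge_gradients(grads, trainer_num):
        # "sum": accumulate values per row across all trainers.
        merged = defaultdict(float)
        for g in grads:
            for row, val in g.data.items():
                merged[row] += val
        # "scale": average by trainer count. Before this fix, the step
        # below was skipped for SELECTED_ROWS gradients, leaving them
        # trainer_num times too large.
        return {row: val / float(trainer_num)
                for row, val in merged.items()}

    # Two trainers both touch row 3; rows 0 and 7 come from one each.
    g1 = SparseGrad([0, 3], [1.0, 2.0])
    g2 = SparseGrad([3, 7], [4.0, 6.0])
    print(merge_gradients([g1, g2], trainer_num=2))
    # {0: 0.5, 3: 3.0, 7: 3.0}

Scaling a SELECTED_ROWS gradient only touches the stored values, so applying the scale op unconditionally is safe for both dense and sparse cases, which is why the TODO and the guard could simply be dropped.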
