@@ -394,6 +394,38 @@ class TestDistLookupTableBase(TranspilerTest):
         optimizer.minimize(avg_cost)
 
 
+class TestLocalLookupTable(TestDistLookupTableBase):
+    def net_conf(self):
+        self.network_with_table(is_sparse=True, is_distributed=False)
+
+    def transpiler_test_impl(self):
+        pserver1, startup1 = self.get_pserver(self.pserver1_ep)
+
+        self.assertEqual(len(pserver1.blocks), 3)
+        # print(str(pserver1))
+        # 0 listen_and_serv
+        # 1 optimize for fc_w or fc_b adam
+        self.assertEqual([op.type for op in pserver1.blocks[1].ops],
+                         ["sum", "scale", "adam", "scale", "scale"])
+
+        # 2 optimize for table adam
+        # NOTE: if the param is not SelectedRows, the grad will be scaled to grad / trainer_num
+        self.assertEqual([op.type for op in pserver1.blocks[2].ops],
+                         ["sum", "adam", "scale", "scale"])
+
+        trainer = self.get_trainer()
+        self.assertEqual(len(trainer.blocks), 1)
+        ops = [
+            'lookup_table', 'sequence_pool', 'lookup_table', 'sequence_pool',
+            'concat', 'mul', 'elementwise_add', 'cross_entropy', 'mean',
+            'fill_constant', 'mean_grad', 'cross_entropy_grad',
+            'elementwise_add_grad', 'send', 'mul_grad', 'send', 'concat_grad',
+            'sequence_pool_grad', 'lookup_table_grad', 'sequence_pool_grad',
+            'lookup_table_grad', 'sum', 'split_selected_rows', 'send',
+            'send_barrier', 'recv', 'recv', 'recv', 'fetch_barrier', 'concat'
+        ]
+        self.assertEqual([op.type for op in trainer.blocks[0].ops], ops)
+
+
 class TestDistLookupTable(TestDistLookupTableBase):
     def net_conf(self):
         self.network_with_table(is_sparse=True, is_distributed=True)
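
For context on what the assertions above encode: the trainer op list reflects a network with two sparse embedding inputs that share a single lookup-table parameter, which the transpiler either optimizes locally (TestLocalLookupTable) or shards across pservers (TestDistLookupTable). Below is a minimal sketch of such a network in the fluid API of this era; the helper body, input names, and sizes are illustrative assumptions, not the test's actual network_with_table implementation.

import paddle.fluid as fluid

# Illustrative sketch only: two int64 sequence inputs feed embeddings that
# share one parameter ('shared_w'); that shared table is what the transpiler
# keeps local or distributes, depending on is_distributed.
def network_with_table(is_sparse, is_distributed):  # hypothetical helper body
    title = fluid.layers.data(name='title', shape=[1], dtype='int64', lod_level=1)
    brand = fluid.layers.data(name='brand', shape=[1], dtype='int64', lod_level=1)
    label = fluid.layers.data(name='label', shape=[1], dtype='int64')

    pooled = []
    for ids in (title, brand):
        emb = fluid.layers.embedding(
            input=ids,
            size=[100, 8],  # [vocab_size, emb_dim], illustrative values
            is_sparse=is_sparse,
            is_distributed=is_distributed,
            param_attr=fluid.ParamAttr(name='shared_w'))  # the shared lookup table
        # each input yields the 'lookup_table' + 'sequence_pool' op pair asserted above
        pooled.append(fluid.layers.sequence_pool(input=emb, pool_type='sum'))

    merged = fluid.layers.concat(input=pooled, axis=1)             # 'concat'
    predict = fluid.layers.fc(input=merged, size=2)                # 'mul' + 'elementwise_add'
    cost = fluid.layers.cross_entropy(input=predict, label=label)  # 'cross_entropy'
    avg_cost = fluid.layers.mean(cost)                             # 'mean'
    fluid.optimizer.Adam(learning_rate=0.003).minimize(avg_cost)   # 'adam' ops on the pserver
    return avg_cost

Note that the tests only assert op types, so activation and normalization details are elided in this sketch.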