From 151f579c23c75789726c07b522a45ea6d78670e2 Mon Sep 17 00:00:00 2001
From: Helin Wang <ustc.harry@gmail.com>
Date: Wed, 17 Jan 2018 14:48:52 -0800
Subject: [PATCH] Recv OP: use BlockDesc* instead of ProgramDesc proto as
 Attribute
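
Storing the optimize program as a serialized ProgramDesc string forced the
Python side to serialize the proto and the Recv OP to parse it back in C++.
With a BlockDesc* attribute the op recovers the parent ProgramDesc through
block->Program() and runs exactly that block via block->ID(), removing the
proto round trip.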

---
 paddle/operators/recv_op.cc                        | 14 +++++++-------
 paddle/operators/send_recv_op_test.cc              |  5 +----
 python/paddle/v2/fluid/distribute_transpiler.py    |  2 +-
 .../v2/fluid/distribute_transpiler_simple.py       |  2 +-
 4 files changed, 10 insertions(+), 13 deletions(-)
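
Note: a minimal, untested caller-side sketch of the new attribute, mirroring
the values used in send_recv_op_test.cc and distribute_transpiler.py (the
import path and create_var arguments follow the fluid API of this tree):

    import paddle.v2.fluid as fluid

    main = fluid.Program()
    optimize_sub_program = fluid.Program()

    # Variable feeding the "RX" input, named as in the C++ test ("x1").
    x = main.global_block().create_var(
        name="x1", dtype="float32", shape=[1])

    main.global_block().append_op(
        type="recv",
        inputs={"RX": [x]},  # grads to recv
        outputs={},
        attrs={
            # Pass the BlockDesc directly; no proto serialization needed.
            "OptimizeBlock": optimize_sub_program.global_block(),
            "endpoint": "127.0.0.1:6174",
            "ParamList": ["Out"],
            "GradList": ["x1"],
        })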

diff --git a/paddle/operators/recv_op.cc b/paddle/operators/recv_op.cc
index f9ed751682..ba0d805110 100644
--- a/paddle/operators/recv_op.cc
+++ b/paddle/operators/recv_op.cc
@@ -33,6 +33,8 @@ limitations under the License. */
 namespace paddle {
 namespace operators {
 
+constexpr char kOptimizeBlock[] = "OptimizeBlock";
+
 void RunServer(std::shared_ptr<detail::AsyncGRPCServer> service) {
   service->RunSyncUpdate();
   VLOG(4) << "RunServer thread end";
@@ -150,14 +152,12 @@ class RecvOp : public framework::OperatorBase {
 
       rpc_service_->Reset();
 
-      std::string program_str = Attr<std::string>("OptimizeProgram");
-      framework::proto::ProgramDesc program_desc;
-      program_desc.ParseFromString(program_str);
-      framework::ProgramDesc program(program_desc);
+      auto *block = Attr<framework::BlockDesc *>(kOptimizeBlock);
+      auto *program = block->Program();
       framework::Executor executor(dev_place);
       // Run sub graph to get optimized tensor
       try {
-        executor.Run(program, &recv_scope, 0, /*global_block*/
+        executor.Run(*program, &recv_scope, block->ID(), /*block_id*/
                      false /*create_local_scope*/, false /*create_vars*/);
       } catch (std::exception &e) {
         LOG(ERROR) << "run sub program error " << e.what();
@@ -189,8 +189,8 @@ This operator will recv tensor from send_op
                          "IP address to listen on.")
         .SetDefault("127.0.0.1:6164")
         .AddCustomChecker([](const std::string &ip) { return !ip.empty(); });
-    AddAttr<std::string>("OptimizeProgram", "type string",
-                         "Serialized ProgramDesc string for recv to run.");
+    AddAttr<framework::BlockDesc *>(
+        kOptimizeBlock, "BlockDesc of the optimize block for recv to run.");
     AddAttr<std::vector<std::string>>(
         "ParamList", "type list of string",
         "grad->param name mapping to find which param to optimize.")
diff --git a/paddle/operators/send_recv_op_test.cc b/paddle/operators/send_recv_op_test.cc
index ea09169479..045a0f5434 100644
--- a/paddle/operators/send_recv_op_test.cc
+++ b/paddle/operators/send_recv_op_test.cc
@@ -130,10 +130,7 @@ void StartServerNet(bool is_sparse) {
   attrs.insert({"endpoint", std::string("127.0.0.1:6174")});
   attrs.insert({"ParamList", std::vector<std::string>({"Out"})});
   attrs.insert({"GradList", std::vector<std::string>({"x1"})});
-  std::string program_proto;
-  PADDLE_ENFORCE(program.Proto()->SerializeToString(&program_proto));
-
-  attrs.insert({"OptimizeProgram", program_proto});
+  attrs.insert({"OptimizeBlock", block});
   recv_op = f::OpRegistry::CreateOp("recv", {{"RX", {"x1"}}}, {}, attrs);
   recv_op->Run(scope, place);
 }
diff --git a/python/paddle/v2/fluid/distribute_transpiler.py b/python/paddle/v2/fluid/distribute_transpiler.py
index 06a7b6fb02..e1b7e341d0 100644
--- a/python/paddle/v2/fluid/distribute_transpiler.py
+++ b/python/paddle/v2/fluid/distribute_transpiler.py
@@ -439,7 +439,7 @@ class DistributeTranspiler:
                     },  # grads to recv
             outputs={},
             attrs={
-                "OptimizeProgram": optimize_sub_program.desc,
+                "OptimizeBlock": optimize_sub_program.global_block(),
                 "endpoint": endpoint,
                 "ParamList": [
                     p.name
diff --git a/python/paddle/v2/fluid/distribute_transpiler_simple.py b/python/paddle/v2/fluid/distribute_transpiler_simple.py
index bd88f02bde..56ffb56b12 100644
--- a/python/paddle/v2/fluid/distribute_transpiler_simple.py
+++ b/python/paddle/v2/fluid/distribute_transpiler_simple.py
@@ -243,7 +243,7 @@ class SimpleDistributeTranspiler:
                     self.param_grad_map[endpoint]["grads"]},  # grads to recv
             outputs={},
             attrs={
-                "OptimizeProgram": optimize_sub_program.desc,
+                "OptimizeBlock": optimize_sub_program.global_block(),
                 "endpoint": endpoint,
                 "ParamList":
                 [p.name for p in self.param_grad_map[endpoint]["params"]],