api build strategy error polish, test=develop (#23546)

Chen Weihang authored 5 years ago, committed by GitHub
parent b11340a437
commit df538439f5

@@ -1863,8 +1863,10 @@ All parameter, weight, gradient are variables in Paddle.
           "reduce_strategy",
           [](const BuildStrategy &self) { return self.reduce_; },
           [](BuildStrategy &self, BuildStrategy::ReduceStrategy strategy) {
-            PADDLE_ENFORCE_EQ(!self.IsFinalized(), true,
-                              "BuildStrategy is finlaized.");
+            PADDLE_ENFORCE_NE(self.IsFinalized(), true,
+                              platform::errors::PreconditionNotMet(
+                                  "BuildStrategy has been finlaized, cannot be "
+                                  "configured again."));
             self.reduce_ = strategy;
           },
           R"DOC((fluid.BuildStrategy.ReduceStrategy, optional): there are two reduce
@@ -1887,8 +1889,10 @@ All parameter, weight, gradient are variables in Paddle.
           [](const BuildStrategy &self) { return self.gradient_scale_; },
           [](BuildStrategy &self,
              BuildStrategy::GradientScaleStrategy strategy) {
-            PADDLE_ENFORCE_EQ(!self.IsFinalized(), true,
-                              "BuildStrategy is finalized.");
+            PADDLE_ENFORCE_NE(self.IsFinalized(), true,
+                              platform::errors::PreconditionNotMet(
+                                  "BuildStrategy has been finlaized, cannot be "
+                                  "configured again."));
             self.gradient_scale_ = strategy;
           },
           R"DOC((fluid.BuildStrategy.GradientScaleStrategy, optional): there are three
@@ -1949,8 +1953,10 @@ All parameter, weight, gradient are variables in Paddle.
           "debug_graphviz_path",
           [](const BuildStrategy &self) { return self.debug_graphviz_path_; },
           [](BuildStrategy &self, const std::string &path) {
-            PADDLE_ENFORCE_EQ(!self.IsFinalized(), true,
-                              "BuildStrategy is finlaized.");
+            PADDLE_ENFORCE_NE(self.IsFinalized(), true,
+                              platform::errors::PreconditionNotMet(
+                                  "BuildStrategy has been finlaized, cannot be "
+                                  "configured again."));
             self.debug_graphviz_path_ = path;
           },
           R"DOC((str, optional): debug_graphviz_path indicates the path that
@@ -1971,8 +1977,10 @@ All parameter, weight, gradient are variables in Paddle.
             return self.enable_sequential_execution_;
           },
           [](BuildStrategy &self, bool b) {
-            PADDLE_ENFORCE_EQ(!self.IsFinalized(), true,
-                              "BuildStrategy is finlaized.");
+            PADDLE_ENFORCE_NE(self.IsFinalized(), true,
+                              platform::errors::PreconditionNotMet(
+                                  "BuildStrategy has been finlaized, cannot be "
+                                  "configured again."));
             self.enable_sequential_execution_ = b;
           },
           R"DOC((bool, optional): If set True, the execution order of ops would
@@ -1991,8 +1999,10 @@ All parameter, weight, gradient are variables in Paddle.
             return self.remove_unnecessary_lock_;
           },
           [](BuildStrategy &self, bool b) {
-            PADDLE_ENFORCE_EQ(!self.IsFinalized(), true,
-                              "BuildStrategy is finlaized.");
+            PADDLE_ENFORCE_NE(self.IsFinalized(), true,
+                              platform::errors::PreconditionNotMet(
+                                  "BuildStrategy has been finlaized, cannot be "
+                                  "configured again."));
             self.remove_unnecessary_lock_ = b;
           },
           R"DOC((bool, optional): If set True, some locks in GPU ops would be
@@ -2010,7 +2020,8 @@ All parameter, weight, gradient are variables in Paddle.
           [](const BuildStrategy &self) { return self.num_trainers_; },
           [](BuildStrategy &self, int num_trainers) {
 #ifdef WIN32
-            PADDLE_THROW("Windows has NO support to distribute mode.");
+            PADDLE_THROW(platform::errors::Unavailable(
+                "Windows has NO support to distribute mode."));
 #endif
             self.num_trainers_ = num_trainers;
           })
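
The num_trainers setter (and is_distribution in the last hunk) gets the same treatment for its platform guard: the bare-string PADDLE_THROW becomes a typed platform::errors::Unavailable error. A standalone sketch of the guard, mirroring the diff's structure and wording (the WIN32 macro is as used in the diff; std::runtime_error stands in for the Unavailable error type):

    #include <stdexcept>

    // Sketch only: on Windows builds, distributed-training settings raise an
    // "Unavailable" error instead of being accepted.
    void SetNumTrainers(int &num_trainers_field, int num_trainers) {
    #ifdef WIN32
      throw std::runtime_error(
          "Unavailable: Windows has NO support to distribute mode.");
    #endif
      num_trainers_field = num_trainers;  // unreachable on Windows builds
    }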
@@ -2053,8 +2064,10 @@ All parameter, weight, gradient are variables in Paddle.
             return self.fuse_elewise_add_act_ops_;
           },
           [](BuildStrategy &self, bool b) {
-            PADDLE_ENFORCE_EQ(!self.IsFinalized(), true,
-                              "BuildStrategy is finlaized.");
+            PADDLE_ENFORCE_NE(self.IsFinalized(), true,
+                              platform::errors::PreconditionNotMet(
+                                  "BuildStrategy has been finlaized, cannot be "
+                                  "configured again."));
             self.fuse_elewise_add_act_ops_ = b;
           },
           R"DOC((bool, optional): fuse_elewise_add_act_ops indicate whether
@@ -2072,9 +2085,10 @@ All parameter, weight, gradient are variables in Paddle.
           "fuse_bn_act_ops",
           [](const BuildStrategy &self) { return self.fuse_bn_act_ops_; },
           [](BuildStrategy &self, bool b) {
-            PADDLE_ENFORCE_EQ(!self.IsFinalized(), true,
-                              platform::errors::PreconditionNotMet(
-                                  "BuildStrategy is finlaized."));
+            PADDLE_ENFORCE_NE(self.IsFinalized(), true,
+                              platform::errors::PreconditionNotMet(
+                                  "BuildStrategy has been finlaized, cannot be "
+                                  "configured again."));
             self.fuse_bn_act_ops_ = b;
           },
           R"DOC((bool, optional): fuse_bn_act_ops indicate whether
@@ -2092,9 +2106,10 @@ All parameter, weight, gradient are variables in Paddle.
           "enable_auto_fusion",
           [](const BuildStrategy &self) { return self.enable_auto_fusion_; },
           [](BuildStrategy &self, bool b) {
-            PADDLE_ENFORCE_EQ(!self.IsFinalized(), true,
-                              platform::errors::PreconditionNotMet(
-                                  "BuildStrategy is finlaized."));
+            PADDLE_ENFORCE_NE(self.IsFinalized(), true,
+                              platform::errors::PreconditionNotMet(
+                                  "BuildStrategy has been finlaized, cannot be "
+                                  "configured again."));
             self.enable_auto_fusion_ = b;
           },
           R"DOC((bool, optional): Whether to enable fusing subgraph to a
@@ -2115,8 +2130,10 @@ All parameter, weight, gradient are variables in Paddle.
             return self.fuse_relu_depthwise_conv_;
           },
           [](BuildStrategy &self, bool b) {
-            PADDLE_ENFORCE_EQ(!self.IsFinalized(), true,
-                              "BuildStrategy is finlaized.");
+            PADDLE_ENFORCE_NE(self.IsFinalized(), true,
+                              platform::errors::PreconditionNotMet(
+                                  "BuildStrategy has been finlaized, cannot be "
+                                  "configured again."));
             self.fuse_relu_depthwise_conv_ = b;
           },
           R"DOC((bool, optional): fuse_relu_depthwise_conv indicate whether
@@ -2138,8 +2155,10 @@ All parameter, weight, gradient are variables in Paddle.
             self.fuse_broadcast_ops_ == boost::none;
           },
           [](BuildStrategy &self, bool b) {
-            PADDLE_ENFORCE_EQ(!self.IsFinalized(), true,
-                              "BuildStrategy is finlaized.");
+            PADDLE_ENFORCE_NE(self.IsFinalized(), true,
+                              platform::errors::PreconditionNotMet(
+                                  "BuildStrategy has been finlaized, "
+                                  "cannot be configured again."));
             self.fuse_broadcast_ops_ = b;
           },
           R"DOC((bool, optional): fuse_broadcast_op indicates whether
@@ -2162,16 +2181,20 @@ All parameter, weight, gradient are variables in Paddle.
             self.fuse_all_optimizer_ops_ == boost::none;
           },
           [](BuildStrategy &self, bool b) {
-            PADDLE_ENFORCE_EQ(!self.IsFinalized(), true,
-                              "BuildStrategy is finlaized.");
+            PADDLE_ENFORCE_NE(self.IsFinalized(), true,
+                              platform::errors::PreconditionNotMet(
+                                  "BuildStrategy has been finlaized, "
+                                  "cannot be configured again."));
             self.fuse_all_optimizer_ops_ = b;
           })
       .def_property(
           "sync_batch_norm",
           [](const BuildStrategy &self) { return self.sync_batch_norm_; },
           [](BuildStrategy &self, bool b) {
-            PADDLE_ENFORCE_EQ(!self.IsFinalized(), true,
-                              "BuildStrategy is finlaized.");
+            PADDLE_ENFORCE_NE(self.IsFinalized(), true,
+                              platform::errors::PreconditionNotMet(
+                                  "BuildStrategy has been finlaized, cannot be "
+                                  "configured again."));
             self.sync_batch_norm_ = b;
           },
           R"DOC((bool, optional): sync_batch_norm indicates whether to use
@@ -2204,9 +2227,9 @@ All parameter, weight, gradient are variables in Paddle.
             } else if (PyBool_Check(py_obj)) {
               self.memory_optimize_ = (py_obj == Py_True);
             } else {
-              PADDLE_THROW(
-                  "BuildStrategy.memory_optimize must be None, False or "
-                  "True");
+              PADDLE_THROW(platform::errors::InvalidArgument(
+                  "BuildStrategy.memory_optimize must be set to None, False or "
+                  "True"));
             }
           },
           R"DOC((bool, optional): memory opitimize aims to save total memory
@@ -2222,7 +2245,8 @@ All parameter, weight, gradient are variables in Paddle.
           [](BuildStrategy &self, bool b) {
 #ifdef WIN32
             if (b) {
-              PADDLE_THROW("Windows has NO support to distribute mode.");
+              PADDLE_THROW(platform::errors::Unavailable(
+                  "Windows has NO support to distribute mode."));
             }
 #else
             self.is_distribution_ = b;
