|
|
|
@ -6,6 +6,7 @@
|
|
|
|
|
USE_OP(add_two);
|
|
|
|
|
USE_OP(mul);
|
|
|
|
|
USE_OP(sigmoid);
|
|
|
|
|
USE_OP(softmax);
|
|
|
|
|
|
|
|
|
|
namespace paddle {
|
|
|
|
|
namespace framework {
|
|
|
|
@ -75,16 +76,21 @@ TEST(AddBackwardOp, TestGradOp) {
|
|
|
|
|
net->AddOp(
|
|
|
|
|
framework::OpRegistry::CreateOp("add_two", {"X", "Y"}, {"Out"}, {}));
|
|
|
|
|
net->AddOp(framework::OpRegistry::CreateOp("add_two", {"X", "Y"}, {""}, {}));
|
|
|
|
|
// net->AddOp(framework::OpRegistry::CreateOp("fc", {
//        Input("X"), Input("W"), Input("b")},
//        {Output("Y")},
//        {}
// ));
|
|
|
|
|
auto grad_ops = AddBackwardOp(net);
|
|
|
|
|
for (auto& op : grad_ops->ops_) {
|
|
|
|
|
op->DebugString();
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// TODO(zhihong): add fc grad without registering.
|
|
|
|
|
// TEST(AddBackwardOp, TestNoGradOp) {
|
|
|
|
|
// auto net = std::make_shared<PlainNet>();
|
|
|
|
|
// ASSERT_NE(net, nullptr);
|
|
|
|
|
// net->AddOp(framework::OpRegistry::CreateOp("fc", {"X", "W", "b"}, {"Y"},
//                                            {}));
// auto grad_ops = AddBackwardOp(net);
// for (auto& op : grad_ops->ops_) {
|
|
|
|
|
// op->DebugString();
|
|
|
|
|
// }
|
|
|
|
|
// }
|
|
|
|
|
|
|
|
|
|
} // namespace framework
|
|
|
|
|
} // namespace paddle
|
|
|
|
|