@@ -40,7 +40,7 @@ void AssertSameVectorWithoutOrder(const std::vector<T>& expected,
 }
 
 TEST(OpKernel, all) {
-  auto net = std::make_shared<PlainNet>();
+  auto net = std::make_shared<NetOp>();
   ASSERT_NE(net, nullptr);
 
   auto op1 = std::make_shared<TestOp>();
@@ -71,28 +71,21 @@ TEST(OpKernel, all) {
   ASSERT_EQ(2, run_cnt);
   ASSERT_THROW(net->AddOp(op2), paddle::platform::EnforceNotMet);
 }
 
-TEST(AddBackwardOp, TestGradOp) {
-  auto net = std::make_shared<PlainNet>();
-  ASSERT_NE(net, nullptr);
-  net->AddOp(framework::OpRegistry::CreateOp("mul", {"X", "Y"}, {"Out"}, {}));
-  net->AddOp(
-      framework::OpRegistry::CreateOp("add_two", {"X", "Y"}, {"Out"}, {}));
-  net->AddOp(framework::OpRegistry::CreateOp("add_two", {"X", "Y"}, {""}, {}));
-  auto grad_ops = AddBackwardOp(net);
-  for (auto& op : grad_ops->ops_) {
-    op->DebugString();
-  }
-}
-// TODO(zhihong): add fc grad without registering.
-// TEST(AddBackwardOp, TestNoGradOp) {
-//   auto net = std::make_shared<PlainNet>();
-//   ASSERT_NE(net, nullptr);
-//   net->AddOp(framework::OpRegistry::CreateOp("fc", {"X", "W", "b"}, {"Y"},
-//   {})); auto grad_ops = AddBackwardOp(net); for (auto& op : grad_ops->ops_) {
-//   op->DebugString();
-//   }
-// }
+//! TODO(yuyang18): Refine Backward Op.
+// TEST(AddBackwardOp, TestGradOp) {
+//   auto net = std::make_shared<NetOp>();
+//   ASSERT_NE(net, nullptr);
+//   net->AddOp(framework::OpRegistry::CreateOp("mul", {"X", "Y"}, {"Out"}, {}));
+//   net->AddOp(
+//       framework::OpRegistry::CreateOp("add_two", {"X", "Y"}, {"Out"}, {}));
+//   net->AddOp(framework::OpRegistry::CreateOp("add_two", {"X", "Y"}, {""},
+//   {}));
+//   auto grad_ops = AddBackwardOp(net);
+//   for (auto& op : grad_ops->ops_) {
+//     op->DebugString();
+//   }
+//}
 
 } // namespace framework
 } // namespace paddle