@@ -173,8 +173,8 @@ TEST(Backward, simple_op_grad) {
       "rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {});
   ASSERT_NE(fwd, nullptr);
   auto gop = f::OpRegistry::CreateGradOp(*fwd);
-  ASSERT_EQ(1UL, gop->inputs_.size());
-  ASSERT_EQ("rowwise_add_grad", gop->type_);
+  ASSERT_EQ(1UL, gop->Inputs().size());
+  ASSERT_EQ("rowwise_add_grad", gop->Type());
   ASSERT_EQ(f::GradVarName("x"), gop->Output(f::GradVarName("X")));
   ASSERT_EQ(f::GradVarName("b"), gop->Output(f::GradVarName("b")));
 }
@@ -210,13 +210,13 @@ TEST(Backward, net_fc_backward_normal) {
   ASSERT_EQ(3UL, net->ops_.size());
 
   f::OperatorBase &d_sigmoid = *net->ops_[0];
-  ASSERT_EQ("sigmoid_grad", d_sigmoid.type_);
+  ASSERT_EQ("sigmoid_grad", d_sigmoid.Type());
 
   f::OperatorBase &d_add = *net->ops_[1];
-  ASSERT_EQ("rowwise_add_grad", d_add.type_);
+  ASSERT_EQ("rowwise_add_grad", d_add.Type());
 
   f::OperatorBase &d_mul = *net->ops_[2];
-  ASSERT_EQ("mul_grad", d_mul.type_);
+  ASSERT_EQ("mul_grad", d_mul.Type());
 }
 
 TEST(Backward, net_fc_backward_not_have_b) {
@@ -236,10 +236,10 @@ TEST(Backward, net_fc_backward_not_have_b) {
   ASSERT_EQ(2UL, net->ops_.size());
 
   f::OperatorBase &d_sigmoid = *net->ops_[0];
-  ASSERT_EQ("sigmoid_grad", d_sigmoid.type_);
+  ASSERT_EQ("sigmoid_grad", d_sigmoid.Type());
 
   f::OperatorBase &d_mul = *net->ops_[1];
-  ASSERT_EQ("mul_grad", d_mul.type_);
+  ASSERT_EQ("mul_grad", d_mul.Type());
 }
 
 TEST(Backward, net_input_of_network_not_need_grad) {
@@ -293,7 +293,7 @@ TEST(Backward, net_shared_weight) {
   ASSERT_TRUE(bwd->IsNetOp());
   auto bwd_net = static_cast<ops::NetOp *>(bwd.get());
   ASSERT_EQ(3UL, bwd_net->ops_.size());
-  ASSERT_EQ("add", bwd_net->ops_[2]->type_);
+  ASSERT_EQ("add", bwd_net->ops_[2]->Type());
 }
 
 TEST(Backward, op_register_grad_not_for_network) {
@@ -334,15 +334,15 @@ TEST(Backward, op_part_of_output_are_not_need) {
   ASSERT_EQ(net->ops_.size(), 2UL);
 
   auto &fill_zero = *net->ops_[0];
-  ASSERT_EQ("fill_zeros_like", fill_zero.type_);
+  ASSERT_EQ("fill_zeros_like", fill_zero.Type());
   ASSERT_EQ(1UL, fill_zero.Inputs("Src").size());
   ASSERT_EQ("Z", fill_zero.Input("Src"));
   ASSERT_EQ(1UL, fill_zero.Outputs("Dst").size());
   ASSERT_EQ(std::string("Z") + f::kZeroVarSuffix, fill_zero.Output("Dst"));
 
   auto &d_many_out = *net->ops_[1];
-  ASSERT_EQ("many_output_op_grad", d_many_out.type_);
-  ASSERT_EQ(1UL + 2UL + 2UL, d_many_out.inputs_.size()); // I/O/OG
+  ASSERT_EQ("many_output_op_grad", d_many_out.Type());
+  ASSERT_EQ(1UL + 2UL + 2UL, d_many_out.Inputs().size()); // I/O/OG
   ASSERT_EQ(std::string("Z") + f::kZeroVarSuffix,
             d_many_out.Input(f::GradVarName("z")));
   ASSERT_EQ(f::GradVarName("Y"), d_many_out.Input(f::GradVarName("y")));
@@ -354,9 +354,9 @@ TEST(Backward, op_part_of_input_are_not_need) {
                                      {{"Out", {"out"}}}, {});
   auto backward = f::Backward(*fwd, {"a"});
   auto &grad_mul = *backward;
-  ASSERT_EQ(grad_mul.type_, "mul_grad");
-  ASSERT_EQ(grad_mul.inputs_.size(), 2UL + 1UL + 1UL);
-  ASSERT_EQ(grad_mul.outputs_.size(), 2UL);
+  ASSERT_EQ(grad_mul.Type(), "mul_grad");
+  ASSERT_EQ(grad_mul.Inputs().size(), 2UL + 1UL + 1UL);
+  ASSERT_EQ(grad_mul.Outputs().size(), 2UL);
   ASSERT_EQ(grad_mul.Output(f::GradVarName("X")), f::kEmptyVarName);
   ASSERT_EQ(grad_mul.Output(f::GradVarName("Y")), f::GradVarName("b"));
   ASSERT_EQ(grad_mul.Input(f::GradVarName("Out")), f::GradVarName("out"));
@@ -394,18 +394,18 @@ TEST(Backward, linear_net_intermediate_variable_has_no_grad) {
   auto &grad_fc = *bwd_net->ops_[0];
 
   const char *all = paddle::operators::NetOp::kAll;
-  EXPECT_EQ(grad_fc.inputs_[all].size(),
+  EXPECT_EQ(grad_fc.Inputs(all).size(),
             2UL       /* external input number */
                 + 1UL /* external output number*/
                 + 1UL /* number of gradient of external output*/
                 + 2U /* internal variable number*/);
-  EXPECT_EQ(grad_fc.outputs_[all].size(),
+  EXPECT_EQ(grad_fc.Outputs(all).size(),
             2UL       /* input number of mul*/
                 + 2UL /* input number of rowwise_add
                       */
                 + 1UL /* input number of sigmod */);
-  EXPECT_EQ(bwd_net->ops_[1]->inputs_[all].size(), 0UL);
-  EXPECT_EQ(bwd_net->ops_[1]->outputs_[all].size(), 0UL);
-  EXPECT_EQ(bwd_net->ops_[2]->inputs_[all].size(), 0UL);
-  EXPECT_EQ(bwd_net->ops_[2]->outputs_[all].size(), 0UL);
+  EXPECT_EQ(bwd_net->ops_[1]->Inputs(all).size(), 0UL);
+  EXPECT_EQ(bwd_net->ops_[1]->Outputs(all).size(), 0UL);
+  EXPECT_EQ(bwd_net->ops_[2]->Inputs(all).size(), 0UL);
+  EXPECT_EQ(bwd_net->ops_[2]->Outputs(all).size(), 0UL);
 }
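Every hunk above is the same mechanical migration: the tests stop reading OperatorBase's formerly public members (type_, inputs_, outputs_) and go through const accessors (Type(), Inputs(), Outputs()) instead. For orientation, here is a minimal sketch of the accessor shape the tests now compile against; the VariableNameMap alias and the exact signatures are assumptions for illustration, not the real paddle/framework header:

#include <map>
#include <string>
#include <vector>

// Assumed alias: an operator argument maps a parameter name ("X", "Src", ...)
// to the list of variable names bound to it.
using VariableNameMap = std::map<std::string, std::vector<std::string>>;

class OperatorBase {
 public:
  // Read-only accessors the tests migrate to.
  const std::string &Type() const { return type_; }
  const VariableNameMap &Inputs() const { return inputs_; }
  const VariableNameMap &Outputs() const { return outputs_; }

  // Per-parameter lookups seen above, e.g. Inputs("Src") / Input("Src");
  // the Outputs(name)/Output(name) counterparts would mirror these.
  const std::vector<std::string> &Inputs(const std::string &name) const {
    return inputs_.at(name);
  }
  const std::string &Input(const std::string &name) const {
    return inputs_.at(name).at(0);
  }

 private:
  // Formerly public: direct reads like op.type_ no longer compile.
  std::string type_;
  VariableNameMap inputs_;
  VariableNameMap outputs_;
};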