|
|
|
@@ -15,9 +15,11 @@
|
|
|
|
|
#include "paddle/framework/backward.h"
|
|
|
|
|
#include "paddle/operators/net_op.h"
|
|
|
|
|
|
|
|
|
|
#include <deque>
#include <iterator>
#include <list>
#include <memory>
|
|
|
|
|
|
|
|
|
|
#include "paddle/framework/block_desc.h"
|
|
|
|
|
#include "paddle/framework/op_registry.h"
|
|
|
|
|
#include "paddle/operators/net_op.h"
|
|
|
|
|
#include "paddle/operators/recurrent_op.h"
|
|
|
|
@@ -270,5 +272,145 @@ std::unique_ptr<OperatorBase> Backward(
|
|
|
|
|
return BackwardRecursive(forwardOp, no_grad_names, uid);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// ==================================== //
|
|
|
|
|
|
|
|
|
|
static bool AllGradInSet(const std::vector<std::string>& names,
|
|
|
|
|
const std::unordered_set<std::string>& set) {
|
|
|
|
|
for (const std::string& name : names) {
|
|
|
|
|
if (!set.count(GradVarName(name))) {
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
std::vector<std::unique_ptr<OpDescBind>> MakeOpGrad(
|
|
|
|
|
const std::unique_ptr<OpDescBind>& op_desc,
|
|
|
|
|
std::unordered_set<std::string>& no_grad_vars) {
|
|
|
|
|
std::vector<std::unique_ptr<OpDescBind>> grad_op_descs;
|
|
|
|
|
// All input gradients of forwarding operator do not need to calculat.
|
|
|
|
|
const std::vector<std::string>& inputs = op_desc->InputArgumentNames();
|
|
|
|
|
if (AllGradInSet(inputs, no_grad_vars)) {
|
|
|
|
|
return grad_op_descs; // empty vector
|
|
|
|
|
}
|
|
|
|
|
// All output gradients of forwarding operator do not need to calculate.
|
|
|
|
|
const std::vector<std::string>& outputs = op_desc->OutputArgumentNames();
|
|
|
|
|
if (AllGradInSet(outputs, no_grad_vars)) {
|
|
|
|
|
for (const std::string& name : inputs) {
|
|
|
|
|
no_grad_vars.insert(GradVarName(name));
|
|
|
|
|
}
|
|
|
|
|
return grad_op_descs; // empty vector
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
grad_op_descs = OpRegistry::CreateGradOpDescs(*op_desc);
|
|
|
|
|
|
|
|
|
|
std::list<std::unique_ptr<OpDescBind>> pending_fill_zeros_ops;
|
|
|
|
|
for (auto& desc : grad_op_descs) {
|
|
|
|
|
for (const std::string& in_name : desc->InputArgumentNames()) {
|
|
|
|
|
if (no_grad_vars.count(in_name)) {
|
|
|
|
|
std::string prefix = in_name.substr(
|
|
|
|
|
0, in_name.size() - sizeof(kGradVarSuffix) / sizeof(char) + 1);
|
|
|
|
|
std::string new_name = prefix + kZeroVarSuffix;
|
|
|
|
|
desc->Rename(in_name, new_name);
|
|
|
|
|
std::unique_ptr<OpDescBind> fill_zeros_op(new OpDescBind(
|
|
|
|
|
"fill_zeros_like", {{"X", {prefix}}}, {{"Y", {new_name}}}, {}));
|
|
|
|
|
pending_fill_zeros_ops.push_back(std::move(fill_zeros_op));
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
for (const std::string& out_name : desc->OutputArgumentNames()) {
|
|
|
|
|
if (no_grad_vars.count(out_name)) {
|
|
|
|
|
desc->Rename(out_name, kEmptyVarName);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
for (auto& p : pending_fill_zeros_ops) {
|
|
|
|
|
grad_op_descs.insert(grad_op_descs.begin(), std::move(p));
|
|
|
|
|
}
|
|
|
|
|
return grad_op_descs;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Builds the backward op list for block `block_idx` of `program_desc`.
//
// Walks the block's forward ops in reverse, creating their gradient ops via
// MakeOpGrad. A "recurrent" op additionally gets a backward step block
// appended to the program. When several gradient ops write the same output
// variable, each write is renamed to a unique "@RENAME@i" variable and a
// "sum" op is inserted right after the last writer to merge them.
std::vector<std::unique_ptr<OpDescBind>> MakeBlockBackward(
    ProgramDescBind& program_desc, int block_idx,
    std::unordered_set<std::string>& no_grad_vars) {
  BlockDescBind* cur_block = program_desc.Block(block_idx);
  std::deque<std::unique_ptr<OpDescBind>>& op_descs = cur_block->ops_;
  // Maps a gradient output name to the indices (into backward_descs) of all
  // ops that write it, so duplicated writes can be detected below.
  std::unordered_map<std::string, std::vector<size_t>> dup_out_ops;
  size_t grad_desc_idx = 0;
  std::vector<std::unique_ptr<OpDescBind>> backward_descs;
  for (auto it = op_descs.rbegin(); it != op_descs.rend(); ++it) {
    std::vector<std::unique_ptr<OpDescBind>> op_grads =
        MakeOpGrad(*it, no_grad_vars);

    if ((*it)->Type() == "recurrent") {
      PADDLE_ENFORCE_EQ(
          op_grads.size(), size_t(1),
          "rnn_op's gradient process should contain only one op.");
      // The block attribute is named "step_block" — the same name the
      // gradient op gets below. "stop_block" was a typo.
      int step_block_idx = (*it)->GetBlockAttr("step_block");
      auto backward_block_op_descs =
          MakeBlockBackward(program_desc, step_block_idx, no_grad_vars);
      BlockDescBind* backward_block = program_desc.AppendBlock(*cur_block);
      for (auto& ptr : backward_block_op_descs) {
        backward_block->ops_.push_back(std::move(ptr));
      }
      op_grads[0]->SetBlockAttr("step_block", *backward_block);
    }

    // Record which backward op(s) write each output variable.
    for (const auto& desc : op_grads) {
      for (const std::string& out_name : desc->OutputArgumentNames()) {
        dup_out_ops[out_name].emplace_back(grad_desc_idx);
      }
      ++grad_desc_idx;
    }
    std::transform(
        op_grads.begin(), op_grads.end(), std::back_inserter(backward_descs),
        [](std::unique_ptr<OpDescBind>& ptr) { return std::move(ptr); });
  }
  // Check whether some variables are written more than once
  std::list<std::pair<size_t, std::unique_ptr<OpDescBind>>> pending_sum_ops;
  for (const auto& dup : dup_out_ops) {
    const std::string& out_name = dup.first;
    // Reference, not a copy — the original copied the whole index vector.
    const std::vector<size_t>& dup_op = dup.second;
    if (out_name != kEmptyVarName && dup_op.size() > 1) {
      // Rename every duplicate write to a unique variable, then sum those
      // variables back into the original gradient variable.
      std::vector<std::string> sum_op_inputs;
      for (size_t i = 0; i < dup_op.size(); ++i) {
        std::string new_name = out_name + "@RENAME@" + std::to_string(i);
        backward_descs[dup_op[i]]->Rename(out_name, new_name);
        sum_op_inputs.emplace_back(new_name);
      }
      std::unique_ptr<OpDescBind> sum_op(new OpDescBind(
          "sum", {{"X", sum_op_inputs}}, {{"Out", {out_name}}}, {}));
      // The sum op goes right after the last op that writes the variable.
      pending_sum_ops.push_back({dup_op.back(), std::move(sum_op)});
    }
  }
  // Insert from the highest position down so earlier insertions do not
  // shift the target positions of later ones.
  pending_sum_ops.sort(
      [](const std::pair<size_t, std::unique_ptr<OpDescBind>>& a,
         const std::pair<size_t, std::unique_ptr<OpDescBind>>& b) {
        return a.first > b.first;
      });
  for (auto& p : pending_sum_ops) {
    backward_descs.insert(backward_descs.begin() + p.first + 1,
                          std::move(p.second));
  }
  return backward_descs;
}
|
|
|
|
|
|
|
|
|
|
// Appends the backward pass for the whole program: generates gradient ops
// for the root block and pushes them after the existing forward ops.
void AppendBackward(ProgramDescBind& program_desc,
                    const std::unordered_set<std::string>& no_grad_vars) {
  // Translate forward variable names into gradient variable names. The
  // kEmptyVarName + kGradVarSuffix entry is seeded up front — presumably so
  // gradients of variables renamed to kEmptyVarName are always skipped.
  std::unordered_set<std::string> grad_names_to_skip;
  grad_names_to_skip.reserve(no_grad_vars.size() + 1);
  grad_names_to_skip.insert(std::string(kEmptyVarName) + kGradVarSuffix);
  for (const auto& fwd_name : no_grad_vars) {
    grad_names_to_skip.insert(GradVarName(fwd_name));
  }

  const int root_block_idx = 0;
  auto grad_ops =
      MakeBlockBackward(program_desc, root_block_idx, grad_names_to_skip);
  // Append every generated gradient op to the root block's op list.
  auto& root_ops = program_desc.Block(root_block_idx)->ops_;
  for (auto& grad_op : grad_ops) {
    root_ops.push_back(std::move(grad_op));
  }
}
|
|
|
|
|
|
|
|
|
|
} // namespace framework
|
|
|
|
|
} // namespace paddle
|
|
|
|
|