@@ -19,8 +19,6 @@
 namespace paddle {
 namespace operators {
 
-using namespace paddle::framework;  // NOLINT
-
 namespace rnn {
 
 /**
@@ -70,7 +68,7 @@ struct ArgumentName {
 /**
  * Prepare inputs for each step net.
  */
-void SegmentInputs(const std::vector<Scope*>& step_scopes,
+void SegmentInputs(const std::vector<framework::Scope*>& step_scopes,
                    const std::vector<Link>& inlinks,
                    const size_t seq_len,
                    bool infer_shape_mode);
@@ -78,12 +76,12 @@ void SegmentInputs(const std::vector<Scope*>& step_scopes,
 /**
  * Process outputs of step nets and merge to variables.
  */
-void ConcatOutputs(const std::vector<Scope*>& step_scopes,
+void ConcatOutputs(const std::vector<framework::Scope*>& step_scopes,
                    const std::vector<Link>& outlinks,
                    const size_t seq_len,
                    bool infer_shape_mode);
 
-void LinkMemories(const std::vector<Scope*>& step_scopes,
+void LinkMemories(const std::vector<framework::Scope*>& step_scopes,
                   const std::vector<MemoryAttr>& memories,
                   const size_t step_id,
                   const int offset,
@@ -103,14 +101,15 @@ void InitArgument(const ArgumentName& name, Argument* arg);
 
 class RecurrentAlgorithm {
 public:
-  void Run(const Scope& scope, const platform::DeviceContext& dev_ctx) const;
+  void Run(const framework::Scope& scope,
+           const platform::DeviceContext& dev_ctx) const;
 
   void Init(std::unique_ptr<rnn::Argument> arg) { arg_ = std::move(arg); }
 
   /**
    * InferShape must be called before Run.
    */
-  void InferShape(const Scope& scope) const;
+  void InferShape(const framework::Scope& scope) const;
 
 protected:
   /*
@@ -119,13 +118,15 @@ protected:
    * NOTE the scopes are reused in both the forward and backward, so just
    * create once and expand its size if more steps need.
    */
-  void CreateScopes(const Scope& scope) const;
+  void CreateScopes(const framework::Scope& scope) const;
 
-  const std::vector<Scope*>& GetStepScopes(const Scope& scope) const {
-    return *scope.FindVar(arg_->step_scopes)->GetMutable<std::vector<Scope*>>();
+  const std::vector<framework::Scope*>& GetStepScopes(
+      const framework::Scope& scope) const {
+    return *scope.FindVar(arg_->step_scopes)
+                ->GetMutable<std::vector<framework::Scope*>>();
   }
 
-  void InitMemories(Scope* step_scopes, bool infer_shape_mode) const;
+  void InitMemories(framework::Scope* step_scopes, bool infer_shape_mode) const;
 
 private:
   std::unique_ptr<rnn::Argument> arg_;
@@ -146,18 +147,22 @@ class RecurrentGradientAlgorithm {
 public:
   void Init(std::unique_ptr<rnn::Argument> arg) { arg_ = std::move(arg); }
 
-  void Run(const Scope& scope, const platform::DeviceContext& dev_ctx) const;
+  void Run(const framework::Scope& scope,
+           const platform::DeviceContext& dev_ctx) const;
 
-  void LinkBootMemoryGradients(Scope* step_scopes, bool infer_shape_mode) const;
+  void LinkBootMemoryGradients(framework::Scope* step_scopes,
+                               bool infer_shape_mode) const;
 
   /**
    * InferShape must be called before Run.
    */
-  void InferShape(const Scope& scope) const;
+  void InferShape(const framework::Scope& scope) const;
 
 protected:
-  inline const std::vector<Scope*>& GetStepScopes(const Scope& scope) const {
-    return *scope.FindVar(arg_->step_scopes)->GetMutable<std::vector<Scope*>>();
+  inline const std::vector<framework::Scope*>& GetStepScopes(
+      const framework::Scope& scope) const {
+    return *scope.FindVar(arg_->step_scopes)
+                ->GetMutable<std::vector<framework::Scope*>>();
   }
 
 private:
@@ -165,16 +170,18 @@ private:
   mutable size_t seq_len_;
 };
 
-class RecurrentOp final : public OperatorBase {
+class RecurrentOp final : public framework::OperatorBase {
 public:
   void Init() override;
 
   /**
    * InferShape must be called before Run.
    */
-  void InferShape(const Scope& scope) const override { alg_.InferShape(scope); }
+  void InferShape(const framework::Scope& scope) const override {
+    alg_.InferShape(scope);
+  }
 
-  void Run(const Scope& scope,
+  void Run(const framework::Scope& scope,
            const platform::DeviceContext& dev_ctx) const override {
     alg_.Run(scope, dev_ctx);
   }
@@ -185,16 +192,18 @@ private:
   RecurrentAlgorithm alg_;
 };
 
-class RecurrentGradientOp final : public OperatorBase {
+class RecurrentGradientOp final : public framework::OperatorBase {
 public:
   void Init() override;
 
   /**
    * InferShape must be called before Run.
    */
-  void InferShape(const Scope& scope) const override { alg_.InferShape(scope); }
+  void InferShape(const framework::Scope& scope) const override {
+    alg_.InferShape(scope);
+  }
 
-  void Run(const Scope& scope,
+  void Run(const framework::Scope& scope,
            const platform::DeviceContext& dev_ctx) const override {
     alg_.Run(scope, dev_ctx);
   }