commit 15941dbd8c
@@ -0,0 +1,216 @@

# Design Doc: Python API

Due to the refactoring of the PaddlePaddle core, we need Python classes to construct the corresponding protobuf messages that describe a DL program.

| Python classes | Protobuf messages |
| --- | --- |
| Program | ProgramDesc |
| Block | BlockDesc |
| Operator | OpDesc |
| Variable | VarDesc |

Please be aware that these Python classes need to maintain some construction-time information, which is not part of the protobuf messages.

## Core Concepts

### Program

A `ProgramDesc` describes a [DL program](https://github.com/PaddlePaddle/Paddle/blob/develop/doc/design/program.md), which is composed of an array of `BlockDesc`s. A `BlockDesc` refers to its parent block by its index in the array. For example, operators in the step block of an RNN operator need to be able to access variables in its ancestor blocks.

Whenever we create a block, we need to set its parent block to the current block, so the Python class `Program` needs to maintain a data member, `current_block_idx`, that records the index of the current block.

```python
class Program(object):
    def __init__(self):
        self.proto = core.NewProgram()  # a C++ ProgramDesc pointer.
        self.blocks = []  # a list of Blocks
        self.blocks.append(Block(self, -1))  # the global block
        self.current_block_idx = 0  # initialized to the global block

    def global_block(self):
        return self.blocks[0]

    def current_block(self):
        return self.blocks[self.current_block_idx]

    def rollback(self):
        self.current_block_idx = self.current_block().parent_idx

    def create_block(self):
        new_block_idx = len(self.blocks)
        self.blocks.append(Block(self, self.current_block_idx))
        self.current_block_idx = new_block_idx
        return self.current_block()
```

`Program` is an accessor to the protobuf message `ProgramDesc`, which is created in the C++ space, because the `InferShape` function is implemented in C++ and manipulates `VarDesc` messages, which are members of `BlockDesc`, which in turn is a member of `ProgramDesc`.

`Program` creates the first block as the global block in its constructor. All parameters and their initializer operators are in the global block.
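
To make the block nesting concrete, here is a minimal usage sketch, assuming the `Program` and `Block` classes defined in this document; the RNN scenario is only illustrative:

```python
# A minimal sketch of block nesting, e.g., for an RNN's step block.
prog = Program()

step_block = prog.create_block()  # its parent is the global block (index 0)
# ... append the step operators to prog.current_block() here ...
prog.rollback()                   # pop back to the global block

assert prog.current_block() is prog.global_block()
assert step_block.parent_idx == 0
```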

### Block

A [Block](https://github.com/PaddlePaddle/Paddle/blob/develop/doc/design/block.md) includes

1. a map from variable names to an instance of the Python `Variable` class, and
1. a list of `Operator` instances.

```python
class Block(object):
    def __init__(self, program, parent_idx):
        self.proto = core.NewBlock(program.proto)
        self.program = program
        self.vars = {}  # a map from variable name to Variable
        self.ops = []  # a list of Operators
        self.parent_idx = parent_idx

    def create_var(self, ...):
        return Variable(self, ...)

    def _create_global_var(self, ...):
        return self.program.global_block().create_var(...)

    def create_parameter(self, name, ...):
        # Parameter is a subclass of Variable. See the Parameter section for details.
        self.vars[name] = Parameter(self._create_global_var(...), ...)
        return self.vars[name]

    def append_operator(self, ...):
        op = Operator(self, ...)
        self.ops.append(op)
        return op

    def prepend_operator(self, ...):  # Parameter's ctor prepends initialization operators.
        op = Operator(self, ...)
        self.ops.insert(0, op)
        return op
```

`create_parameter` is necessary because parameters are global variables, defined in the global block, but they can be created from within a sub-block, e.g., by an FC layer in the step block of an RNN operator.

`prepend_operator` is necessary because the constructor of `Parameter` needs to create the initialize (or load) operator of the parameter, and would like to put it in the *preamble* of the global block.
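
For example, here is a minimal sketch, assuming the classes above, of creating a parameter from inside a sub-block:

```python
# A sketch: the parameter is requested inside the step block, but its
# underlying variable is defined in the global block.
prog = Program()
step = prog.create_block()           # e.g., an RNN step block
W = step.create_parameter("W", ...)  # the VarDesc goes to the global block
prog.rollback()
```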

### Operator

The `Operator` class fills in the `OpDesc` message and calls the C++ function `InferShape` to infer the output shapes from the input shapes.

```python
class Operator(object):
    def __init__(self,
                 block,    # Block
                 type,     # string
                 inputs,   # dict<string, Variable>
                 outputs,  # dict<string, Variable>
                 attrs     # dict<string, Any>
                ):
        self.proto = core.NewOpDesc(block.proto, type, inputs, outputs, attrs)
        core.infer_shape(self.proto, inputs, outputs)

    def type(self):
        return self.proto.type()
```

`Operator` creates the `OpDesc` message in the C++ space, so that it can call the `InferShape` function, which is also implemented in C++.
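
As an illustration, a layer would construct an operator as follows; this is a sketch, and the operator type `"mul"` and the input/output names are hypothetical:

```python
# Hypothetical sketch: constructing an operator directly.
op = Operator(block,
              type="mul",
              inputs={"X": x, "Y": y},  # x, y are Variables created earlier
              outputs={"Out": out},
              attrs={})
assert op.type() == "mul"
```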

### Variable

Operators take Variables as their inputs and outputs.

```python
class Variable(object):
    def __init__(self,
                 block,            # Block
                 shape,            # tuple
                 name=None,        # string
                 dtype="float32",  # string
                 lod_level=None    # int
                ):
        if name is None:
            name = unique_name_generator()
        self.name = name
        self.block = block
        self.proto = core.NewVarDesc(block.proto, name, shape, lod_level)
        self.writer = None
```

Please be aware of `self.writer`, which tracks the operator that creates the variable. It is possible that more than one operator writes to a variable, but in the Python space each write to a variable is represented by its own `Variable` instance. This is guaranteed by the fact that **`core.NewVarDesc` must NOT create a new `VarDesc` message if a variable of the given name already exists in the specified block**.
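
A short sketch of this guarantee, with illustrative names:

```python
# Two Python Variables with the same name share one underlying VarDesc.
a = block.create_var(name="h", shape=(100,))
b = block.create_var(name="h", shape=(100,))  # core.NewVarDesc reuses "h"
# a and b are distinct Python objects, but a.proto and b.proto denote the
# same VarDesc message in the block.
```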

### Parameter

A parameter is a global variable with an initializer (or load) operator.

```python
class Parameter(Variable):
    def __init__(self,
                 block,                     # Block
                 shape,                     # tuple
                 name=None,                 # string
                 dtype="float32",           # string
                 lod_level=None,            # int
                 trainable=True,            # bool
                 initialize_op_attrs=None,  # dict<string, Any>
                 optimize_op_attrs=None):   # dict<string, Any>
        super(Parameter, self).__init__(block, shape, name, dtype, lod_level)
        self.trainable = trainable
        self.optimize_op_attrs = optimize_op_attrs
        block.prepend_operator(initialize_op_attrs['type'],  # e.g., "uniform_random"
                               None,                 # no inputs
                               self,                 # the output is the parameter
                               initialize_op_attrs)
```

When users create a parameter, they can call

```python
program.create_parameter(
    ...,
    init_attr={
        "type": "uniform_random",
        "min": -1.0,
        "max": 1.0,
    })
```

In the above example, `init_attr.type` names an initialize operator. It can also name a load operator:

```python
init_attr={
    "type": "load",
    "filename": "something.numpy",
}
```

`optimize_op_attrs` is not in the `VarDesc` message, but is kept in the Python instance, because it will be used in the Python space when creating the optimize operator's `OpDesc`, and will then become part of that `OpDesc` message.
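
For example, a user might pass both kinds of attributes when creating a parameter; this is a sketch, and the attribute name `learning_rate` is illustrative:

```python
w = program.create_parameter(
    ...,
    init_attr={"type": "uniform_random", "min": -1.0, "max": 1.0},
    optimize_op_attrs={"learning_rate": 0.01})  # kept in Python space
```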

## Layer Functions

A layer is a Python function that creates some operators and variables. Layers simplify the work of application programmers.

### Data Layer

```python
def data_layer(name, type, column_name):
    block = the_current_program.global_block()
    var = block.create_var(  # the data variable lives in the global block
        name=name,
        shape=[None] + type.dims(),
        dtype=type.dtype)
    block.prepend_operator(type="Feed",
                           inputs=None,
                           outputs=[var],
                           attrs={"column_name": column_name})
    return var
```

The input to the feed operator is a special variable in the global scope, which is the output of [Python readers](https://github.com/PaddlePaddle/Paddle/blob/develop/doc/design/reader/README.md).

### FC Layer

```python
def fc_layer(input, size, ...):
    block = program.current_block()
    w = block.create_parameter(...)
    b = block.create_parameter(...)
    out = block.create_var()
    op = block.append_operator("FC", X=input, W=w, b=b, out=out)
    out.writer = op
    return out
```
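
With these layer functions, a user could compose a small network as follows; this is a sketch, and the `dense_vector` type helper is hypothetical:

```python
image = data_layer(name="image", type=dense_vector(784), column_name="pixels")
hidden = fc_layer(image, size=200)
predict = fc_layer(hidden, size=10)
```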

@@ -0,0 +1,67 @@

# Design Doc: Gradient Operators Registration

## The Problem Posed

In our current operator registration mechanism, for each operator, the programmer should register a *gradient operator creator* function, which takes a C++ operator instance and returns the corresponding gradient operator instance.

However, as we decided to separate the *compilation* and *execution* of DL models, we need to reshape the creator to take a protobuf `OpDesc` message and return the corresponding gradient `OpDesc` message(s).

Moreover, the new registration mechanism needs to support the fact that an operator's gradient computation might be a composition of operators.

## Current Implementation

`OpInfo` structs are stored in an associative map whose key is the operator type. The `grad_op_type_` field indicates the type of the associated gradient operator. An operator can create its gradient operator via the gradient operator's `OpInfo::creator_`. The pseudocode is

```cpp
struct OpInfo {
  std::function<OperatorBase*(...)> creator_;
  std::string grad_op_type_;
  ...
};

map<string, OpInfo> OpInfoMap;

OperatorBase* CreateGradientOperator(const OperatorBase& op) {
  return OpInfoMap.at(op.Type()).creator_(...);
}
```

## Proposed Solution

The mapping relationship between an operator and its gradient operators is a function. The interface of that function is:

```cpp
// (OpDesc) --> vector<OpDesc>
using GradOpDescMaker = std::function<std::vector<OpDesc>(const OpDesc&)>;
```

The function takes the `OpDesc` of the forward operator and returns one or more gradient operator descriptions.
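
For example, such a maker can return a composition of operators. The following is a sketch; the `scale` operator and the `@GRAD` variable-naming convention are assumptions here:

```cpp
// A sketch: the gradient of Out = X - Y is dX = dOut and dY = -dOut,
// which can be composed from two "scale" operators.
std::vector<OpDesc> MinusOpGradMaker(const OpDesc& fwd_op) {
  OpDesc dx;
  dx.set_type("scale");
  // ... wire Out@GRAD as dx's input, X@GRAD as its output, scale = 1 ...
  OpDesc dy;
  dy.set_type("scale");
  // ... wire Out@GRAD as dy's input, Y@GRAD as its output, scale = -1 ...
  return {dx, dy};
}
```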

The `GradOpDescMaker` will be registered in `OpInfo` to replace the `grad_op_type_` field. The `OpInfo` should be

```cpp
struct OpInfo {
  GradOpDescMaker grad_op_maker_;
  ...
};
```

The `grad_op_maker_` is `nullptr` if the operator does not have any associated gradient operators.

We should change the registration macros at the same time. In the current solution, there is no difference between forward operators and backward operators, so `REGISTER_OP` just registers one operator. If `REGISTER_OPERATOR` takes both an `OpProtoAndCheckerMaker` and a `GradOpDescMaker`, we can just list them in the same macro. This can be done by a variadic macro using `__VA_ARGS__`.
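
A sketch of such a macro, assuming an `OperatorRegistrar` helper that walks the argument pack and fills each `OpInfo` field:

```cpp
// Sketch: one macro accepts any mix of operator, proto-maker, and grad-maker
// classes; the registrar dispatches each type to the right OpInfo field.
#define REGISTER_OPERATOR(op_type, ...)                       \
  static ::paddle::framework::OperatorRegistrar<__VA_ARGS__> \
      __op_registrar_##op_type##__(#op_type)
```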

The user interface should be

```cpp
vector<OpDesc> MinusOpGradMaker(OpDesc) {...}

REGISTER_OPERATOR(minus, MinusOp, MinusOpProtoAndCheckerMaker, MinusOpGradMaker);
// Developers can still manually implement the gradient operator.
REGISTER_OPERATOR(minus_grad, MinusGradOp);
```

The interface of the current `REGISTER_OP` macro need not change. Internally, `REGISTER_OP` will invoke `REGISTER_OPERATOR` twice and generate the `GradOpDescMaker` inside.

```cpp
REGISTER_OP(minus, MinusOp, MinusOpProtoAndCheckerMaker, minus_grad, MinusGradOp);
```
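
One possible expansion, as a sketch; `DefaultGradOpDescMaker` is an assumed helper that emits a single gradient `OpDesc` of the registered gradient type:

```cpp
// Sketch: REGISTER_OP forwards to REGISTER_OPERATOR twice. How the generated
// maker learns grad_op_type (e.g., via a template argument) is left open here.
#define REGISTER_OP(op_type, op_class, op_maker_class, grad_op_type, \
                    grad_op_class)                                   \
  REGISTER_OPERATOR(grad_op_type, grad_op_class);                    \
  REGISTER_OPERATOR(op_type, op_class, op_maker_class,               \
                    ::paddle::framework::DefaultGradOpDescMaker)
```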

@@ -0,0 +1,105 @@

/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#include "paddle/framework/op_info.h"
#include "paddle/framework/op_proto_maker.h"
#include "paddle/framework/operator.h"

namespace paddle {
namespace framework {
namespace details {

// The kind of OpInfo field that a registrar template argument fills in.
enum OpInfoFillType {
  kOperator = 0,
  kOpProtoAndCheckerMaker = 1,
  kGradOpDescMaker = 2
};

// Maps a registrar argument type T to the OpInfo field it fills, by checking
// which base class T derives from.
template <typename T>
struct OpInfoFillTypeID {
  static constexpr OpInfoFillType ID() {
    return std::is_base_of<OperatorBase, T>::value
               ? kOperator
               : (std::is_base_of<OpProtoAndCheckerMaker, T>::value
                      ? kOpProtoAndCheckerMaker
                      : (std::is_base_of<GradOpDescMakerBase, T>::value
                             ? kGradOpDescMaker
                             : static_cast<OpInfoFillType>(-1)));
  }
};

template <typename T, OpInfoFillType = OpInfoFillTypeID<T>::ID()>
struct OpInfoFiller;

// Walks the registrar's argument pack at compile time; each step fills the
// OpInfo field that corresponds to the I-th argument type.
template <size_t I, bool at_end, typename... ARGS>
class OperatorRegistrarRecursive;

template <size_t I, typename... ARGS>
class OperatorRegistrarRecursive<I, false, ARGS...> {
 public:
  using T = typename std::tuple_element<I, std::tuple<ARGS...>>::type;
  OperatorRegistrarRecursive(const char* op_type, OpInfo* info) {
    OpInfoFiller<T> fill;
    fill(op_type, info);
    constexpr auto size = sizeof...(ARGS);
    OperatorRegistrarRecursive<I + 1, I + 1 == size, ARGS...> reg(op_type,
                                                                  info);
    (void)(reg);
  }
};

// Base case: the whole argument pack has been consumed.
template <size_t I, typename... ARGS>
class OperatorRegistrarRecursive<I, true, ARGS...> {
 public:
  OperatorRegistrarRecursive(const char* op_type, OpInfo* info) {}
};

// Fills OpInfo::creator_ with a factory lambda for the operator class T.
template <typename T>
struct OpInfoFiller<T, kOperator> {
  void operator()(const char* op_type, OpInfo* info) const {
    info->creator_ = [](const std::string& type, const VariableNameMap& inputs,
                        const VariableNameMap& outputs,
                        const AttributeMap& attrs) {
      return new T(type, inputs, outputs, attrs);
    };
  }
};

// Builds and validates the OpProto and OpAttrChecker from the maker class T.
template <typename T>
struct OpInfoFiller<T, kOpProtoAndCheckerMaker> {
  void operator()(const char* op_type, OpInfo* info) const {
    info->proto_ = new OpProto;
    info->checker_ = new OpAttrChecker();
    auto maker = T(info->proto_, info->checker_);
    maker.Validate();
    info->proto_->set_type(op_type);
    PADDLE_ENFORCE(
        info->proto_->IsInitialized(),
        "Fail to initialize %s's OpProto, because %s is not initialized",
        op_type, info->proto_->InitializationErrorString());
  }
};

// Fills OpInfo::grad_op_maker_ with an instance of the grad-maker class T.
template <typename T>
struct OpInfoFiller<T, kGradOpDescMaker> {
  void operator()(const char* op_type, OpInfo* info) const {
    info->grad_op_maker_ = new T();
  }
};
}  // namespace details

}  // namespace framework
}  // namespace paddle