Calculating gradients for a partial graph

Added backward.calc_gradient to backpropagate gradients from the given targets to the inputs.
detection_output_fixbug
xuwei06 7 years ago
parent 0ef9dc6122
commit 585dec3dc2

@ -87,7 +87,11 @@ class GradOpDescMakerBase {
auto onames = this->Output(name);
ret_val.reserve(onames.size());
std::transform(onames.begin(), onames.end(), std::back_inserter(ret_val),
GradVarName);
[this](const std::string& fwd_var_name) -> std::string {
auto g_name = GradVarName(fwd_var_name);
(*this->grad_to_var_)[g_name] = fwd_var_name;
return g_name;
});
return ret_val;
}

@ -129,7 +129,7 @@ class OpDesc {
}
proto::OpDesc desc_;
// input arg name => output variable names
// input arg name => input variable names
VariableNameMap inputs_;
// output arg name => output variable names
VariableNameMap outputs_;

File diff suppressed because it is too large Load Diff

@ -1,7 +1,17 @@
from ..registry import register_layer
__activations__ = [
'abs', 'tanh', 'sigmoid', 'relu', 'sqrt', 'ceil', 'floor', 'log', 'round'
'abs',
'ceil',
'exp',
'floor',
'log',
'relu',
'round',
'sigmoid',
'sqrt',
'square',
'tanh',
]
__all__ = [

@ -1,7 +1,8 @@
from ..layer_helper import LayerHelper
from ..param_attr import ParamAttr
__all__ = [
'create_tensor', 'cast', 'concat', 'sums', 'assign',
'create_tensor', 'create_parameter', 'cast', 'concat', 'sums', 'assign',
'fill_constant_batch_size_like', 'fill_constant', 'ones', 'zeros'
]
@ -11,6 +12,33 @@ def create_tensor(dtype, name=None):
return helper.create_variable(name=helper.name, dtype=dtype)
def create_parameter(shape,
                     dtype,
                     attr=None,
                     is_bias=False,
                     default_initializer=None):
    """Create a learnable parameter variable.

    Args:
        shape(list[int]): shape of the parameter.
        dtype(string): element type of the parameter.
        attr(ParamAttr): attributes of the parameter; a default
            ParamAttr is used when None.
        is_bias(bool): selects the default initializer when
            default_initializer is None — initializer.Constant(0.0)
            for a bias, Xavier() otherwise.
        default_initializer(Initializer): initializer for the parameter.

    Returns:
        Parameter: the created parameter.
    """
    helper = LayerHelper("create_parameter")
    # Fall back to a default ParamAttr when the caller gave none.
    param_attr = ParamAttr() if attr is None else attr
    return helper.create_parameter(param_attr, shape, dtype, is_bias,
                                   default_initializer)
def cast(x, dtype):
"""
This function takes in the input with input_dtype
@ -180,7 +208,8 @@ def fill_constant_batch_size_like(input,
Examples:
.. code-block:: python
data = fluid.layers.fill_constant(shape=[1], value=0, dtype='int64')
data = fluid.layers.fill_constant_batch_size_like(
input=like, shape=[1], value=0, dtype='int64')
"""
helper = LayerHelper("fill_constant_batch_size_like", **locals())
out = helper.create_tmp_variable(dtype=dtype)

Loading…
Cancel
Save