add bilinear_tensor_product layer

panyx0718-patch-1
Qiao Longfei 6 years ago
parent f8b2680c53
commit 6097b8b365

@ -162,6 +162,7 @@ __all__ = [
    'grid_sampler',
    'log_loss',
    'add_position_encoding',
    'bilinear_tensor_product',
]
@ -8046,3 +8047,78 @@ def add_position_encoding(input, alpha, beta, name=None):
        attrs={"alpha": alpha,
               "beta": beta})
    return out
def bilinear_tensor_product(x,
                            y,
                            size,
                            act=None,
                            name=None,
                            param_attr=None,
                            bias_attr=None):
    """
    **Bilinear Tensor Product Layer**

    This layer performs a bilinear tensor product on two inputs.
    For example:

    .. math::
       out_{i} = x * W_{i} * {y^\mathrm{T}}, i=0,1,...,size-1

    In this formula:
      - :math:`x`: the first input, contains M elements, shape is [batch_size, M].
      - :math:`y`: the second input, contains N elements, shape is [batch_size, N].
      - :math:`W_{i}`: the i-th learned weight, shape is [M, N].
      - :math:`out_{i}`: the i-th element of out, shape is [batch_size, size].
      - :math:`y^\mathrm{T}`: the transpose of :math:`y`.

    The simple usage is:

    .. code-block:: python

       tensor = fluid.layers.bilinear_tensor_product(x=layer1, y=layer2, size=1000)

    Args:
        x (Variable): 2-D input tensor with shape [batch_size, M].
        y (Variable): 2-D input tensor with shape [batch_size, N].
        size (int): The dimension of this layer, i.e. the size of the output.
        act (str, default None): Activation to be applied to the output of this layer.
        name (str, default None): The name of this layer.
        param_attr (ParamAttr, default None): The parameter attribute for the learnable
            weight of this layer, which has shape [size, M, N].
        bias_attr (ParamAttr, default None): The parameter attribute for the bias
            of this layer. If it is set to False, no bias will be added to the output units.
            If it is set to None, the bias is initialized zero. Default: None.

    Returns:
        Variable: A 2-D Tensor of shape [batch_size, size].

    Examples:
        .. code-block:: python

           tensor = fluid.layers.bilinear_tensor_product(x=layer1, y=layer2, size=1000)
    """
    helper = LayerHelper('bilinear_tensor_product', **locals())
    dtype = helper.input_dtype()

    # Weight parameter has one [M, N] slice per output element.
    param_shape = [size, x.shape[1], y.shape[1]]

    w = helper.create_parameter(
        attr=param_attr, shape=param_shape, dtype=dtype, is_bias=False)

    if name is None:
        out = helper.create_variable_for_type_inference(dtype=dtype)
    else:
        out = helper.create_variable(name=name, dtype=dtype, persistable=False)

    inputs = {"X": x, "Y": y, "Weight": w}
    if helper.bias_attr:
        # Bias is [1, size] so it broadcasts over the batch dimension.
        bias_size = [1, size]
        bias = helper.create_parameter(
            attr=helper.bias_attr, shape=bias_size, dtype=dtype, is_bias=True)
        inputs["Bias"] = bias
    helper.append_op(
        type="bilinear_tensor_product", inputs=inputs, outputs={"Out": out})

    # add activation
    return helper.append_activation(out)

Loading…
Cancel
Save