@@ -383,28 +383,37 @@ PYBIND11_MODULE(core, m) {
LoD is short for Level of Details and is usually used for sequences of
varying length. You can skip the following comment if you don't need
optional LoD.

For example, a LoDTensor X can look like the example below. It contains
2 sequences. The first has length 2 and the second has length 3, as
described by x.lod.

The first tensor dimension 5=2+3 is calculated from LoD if it's available.
It means the total number of sequence elements. In X, each element has 2
columns, hence [5, 2].

x.lod = [[2, 3]]
x.data = [[1, 2], [3, 4], [5, 6], [7, 8], [9, 10]]
x.shape = [5, 2]

LoD can have multiple levels (for example, a paragraph can have multiple
sentences and a sentence can have multiple words). In the following
LoDTensor Y, the lod_level is 2. It means there are 2 sequences: the
first sequence's length is 2 (it has 2 sub-sequences) and the second one's
length is 1. The first sequence's 2 sub-sequences have lengths 2 and 2,
respectively, and the second sequence's single sub-sequence has length 3.

y.lod = [[2, 1], [2, 2, 3]]
y.shape = [2+2+3, ...]
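
A minimal sketch of how a two-level LoDTensor like Y might be built with
the methods documented below (the 7 x 1 shape and random data are only
illustrative):

.. code-block:: python

    import paddle.fluid as fluid
    import numpy as np

    y = fluid.LoDTensor()
    # 7 rows = 2 + 2 + 3 sequence elements; a single illustrative column
    y.set(np.random.rand(7, 1).astype('float32'), fluid.CPUPlace())
    # level 0: 2 sequences with 2 and 1 sub-sequences;
    # level 1: sub-sequence lengths 2, 2 and 3
    y.set_recursive_sequence_lengths([[2, 1], [2, 2, 3]])
    print(y.recursive_sequence_lengths())  # [[2, 1], [2, 2, 3]]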

Examples:
    .. code-block:: python

        import paddle.fluid as fluid

        t = fluid.LoDTensor()

Note:
    In the above description, LoD is length-based. In Paddle internal
@@ -416,7 +425,6 @@ PYBIND11_MODULE(core, m) {
self-explanatory. In this case, it must be length-based. Due to historical
reasons, when LoD is called lod in the public API, it might be offset-based.
Users should be careful about it.
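
A rough sketch of that correspondence, using only the set_lod and
set_recursive_sequence_lengths methods documented below (the offsets are
the cumulative sums of the lengths):

.. code-block:: python

    import paddle.fluid as fluid
    import numpy as np

    t = fluid.LoDTensor()
    t.set(np.ndarray([5, 30]), fluid.CPUPlace())

    # length-based: two sequences of lengths 2 and 3
    t.set_recursive_sequence_lengths([[2, 3]])
    print(t.lod())                         # offset-based view: [[0, 2, 5]]

    # setting the offsets directly describes the same segmentation
    t.set_lod([[0, 2, 5]])
    print(t.recursive_sequence_lengths())  # [[2, 3]]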
)DOC")
|
|
|
|
|
.def("__array__", [](Tensor &self) { return TensorToPyArray(self); })
|
|
|
|
|
.def("__init__",
|
|
|
|
|
@@ -454,6 +462,16 @@ PYBIND11_MODULE(core, m) {
Args:
    lod (List[List[int]]): the lod to be set.

Examples:
    .. code-block:: python

        import paddle.fluid as fluid
        import numpy as np

        t = fluid.LoDTensor()
        t.set(np.ndarray([5, 30]), fluid.CPUPlace())
        t.set_lod([[0, 2, 5]])
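        # the offsets [0, 2, 5] split the 5 rows into two sequences,
        # rows [0, 2) and rows [2, 5)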
)DOC")
|
|
|
|
|
.def("set_recursive_sequence_lengths",
|
|
|
|
|
[](LoDTensor &self, const std::vector<std::vector<size_t>>
|
|
|
|
|
@@ -480,6 +498,16 @@ PYBIND11_MODULE(core, m) {
Args:
    recursive_sequence_lengths (List[List[int]]): sequence lengths.

Examples:
    .. code-block:: python

        import paddle.fluid as fluid
        import numpy as np

        t = fluid.LoDTensor()
        t.set(np.ndarray([5, 30]), fluid.CPUPlace())
        t.set_recursive_sequence_lengths([[2, 3]])
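        # length-based: the first sequence spans 2 rows, the second 3,
        # covering all 5 rows of the tensor set above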
)DOC")
|
|
|
|
|
.def("lod",
|
|
|
|
|
[](LoDTensor &self) -> std::vector<std::vector<size_t>> {
|
|
|
|
|
@@ -495,6 +523,17 @@ PYBIND11_MODULE(core, m) {
Returns:
    out (List[List[int]]): the lod of the LoDTensor.

Examples:
    .. code-block:: python

        import paddle.fluid as fluid
        import numpy as np

        t = fluid.LoDTensor()
        t.set(np.ndarray([5, 30]), fluid.CPUPlace())
        t.set_lod([[0, 2, 5]])
        print(t.lod())  # [[0, 2, 5]]
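        # lod() returns the offset-based form passed to set_lod above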
)DOC")
|
|
|
|
|
// Set above comments of set_lod.
|
|
|
|
|
.def("recursive_sequence_lengths",
|
|
|
|
|
@@ -511,6 +550,17 @@ PYBIND11_MODULE(core, m) {
Returns:
    out (List[List[int]]): the sequence lengths.

Examples:
    .. code-block:: python

        import paddle.fluid as fluid
        import numpy as np

        t = fluid.LoDTensor()
        t.set(np.ndarray([5, 30]), fluid.CPUPlace())
        t.set_recursive_sequence_lengths([[2, 3]])
        print(t.recursive_sequence_lengths())  # [[2, 3]]
)DOC")
|
|
|
|
|
.def("has_valid_recursive_sequence_lengths",
|
|
|
|
|
[](LoDTensor &self) -> bool {
|
|
|
|
|
@@ -523,6 +573,17 @@ PYBIND11_MODULE(core, m) {
Returns:
    out (bool): whether the lod is valid.

Examples:
    .. code-block:: python

        import paddle.fluid as fluid
        import numpy as np

        t = fluid.LoDTensor()
        t.set(np.ndarray([5, 30]), fluid.CPUPlace())
        t.set_recursive_sequence_lengths([[2, 3]])
        print(t.has_valid_recursive_sequence_lengths())  # True
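        # True here: the lengths [2, 3] sum to 5, matching the first
        # dimension of the tensor set above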
)DOC")
|
|
|
|
|
.def("__getitem__", PySliceTensor, py::return_value_policy::reference,
|
|
|
|
|
R"DOC(
|
|
|
|
|
@@ -985,7 +1046,16 @@ All parameter, weight, gradient are variables in Paddle.
return res;
});

py::class_<LoDTensorArray>(m, "LoDTensorArray", R"DOC(
Array of LoDTensor.

Examples:
    .. code-block:: python

        import paddle.fluid as fluid

        arr = fluid.LoDTensorArray()
)DOC")
|
|
|
|
|
.def("__init__",
|
|
|
|
|
[](LoDTensorArray &instance) { new (&instance) LoDTensorArray(); })
|
|
|
|
|
.def("__getitem__",
|
|
|
|
|
@@ -1004,7 +1074,20 @@ All parameter, weight, gradient are variables in Paddle.
self.back().ShareDataWith(t);
self.back().set_lod(t.lod());
},
py::arg("tensor"), "Append a LoDensor to LoDTensorArray.");
|
|
|
|
|
py::arg("tensor"), R"DOC(
|
|
|
|
|
Append a LoDensor to LoDTensorArray.
|
|
|
|
|
|
|
|
|
|
Examples:
|
|
|
|
|
.. code-block:: python
|
|
|
|
|
|
|
|
|
|
import paddle.fluid as fluid
|
|
|
|
|
import numpy as np
|
|
|
|
|
|
|
|
|
|
arr = fluid.LoDTensorArray()
|
|
|
|
|
t = fluid.LoDTensor()
|
|
|
|
|
t.set(np.ndarray([5, 30]), fluid.CPUPlace())
|
|
|
|
|
arr.append(t)
|
|
|
|
|
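        # the array now holds one entry; arr[0] shares data with t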
)DOC");
|
|
|
|
|
|
|
|
|
|
m.def("IsInplace",
|
|
|
|
|
[](std::string op) -> bool { return operators::IsInplace(op); });
|
|
|
|
|
|