remove complex module direction (#29419)

revert-31562-mean
chentianyu03 authored 5 years ago, committed by GitHub
parent 6296f4ed09
commit acce962133

@@ -1,21 +0,0 @@
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from . import tensor
from .tensor_op_patch import monkey_patch_math_complex
from .tensor import *
__all__ = tensor.__all__ + []
monkey_patch_math_complex()

@@ -1,40 +0,0 @@
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ...fluid import framework


def is_complex(x):
    """
    Return True if the input(x) is a ComplexVariable.
    """
    return isinstance(x, framework.ComplexVariable)


def is_real(x):
    """
    Return True if the input(x) is a real number Variable.
    """
    return isinstance(x, framework.Variable)


def complex_variable_exists(inputs, layer_name):
    for inp in inputs:
        if is_complex(inp):
            return
    err_msg = "At least one input of layer complex." if len(inputs) > 1 \
        else "The input of layer complex."
    raise ValueError(err_msg + layer_name +
                     "() must be ComplexVariable, please "
                     "use the layer for real number instead.")

@@ -1,24 +0,0 @@
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from . import linalg
from . import math
from . import manipulation
from .linalg import *
from .math import *
from .manipulation import *
__all__ = math.__all__
__all__ += linalg.__all__
__all__ += manipulation.__all__

@@ -1,75 +0,0 @@
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserve.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ..helper import is_complex, is_real, complex_variable_exists
from ....fluid.framework import ComplexVariable
from ....fluid import layers
__all__ = ['matmul', ]


def matmul(x, y, transpose_x=False, transpose_y=False, alpha=1.0, name=None):
    """
    Applies matrix multiplication to two complex number tensors. See the
    detailed description in :ref:`api_fluid_layers_matmul`.

    Args:
        x (ComplexVariable|Variable): The first input, can be a ComplexVariable
            with data type complex64 or complex128, or a Variable with data type
            float32 or float64.
        y (ComplexVariable|Variable): The second input, can be a ComplexVariable
            with data type complex64 or complex128, or a Variable with data type
            float32 or float64.
        transpose_x (bool): Whether to transpose :math:`x` before multiplication.
        transpose_y (bool): Whether to transpose :math:`y` before multiplication.
        alpha (float): The scale of the output. Default 1.0.
        name (str|None): A name for this layer (optional). If set to None, the
            layer will be named automatically.

    Returns:
        ComplexVariable: The product result, with the same data type as the inputs.

    Examples:
        .. code-block:: python

            import numpy as np
            import paddle
            import paddle.fluid.dygraph as dg

            with dg.guard():
                x = np.array([[1.0 + 1j, 2.0 + 1j], [3.0 + 1j, 4.0 + 1j]])
                y = np.array([1.0 + 1j, 1.0 + 1j])
                x_var = dg.to_variable(x)
                y_var = dg.to_variable(y)
                result = paddle.complex.matmul(x_var, y_var)
                print(result.numpy())
                # [1.+5.j 5.+9.j]
    """
    # x = a + bi, y = c + di
    # P1 = ac; P2 = (a + b)(c + d); P3 = bd; then mm(x, y) = (P1 - P3) + (P2 - P1 - P3)j
    complex_variable_exists([x, y], "matmul")
    a, b = (x.real, x.imag) if is_complex(x) else (x, None)
    c, d = (y.real, y.imag) if is_complex(y) else (y, None)
    P1 = layers.matmul(a, c, transpose_x, transpose_y, alpha, name)
    if is_real(b) and is_real(d):
        P2 = layers.matmul(a + b, c + d, transpose_x, transpose_y, alpha, name)
        P3 = layers.matmul(b, d, transpose_x, transpose_y, alpha, name)
        real = P1 - P3
        imag = P2 - P1 - P3
    elif is_real(b):
        real = P1
        imag = layers.matmul(b, c, transpose_x, transpose_y, alpha, name)
    else:
        real = P1
        imag = layers.matmul(a, d, transpose_x, transpose_y, alpha, name)
    return ComplexVariable(real, imag)
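
The comment in matmul is the Karatsuba-style trick for complex multiplication: with x = a + bj and y = c + dj, three real matmuls P1 = ac, P2 = (a + b)(c + d) and P3 = bd suffice, because the real part is P1 - P3 = ac - bd and the imaginary part is P2 - P1 - P3 = ad + bc. A NumPy-only check of that identity (no Paddle needed; the shapes are arbitrary):

# NumPy-only check of the three-multiplication identity used in matmul.
import numpy as np

rng = np.random.default_rng(0)
a, b = rng.standard_normal((2, 3)), rng.standard_normal((2, 3))   # x = a + bj
c, d = rng.standard_normal((3, 4)), rng.standard_normal((3, 4))   # y = c + dj

P1 = a @ c                      # first real matmul
P2 = (a + b) @ (c + d)          # second real matmul
P3 = b @ d                      # third real matmul
real, imag = P1 - P3, P2 - P1 - P3

reference = (a + 1j * b) @ (c + 1j * d)   # direct complex matmul
assert np.allclose(real + 1j * imag, reference)
print("identity holds:", real.shape)      # identity holds: (2, 4)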

@@ -1,142 +0,0 @@
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserve.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from paddle.common_ops_import import *
from ..helper import is_complex, is_real, complex_variable_exists
from ....fluid.framework import ComplexVariable
from ....fluid import layers
__all__ = [
'reshape',
'transpose',
]


def reshape(x, shape, inplace=False, name=None):
    """
    Changes the shape of ``x`` without changing its data.

    There are some tricks when specifying the target shape.

    1. -1 means the value of this dimension is inferred from the total element
    number of x and the remaining dimensions. Thus one and only one dimension
    can be set to -1.

    2. 0 means the actual dimension value is going to be copied from the
    corresponding dimension of x. The index of 0s in shape can not exceed
    the dimension of x.

    Here are some examples to explain it.

    1. Given a 3-D tensor x with a shape [2, 4, 6], and the target shape
    is [6, 8], the reshape operator will transform x into a 2-D tensor with
    shape [6, 8], leaving x's data unchanged.

    2. Given a 3-D tensor x with a shape [2, 4, 6], and the target shape
    specified is [2, 3, -1, 2], the reshape operator will transform x into a
    4-D tensor with shape [2, 3, 4, 2], leaving x's data unchanged. In this
    case, one dimension of the target shape is set to -1, and the value of this
    dimension is inferred from the total element number of x and the remaining
    dimensions.

    3. Given a 3-D tensor x with a shape [2, 4, 6], and the target shape
    is [-1, 0, 3, 2], the reshape operator will transform x into a 4-D tensor
    with shape [2, 4, 3, 2], leaving x's data unchanged. In this case,
    besides -1, 0 means the actual dimension value is going to be copied from
    the corresponding dimension of x.

    Args:
        x (ComplexVariable): the input. A ``Tensor`` or ``LoDTensor``, data
            type: ``complex64`` or ``complex128``.
        shape (list|tuple|Variable): target shape. At most one dimension of
            the target shape can be -1. If ``shape`` is a list or tuple, its
            elements should be integers or Tensors with shape [1] and
            data type ``int32``. If ``shape`` is a Variable, it should be
            a 1-D Tensor of data type ``int32``.
        inplace (bool, optional): If ``inplace`` is True, the output of
            ``reshape`` is the same ComplexVariable as the input. Otherwise,
            the input and output of ``reshape`` are different
            ComplexVariables. Defaults to False. Note that if ``x`` is used as
            the input of more than one OP, ``inplace`` must be False.
        name (str, optional): The default value is None. Normally there is no
            need for the user to set this property. For more information,
            please refer to :ref:`api_guide_Name`.

    Returns:
        ComplexVariable: A ``Tensor`` or ``LoDTensor``. The data type is the same as ``x``. It is a new ComplexVariable if ``inplace`` is ``False``, otherwise it is ``x``.

    Raises:
        ValueError: If more than one element of ``shape`` is -1.
        ValueError: If an element of ``shape`` is 0, and the corresponding dimension index exceeds the dimension of ``x``.
        ValueError: If any element of ``shape`` other than -1 is negative.

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid
            import paddle.complex as cpx
            import paddle.fluid.dygraph as dg
            import numpy as np

            x_np = np.random.randn(2, 3, 4) + 1j * np.random.randn(2, 3, 4)

            place = fluid.CPUPlace()
            with dg.guard(place):
                x_var = dg.to_variable(x_np)
                y_var = cpx.reshape(x_var, (2, -1))
                y_np = y_var.numpy()
                print(y_np.shape)
                # (2, 12)
    """
    complex_variable_exists([x], "reshape")
    if inplace:
        x.real = fluid.layers.reshape(x.real, shape, inplace=inplace, name=name)
        x.imag = fluid.layers.reshape(x.imag, shape, inplace=inplace, name=name)
        return x
    out_real = fluid.layers.reshape(x.real, shape, inplace=inplace, name=name)
    out_imag = fluid.layers.reshape(x.imag, shape, inplace=inplace, name=name)
    return ComplexVariable(out_real, out_imag)


def transpose(x, perm, name=None):
    """
    Permutes the data dimensions of the complex number :attr:`input` according to :attr:`perm`.

    See :ref:`api_fluid_layers_transpose` for the real number API.

    Args:
        x (ComplexVariable): The input n-D ComplexVariable with data type
            complex64 or complex128.
        perm (list): Permute the input according to the value of perm.
        name (str): The name of this layer. It is optional.

    Returns:
        ComplexVariable: A transposed n-D ComplexVariable, with the same data type as :attr:`input`.

    Examples:
        .. code-block:: python

            import paddle

            x = paddle.to_tensor([[1.0 + 1.0j, 2.0 + 1.0j], [3.0 + 1.0j, 4.0 + 1.0j], [5.0 + 1.0j, 6.0 + 1.0j]])
            x_transposed = paddle.complex.transpose(x, [1, 0])
            print(x_transposed.numpy())
            # [[1.+1.j 3.+1.j 5.+1.j]
            #  [2.+1.j 4.+1.j 6.+1.j]]
    """
    complex_variable_exists([x], "transpose")
    real = layers.transpose(x.real, perm, name)
    imag = layers.transpose(x.imag, perm, name)
    return ComplexVariable(real, imag)
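
Both reshape and transpose follow the same pattern: apply the real-valued layer to x.real and x.imag separately, then rewrap the pair in a ComplexVariable. This is safe because shape-only ops commute with splitting a complex array into its parts, as this NumPy-only sketch illustrates (no Paddle needed):

# NumPy-only sketch: shape-only ops commute with the real/imag split.
import numpy as np

x = np.random.randn(2, 3, 4) + 1j * np.random.randn(2, 3, 4)

# reshape each part separately, then recombine -> same as reshaping x directly
y = x.real.reshape(2, -1) + 1j * x.imag.reshape(2, -1)
assert np.array_equal(y, x.reshape(2, -1))

# transpose each part separately, then recombine -> same as transposing x directly
z = x.real.transpose(1, 0, 2) + 1j * x.imag.transpose(1, 0, 2)
assert np.array_equal(z, x.transpose(1, 0, 2))

print(y.shape, z.shape)   # (2, 12) (3, 2, 4)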

File diff suppressed because it is too large.

@@ -1,53 +0,0 @@
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import division
from ...fluid import framework
from . import tensor


def monkey_patch_math_complex():
    # ComplexVariable does not support scalar types yet, so the reverse
    # methods, such as "__radd__", "__rsub__", "__rmul__", "__rdiv__",
    # "__rtruediv__", "__rmatmul__", are not included here.
    complex_methods = [
        ('__add__', _binary_creator_('__add__', "elementwise_add", False)),
        ('__sub__', _binary_creator_('__sub__', "elementwise_sub", False)),
        ('__mul__', _binary_creator_('__mul__', "elementwise_mul", False)),
        ('__div__', _binary_creator_('__div__', "elementwise_div", False)),
        ('__truediv__', _binary_creator_('__truediv__', "elementwise_div",
                                         False)),
        ('__matmul__', _binary_creator_('__matmul__', "matmul", False)),
    ]

    for method in complex_methods:
        method_name = method[0]
        method_impl = method[1]
        if method_impl:
            setattr(framework.ComplexVariable, method_name, method_impl)

    for method in tensor.__all__:
        method_impl = getattr(tensor, method)
        if method_impl:
            setattr(framework.ComplexVariable, method, method_impl)


# for binary operators such as elementwise_add
def _binary_creator_(method_name, op_type, reverse=False):
    def __impl__(self, other_var):
        math_op = getattr(tensor, op_type)
        return math_op(self, other_var)

    __impl__.__name__ = method_name
    return __impl__
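
A minimal standalone sketch of the _binary_creator_ pattern, using a toy Complex class instead of framework.ComplexVariable (all names here are illustrative; no Paddle needed): each dunder method is a closure that looks up a module-level op by name at call time, mirroring getattr(tensor, op_type) above.

# Toy sketch of the monkey-patching pattern: bind dunder methods on a class
# to free functions looked up by name at call time.
import types

class Complex:                       # stand-in for framework.ComplexVariable
    def __init__(self, re, im):
        self.re, self.im = re, im

ops = types.SimpleNamespace(         # stand-in for the `tensor` module
    elementwise_add=lambda x, y: Complex(x.re + y.re, x.im + y.im),
    elementwise_mul=lambda x, y: Complex(x.re * y.re - x.im * y.im,
                                         x.re * y.im + x.im * y.re),
)

def _binary_creator_(method_name, op_type):
    def __impl__(self, other_var):
        math_op = getattr(ops, op_type)   # late lookup, like getattr(tensor, op_type)
        return math_op(self, other_var)
    __impl__.__name__ = method_name
    return __impl__

for name, op_type in [('__add__', 'elementwise_add'), ('__mul__', 'elementwise_mul')]:
    setattr(Complex, name, _binary_creator_(name, op_type))

z = Complex(1.0, 2.0) * Complex(3.0, 4.0)
print(z.re, z.im)   # -5.0 10.0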