@@ -13,176 +13,84 @@
 # limitations under the License.
 
 from __future__ import print_function
 
-import paddle
-import paddle.tensor as tensor
-import paddle.fluid as fluid
-from paddle.fluid import Program, program_guard
-import numpy as np
 import unittest
+
+import numpy as np
+import paddle
+import paddle.tensor as tensor
+from paddle.static import Program, program_guard
 
 
-class TestMultiplyAPI(unittest.TestCase):
-    """TestMultiplyAPI."""
-
-    def __run_static_graph_case(self, x_data, y_data, axis=-1):
+class TestMultiplyApi(unittest.TestCase):
+    def _run_static_graph_case(self, x_data, y_data):
         with program_guard(Program(), Program()):
             paddle.enable_static()
             x = paddle.static.data(
                 name='x', shape=x_data.shape, dtype=x_data.dtype)
             y = paddle.static.data(
                 name='y', shape=y_data.shape, dtype=y_data.dtype)
-            res = tensor.multiply(x, y, axis=axis)
+            res = tensor.multiply(x, y)
 
-            place = fluid.CUDAPlace(0) if fluid.core.is_compiled_with_cuda(
-            ) else fluid.CPUPlace()
-            exe = fluid.Executor(place)
-            outs = exe.run(fluid.default_main_program(),
-                           feed={'x': x_data,
-                                 'y': y_data},
-                           fetch_list=[res])
-            res = outs[0]
-            return res
-
-    def __run_static_graph_case_with_numpy_input(self, x_data, y_data, axis=-1):
-        with program_guard(Program(), Program()):
-            paddle.enable_static()
-
-            res = tensor.multiply(x_data, y_data, axis=axis)
-
-            place = fluid.CUDAPlace(0) if fluid.core.is_compiled_with_cuda(
-            ) else fluid.CPUPlace()
-            exe = fluid.Executor(place)
-            outs = exe.run(fluid.default_main_program(),
+            place = paddle.CUDAPlace(0) if paddle.is_compiled_with_cuda(
+            ) else paddle.CPUPlace()
+            exe = paddle.static.Executor(place)
+            outs = exe.run(paddle.static.default_main_program(),
                            feed={'x': x_data,
                                  'y': y_data},
                            fetch_list=[res])
             res = outs[0]
             return res
 
-    def __run_dynamic_graph_case(self, x_data, y_data, axis=-1):
+    def _run_dynamic_graph_case(self, x_data, y_data):
         paddle.disable_static()
         x = paddle.to_tensor(x_data)
         y = paddle.to_tensor(y_data)
-        res = paddle.multiply(x, y, axis=axis)
-        return res.numpy()
-
-    def __run_dynamic_graph_case_with_numpy_input(self, x_data, y_data,
-                                                  axis=-1):
-        paddle.disable_static()
-        res = paddle.multiply(x_data, y_data, axis=axis)
+        res = paddle.multiply(x, y)
         return res.numpy()
 
     def test_multiply(self):
-        """test_multiply."""
         np.random.seed(7)
 
         # test static computation graph: 1-d array
         x_data = np.random.rand(200)
         y_data = np.random.rand(200)
-        res = self.__run_static_graph_case(x_data, y_data)
-        self.assertTrue(np.allclose(res, np.multiply(x_data, y_data)))
-
-        # test static computation graph: 1-d array
-        x_data = np.random.rand(200)
-        y_data = np.random.rand(200)
-        res = self.__run_static_graph_case_with_numpy_input(x_data, y_data)
+        res = self._run_static_graph_case(x_data, y_data)
         self.assertTrue(np.allclose(res, np.multiply(x_data, y_data)))
 
         # test static computation graph: 2-d array
         x_data = np.random.rand(2, 500)
         y_data = np.random.rand(2, 500)
-        res = self.__run_static_graph_case(x_data, y_data)
-        self.assertTrue(np.allclose(res, np.multiply(x_data, y_data)))
-
-        # test static computation graph with_primitives: 2-d array
-        x_data = np.random.rand(2, 500)
-        y_data = np.random.rand(2, 500)
-        res = self.__run_static_graph_case_with_numpy_input(x_data, y_data)
+        res = self._run_static_graph_case(x_data, y_data)
         self.assertTrue(np.allclose(res, np.multiply(x_data, y_data)))
 
         # test static computation graph: broadcast
         x_data = np.random.rand(2, 500)
         y_data = np.random.rand(500)
-        res = self.__run_static_graph_case(x_data, y_data)
+        res = self._run_static_graph_case(x_data, y_data)
         self.assertTrue(np.allclose(res, np.multiply(x_data, y_data)))
 
-        # test static computation graph with_primitives: broadcast
-        x_data = np.random.rand(2, 500)
-        y_data = np.random.rand(500)
-        res = self.__run_static_graph_case_with_numpy_input(x_data, y_data)
-        self.assertTrue(np.allclose(res, np.multiply(x_data, y_data)))
-
-        # test static computation graph: broadcast with axis
-        x_data = np.random.rand(2, 300, 40)
-        y_data = np.random.rand(300)
-        res = self.__run_static_graph_case(x_data, y_data, axis=1)
-        expected = np.multiply(x_data, y_data[..., np.newaxis])
-        self.assertTrue(np.allclose(res, expected))
-
-        # test static computation graph with_primitives: broadcast with axis
-        x_data = np.random.rand(2, 300, 40)
-        y_data = np.random.rand(300)
-        res = self.__run_static_graph_case_with_numpy_input(
-            x_data, y_data, axis=1)
-        expected = np.multiply(x_data, y_data[..., np.newaxis])
-        self.assertTrue(np.allclose(res, expected))
-
         # test dynamic computation graph: 1-d array
         x_data = np.random.rand(200)
         y_data = np.random.rand(200)
-        res = self.__run_dynamic_graph_case(x_data, y_data)
-        self.assertTrue(np.allclose(res, np.multiply(x_data, y_data)))
-
-        # test dynamic numpy input computation graph: 1-d array
-        x_data = np.random.rand(200)
-        y_data = np.random.rand(200)
-        res = self.__run_dynamic_graph_case_with_numpy_input(x_data, y_data)
+        res = self._run_dynamic_graph_case(x_data, y_data)
         self.assertTrue(np.allclose(res, np.multiply(x_data, y_data)))
 
         # test dynamic computation graph: 2-d array
         x_data = np.random.rand(20, 50)
         y_data = np.random.rand(20, 50)
-        res = self.__run_dynamic_graph_case(x_data, y_data)
-        self.assertTrue(np.allclose(res, np.multiply(x_data, y_data)))
-
-        # test dynamic numpy input computation graph: 1-d array
-        x_data = np.random.rand(20, 50)
-        y_data = np.random.rand(20, 50)
-        res = self.__run_dynamic_graph_case_with_numpy_input(x_data, y_data)
+        res = self._run_dynamic_graph_case(x_data, y_data)
         self.assertTrue(np.allclose(res, np.multiply(x_data, y_data)))
 
         # test dynamic computation graph: broadcast
         x_data = np.random.rand(2, 500)
         y_data = np.random.rand(500)
-        res = self.__run_dynamic_graph_case(x_data, y_data)
+        res = self._run_dynamic_graph_case(x_data, y_data)
         self.assertTrue(np.allclose(res, np.multiply(x_data, y_data)))
-
-        # test dynamic computation graph with numpy tensor: broadcast
-        x_data = np.random.rand(2, 500)
-        y_data = np.random.rand(500)
-        res = self.__run_dynamic_graph_case_with_numpy_input(x_data, y_data)
-        self.assertTrue(np.allclose(res, np.multiply(x_data, y_data)))
-
-        # test dynamic computation graph: broadcast with axis
-        x_data = np.random.rand(2, 300, 40)
-        y_data = np.random.rand(300)
-        res = self.__run_dynamic_graph_case(x_data, y_data, axis=1)
-        expected = np.multiply(x_data, y_data[..., np.newaxis])
-        self.assertTrue(np.allclose(res, expected))
-
-        # test dynamic computation graph with numpy tensor: broadcast with axis
-        x_data = np.random.rand(2, 300, 40)
-        y_data = np.random.rand(300)
-        res = self.__run_dynamic_graph_case_with_numpy_input(
-            x_data, y_data, axis=1)
-        expected = np.multiply(x_data, y_data[..., np.newaxis])
-        self.assertTrue(np.allclose(res, expected))
 
 
 class TestMultiplyError(unittest.TestCase):
-    """TestMultiplyError."""
-
     def test_errors(self):
-        """test_errors."""
         # test static computation graph: dtype can not be int8
         paddle.enable_static()
         with program_guard(Program(), Program()):
@@ -226,6 +134,35 @@ class TestMultiplyError(unittest.TestCase):
         y = paddle.to_tensor(y_data)
         self.assertRaises(TypeError, paddle.multiply, x, y)
 
+        # test dynamic computation graph: dtype must be Tensor type
+        x_data = np.random.randn(200).astype(np.int64)
+        y_data = np.random.randn(200).astype(np.float64)
+        y = paddle.to_tensor(y_data)
+        self.assertRaises(TypeError, paddle.multiply, x_data, y)
+
+        # test dynamic computation graph: dtype must be Tensor type
+        x_data = np.random.randn(200).astype(np.int64)
+        y_data = np.random.randn(200).astype(np.float64)
+        x = paddle.to_tensor(x_data)
+        self.assertRaises(TypeError, paddle.multiply, x, y_data)
+
+        # test dynamic computation graph: dtype must be Tensor type
+        x_data = np.random.randn(200).astype(np.float32)
+        y_data = np.random.randn(200).astype(np.float32)
+        x = paddle.to_tensor(x_data)
+        self.assertRaises(TypeError, paddle.multiply, x, y_data)
+
+        # test dynamic computation graph: dtype must be Tensor type
+        x_data = np.random.randn(200).astype(np.float32)
+        y_data = np.random.randn(200).astype(np.float32)
+        x = paddle.to_tensor(x_data)
+        self.assertRaises(TypeError, paddle.multiply, x_data, y)
+
+        # test dynamic computation graph: dtype must be Tensor type
+        x_data = np.random.randn(200).astype(np.float32)
+        y_data = np.random.randn(200).astype(np.float32)
+        self.assertRaises(TypeError, paddle.multiply, x_data, y_data)
+
 
 if __name__ == '__main__':
     unittest.main()
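
For context on the behavior change the deleted cases covered: the old tests relied on axis=1 to line a (300,)-shaped y up with the middle dimension of a (2, 300, 40) x. Below is a minimal caller-side sketch of the equivalent call once the axis argument is gone, assuming Paddle 2.x dynamic mode; the variable names are illustrative and not part of this patch. Without axis, the extra unit dimension has to be added explicitly so the usual trailing-dimension broadcasting applies.

    import numpy as np
    import paddle

    x = paddle.to_tensor(np.random.rand(2, 300, 40))
    y = paddle.to_tensor(np.random.rand(300))

    # Reshape y to (300, 1) so it broadcasts against the middle dimension,
    # which is what axis=1 used to do implicitly.
    res = paddle.multiply(x, y.reshape([300, 1]))
    assert np.allclose(res.numpy(),
                       np.multiply(x.numpy(), y.numpy()[..., np.newaxis]))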
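
The added TypeError cases check that paddle.multiply no longer accepts raw numpy arrays in dynamic mode. A minimal sketch of the corresponding caller-side fix, again assuming Paddle 2.x dynamic mode with illustrative names: wrap both operands in paddle.to_tensor before the call.

    import numpy as np
    import paddle

    x_data = np.random.rand(200)
    y_data = np.random.rand(200)

    # numpy inputs now raise TypeError, so convert them to Tensors first.
    res = paddle.multiply(paddle.to_tensor(x_data), paddle.to_tensor(y_data))
    assert np.allclose(res.numpy(), np.multiply(x_data, y_data))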