|
|
|
|
@ -169,7 +169,7 @@ class TestElementwiseMulDoubleGradCheck(unittest.TestCase):
|
|
|
|
|
@prog_scope()
|
|
|
|
|
def func(self, place):
|
|
|
|
|
# the shape of input variable should be clearly specified, not include -1.
|
|
|
|
|
shape = [7, 9]
|
|
|
|
|
shape = [2, 3, 5, 7]
|
|
|
|
|
eps = 0.005
|
|
|
|
|
dtype = np.float64
|
|
|
|
|
|
|
|
|
|
@ -219,7 +219,7 @@ class TestElementwiseMulBroadcastDoubleGradCheck(unittest.TestCase):
|
|
|
|
|
@prog_scope()
|
|
|
|
|
def func(self, place):
|
|
|
|
|
# the shape of input variable should be clearly specified, not include -1.
|
|
|
|
|
shape = [7, 9]
|
|
|
|
|
shape = [2, 3, 5, 7]
|
|
|
|
|
eps = 0.005
|
|
|
|
|
dtype = np.float64
|
|
|
|
|
|
|
|
|
|
@ -246,7 +246,7 @@ class TestElementwiseAddDoubleGradCheck(unittest.TestCase):
|
|
|
|
|
@prog_scope()
|
|
|
|
|
def func(self, place):
|
|
|
|
|
# the shape of input variable should be clearly specified, not include -1.
|
|
|
|
|
shape = [7, 9]
|
|
|
|
|
shape = [2, 3, 5, 7]
|
|
|
|
|
eps = 0.005
|
|
|
|
|
dtype = np.float64
|
|
|
|
|
|
|
|
|
|
@ -273,7 +273,7 @@ class TestElementwiseAddBroadcastDoubleGradCheck(unittest.TestCase):
|
|
|
|
|
@prog_scope()
|
|
|
|
|
def func(self, place):
|
|
|
|
|
# the shape of input variable should be clearly specified, not include -1.
|
|
|
|
|
shape = [7, 9]
|
|
|
|
|
shape = [2, 3, 5, 7]
|
|
|
|
|
eps = 0.005
|
|
|
|
|
dtype = np.float64
|
|
|
|
|
|
|
|
|
|
@ -296,6 +296,60 @@ class TestElementwiseAddBroadcastDoubleGradCheck(unittest.TestCase):
|
|
|
|
|
self.func(p)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class TestElementwiseSubDoubleGradCheck(unittest.TestCase):
    @prog_scope()
    def func(self, place):
        """Run the double-gradient check for elementwise_sub on *place*."""
        # The gradient checker needs fully specified shapes (no -1 dims).
        dims = [2, 3, 5, 7]
        delta = 0.005
        dtype = np.float64

        x = layers.data('x', dims, False, dtype)
        y = layers.data('y', dims, False, dtype)
        # Keep both inputs alive across program execution so the checker
        # can read their gradients back.
        x.persistable = True
        y.persistable = True
        diff = layers.elementwise_sub(x, y)

        init_x = np.random.uniform(-1, 1, dims).astype(dtype)
        init_y = np.random.uniform(-1, 1, dims).astype(dtype)
        gradient_checker.double_grad_check(
            [x, y], diff, x_init=[init_x, init_y], place=place, eps=delta)

    def test_grad(self):
        """Check on CPU, and additionally on GPU when CUDA is compiled in."""
        places = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            places.append(fluid.CUDAPlace(0))
        for place in places:
            self.func(place)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class TestElementwiseSubBroadcastDoubleGradCheck(unittest.TestCase):
    @prog_scope()
    def func(self, place):
        """Double-gradient check for elementwise_sub with broadcasting on *place*."""
        # The gradient checker needs fully specified shapes (no -1 dims).
        dims = [2, 3, 5, 7]
        delta = 0.005
        dtype = np.float64

        x = layers.data('x', dims, False, dtype)
        # y drops the trailing dimension; axis=0 aligns it with x's
        # leading dims so elementwise_sub broadcasts over the last axis.
        y = layers.data('y', dims[:-1], False, dtype)
        # Keep both inputs alive across program execution so the checker
        # can read their gradients back.
        x.persistable = True
        y.persistable = True
        diff = layers.elementwise_sub(x, y, axis=0)

        init_x = np.random.uniform(-1, 1, dims).astype(dtype)
        init_y = np.random.uniform(-1, 1, dims[:-1]).astype(dtype)
        gradient_checker.double_grad_check(
            [x, y], diff, x_init=[init_x, init_y], place=place, eps=delta)

    def test_grad(self):
        """Check on CPU, and additionally on GPU when CUDA is compiled in."""
        places = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            places.append(fluid.CUDAPlace(0))
        for place in places:
            self.func(place)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class TestMulDoubleGradCheck(unittest.TestCase):
|
|
|
|
|
@prog_scope()
|
|
|
|
|
def func(self, place):
|
|
|
|
|
|