fix gpu test bug

enforce_failed
qijun 8 years ago
parent 090b8114e6
commit 15627e48d8

@ -197,7 +197,7 @@ class OpTest(unittest.TestCase):
def check_output(self): def check_output(self):
places = [core.CPUPlace()] places = [core.CPUPlace()]
if core.is_compile_gpu() and self.op.support_gpu(): if core.is_compile_gpu():
places.append(core.GPUPlace(0)) places.append(core.GPUPlace(0))
for place in places: for place in places:
self.check_output_with_place(place) self.check_output_with_place(place)
@ -270,6 +270,6 @@ class OpTest(unittest.TestCase):
for c_grad, g_grad, name in itertools.izip( for c_grad, g_grad, name in itertools.izip(
cpu_analytic_grads, gpu_analytic_grads, grad_names): cpu_analytic_grads, gpu_analytic_grads, grad_names):
self.assertTrue( self.assertTrue(
numpy.allclose( np.allclose(
c_grad, g_grad, atol=1e-4), c_grad, g_grad, atol=1e-4),
"output name: " + name + " has diff") "output name: " + name + " has diff")

@ -8,7 +8,7 @@ class TestCrossEntropy(OpTest):
self.op_type = "onehot_cross_entropy" self.op_type = "onehot_cross_entropy"
batch_size = 30 batch_size = 30
class_num = 10 class_num = 10
X = numpy.random.random((batch_size, class_num)).astype("float32") X = numpy.random.uniform(0.1, 1.0, [batch_size, class_num]).astype("float32")
label = (class_num / 2) * numpy.ones(batch_size).astype("int32") label = (class_num / 2) * numpy.ones(batch_size).astype("int32")
self.inputs = {'X': X, 'label': label} self.inputs = {'X': X, 'label': label}
Y = [] Y = []

Loading…
Cancel
Save