|
|
|
@@ -219,6 +219,47 @@ class CrossEntropyLoss(unittest.TestCase):
|
|
|
|
|
self.assertTrue(np.allclose(static_ret, expected))
|
|
|
|
|
self.assertTrue(np.allclose(dy_ret_value, expected))
|
|
|
|
|
|
|
|
|
|
def test_cross_entropy_loss_1d_with_weight_none_func(self):
|
|
|
|
|
input_np = np.random.random([100, 200]).astype(np.float64) #N,C
|
|
|
|
|
label_np = np.random.randint(0, 100, size=(100)).astype(np.int64) #N
|
|
|
|
|
weight_np = np.random.random([200]).astype(np.float64) #C
|
|
|
|
|
paddle.enable_static()
|
|
|
|
|
prog = fluid.Program()
|
|
|
|
|
startup_prog = fluid.Program()
|
|
|
|
|
place = fluid.CUDAPlace(0) if fluid.core.is_compiled_with_cuda(
|
|
|
|
|
) else fluid.CPUPlace()
|
|
|
|
|
with fluid.program_guard(prog, startup_prog):
|
|
|
|
|
input = fluid.data(name='input', shape=[100, 200], dtype='float64')
|
|
|
|
|
label = fluid.data(name='label', shape=[100], dtype='int64')
|
|
|
|
|
weight = fluid.data(name='weight', shape=[200], dtype='float64')
|
|
|
|
|
ret = paddle.nn.functional.cross_entropy(
|
|
|
|
|
input, label, weight=weight, reduction='none')
|
|
|
|
|
|
|
|
|
|
exe = fluid.Executor(place)
|
|
|
|
|
static_ret = exe.run(prog,
|
|
|
|
|
feed={
|
|
|
|
|
'input': input_np,
|
|
|
|
|
'label': label_np,
|
|
|
|
|
"weight": weight_np
|
|
|
|
|
},
|
|
|
|
|
fetch_list=[ret])
|
|
|
|
|
static_ret = np.squeeze(static_ret)
|
|
|
|
|
self.assertIsNotNone(static_ret)
|
|
|
|
|
with fluid.dygraph.guard():
|
|
|
|
|
dy_ret = paddle.nn.functional.cross_entropy(
|
|
|
|
|
fluid.dygraph.to_variable(input_np),
|
|
|
|
|
fluid.dygraph.to_variable(label_np),
|
|
|
|
|
weight=fluid.dygraph.to_variable(weight_np),
|
|
|
|
|
reduction='none')
|
|
|
|
|
dy_ret_value = dy_ret.numpy()
|
|
|
|
|
dy_ret_value = np.squeeze(dy_ret_value)
|
|
|
|
|
self.assertIsNotNone(dy_ret_value)
|
|
|
|
|
expected = cross_entropy_loss_1d(
|
|
|
|
|
input_np, label_np, weight=weight_np, reduction='none')
|
|
|
|
|
self.assertTrue(np.allclose(static_ret, dy_ret_value))
|
|
|
|
|
self.assertTrue(np.allclose(static_ret, expected))
|
|
|
|
|
self.assertTrue(np.allclose(dy_ret_value, expected))
|
|
|
|
|
|
|
|
|
|
def test_cross_entropy_loss_1d_mean(self):
|
|
|
|
|
input_np = np.random.random([100, 200]).astype(np.float64) #N,C
|
|
|
|
|
label_np = np.random.randint(0, 100, size=(100)).astype(np.int64) #N,1
|
|
|
|
|