@@ -12,30 +12,30 @@ class TestSoftmaxWithCrossEntropyOp(OpTest):
 
     def setUp(self):
         self.op_type = "softmax_with_cross_entropy"
-        batch_size = 3
+        batch_size = 2
         class_num = 37
 
         logits = np.random.uniform(0.1, 1.0,
-                                   [batch_size, class_num]).astype("float32")
+                                   [batch_size, class_num]).astype("float64")
         softmax = np.apply_along_axis(stable_softmax, 1, logits)
-        labels = np.random.randint(0, class_num, [batch_size, 1], dtype="int32")
+        labels = np.random.randint(0, class_num, [batch_size, 1], dtype="int64")
 
         cross_entropy = np.asmatrix(
             [[-np.log(softmax[i][labels[i][0]])]
              for i in range(softmax.shape[0])],
-            dtype="float32")
+            dtype="float64")
 
         self.inputs = {"Logits": logits, "Label": labels}
         self.outputs = {
-            "Softmax": softmax.astype('float32'),
-            "Loss": cross_entropy.astype('float32')
+            "Softmax": softmax.astype("float64"),
+            "Loss": cross_entropy.astype("float64")
         }
 
     def test_check_output(self):
         self.check_output()
 
     def test_check_grad(self):
-        self.check_grad(["Logits"], "Loss", max_relative_error=0.05)
+        self.check_grad(["Logits"], "Loss")
 
 
 class TestSoftmaxWithCrossEntropyOp2(OpTest):
@@ -49,19 +49,19 @@ class TestSoftmaxWithCrossEntropyOp2(OpTest):
         class_num = 37
 
         logits = np.random.uniform(0.1, 1.0,
-                                   [batch_size, class_num]).astype("float32")
+                                   [batch_size, class_num]).astype("float64")
         softmax = np.apply_along_axis(stable_softmax, 1, logits)
         labels = np.random.uniform(0.1, 1.0,
-                                   [batch_size, class_num]).astype("float32")
+                                   [batch_size, class_num]).astype("float64")
         labels /= np.sum(labels, axis=1, keepdims=True)
 
         cross_entropy = (-labels * np.log(softmax)).sum(
-            axis=1, keepdims=True).astype("float32")
+            axis=1, keepdims=True).astype("float64")
 
         self.inputs = {"Logits": logits, "Label": labels}
         self.outputs = {
-            "Softmax": softmax.astype('float32'),
-            "Loss": cross_entropy.astype('float32')
+            "Softmax": softmax.astype("float64"),
+            "Loss": cross_entropy.astype("float64")
         }
         self.attrs = {"soft_label": True}
 
@@ -69,9 +69,8 @@ class TestSoftmaxWithCrossEntropyOp2(OpTest):
         self.check_output()
 
     def test_check_grad(self):
-        self.check_grad(["Logits"], "Loss", max_relative_error=0.05)
+        self.check_grad(["Logits"], "Loss")
 
 
 if __name__ == "__main__":
-    exit(0)  # FIXME: xe has bug
     unittest.main()
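
For reference, below is a minimal, self-contained sketch of the float64 numpy computation the updated tests check against; the `stable_softmax` definition is a stand-in for the helper the test file imports and is an assumption, not the library's implementation.

import numpy as np


def stable_softmax(x):
    # Shift by the row max before exponentiating to avoid overflow.
    shifted = x - np.max(x)
    exps = np.exp(shifted)
    return exps / np.sum(exps)


batch_size, class_num = 2, 37
logits = np.random.uniform(0.1, 1.0, [batch_size, class_num]).astype("float64")
softmax = np.apply_along_axis(stable_softmax, 1, logits)

# Hard labels (TestSoftmaxWithCrossEntropyOp): one int64 index per row,
# loss is -log of the probability assigned to the true class.
labels = np.random.randint(0, class_num, [batch_size, 1], dtype="int64")
hard_loss = np.array(
    [[-np.log(softmax[i][labels[i][0]])] for i in range(batch_size)],
    dtype="float64")

# Soft labels (TestSoftmaxWithCrossEntropyOp2): a normalized distribution
# per row, loss is the full cross entropy summed over classes.
soft_labels = np.random.uniform(0.1, 1.0,
                                [batch_size, class_num]).astype("float64")
soft_labels /= np.sum(soft_labels, axis=1, keepdims=True)
soft_loss = (-soft_labels * np.log(softmax)).sum(
    axis=1, keepdims=True).astype("float64")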