|
|
|
@ -105,5 +105,107 @@ class TestCrossEntropyOp3(OpTest):
|
|
|
|
|
["X"], "Y", max_relative_error=0.05, numeric_grad_delta=0.001)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class TestCrossEntropyOp4(OpTest):
    """Test high rank tensor cross-entropy with discrete one-hot labels.

    The reference result is computed on a flattened 2-D view
    (instances x classes); inputs and the expected output are then
    reshaped back to the high-rank shape before being handed to the op.
    """

    def setUp(self):
        self.op_type = "cross_entropy"
        shape = [10, 2, 4]
        # Number of instances once the leading dims are flattened.
        ins_num = np.prod(np.array(shape))
        class_num = 10

        # float64 input for tighter numeric-gradient agreement.
        X_2d = randomize_probability(ins_num, class_num, dtype='float64')

        label_2d = np.random.randint(0, class_num, (ins_num, 1), dtype="int64")
        # Per-instance negative log-likelihood of the labeled class.
        # np.array replaces the deprecated np.asmatrix (np.matrix is
        # deprecated and slated for removal in NumPy); the values and the
        # (ins_num, 1) shape are identical.
        cross_entropy_2d = np.array(
            [[-np.log(X_2d[i][label_2d[i][0]])] for i in range(X_2d.shape[0])],
            dtype="float64")

        X = X_2d.reshape(shape + [class_num])
        label = label_2d.reshape(shape + [1])
        cross_entropy = cross_entropy_2d.reshape(shape + [1])

        self.inputs = {"X": X, "Label": label}
        self.outputs = {"Y": cross_entropy}
        # Hard (index) labels, not soft-label mode.
        self.attrs = {"soft_label": False}

    def test_check_output(self):
        self.check_output()

    def test_check_grad(self):
        self.check_grad(["X"], "Y", numeric_grad_delta=0.001)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class TestCrossEntropyOp5(OpTest):
    """Test high rank tensor cross-entropy with vectorized soft labels.
    """

    def setUp(self):
        self.op_type = "cross_entropy"
        shape = [4, 3]
        ins_num = np.prod(np.array(shape))
        class_num = 37

        # Work on a flattened (instances x classes) view; reshape at the end.
        probs = randomize_probability(ins_num, class_num)
        soft_labels = np.random.uniform(
            0.1, 1.0, [ins_num, class_num]).astype("float32")
        # Normalize each row so it is a valid probability distribution.
        soft_labels /= soft_labels.sum(axis=1, keepdims=True)
        # Reference soft-label cross-entropy, one value per instance.
        xent = (-soft_labels * np.log(probs)).sum(
            axis=1, keepdims=True).astype("float32")

        self.inputs = {
            "X": probs.reshape(shape + [class_num]),
            "Label": soft_labels.reshape(shape + [class_num]),
        }
        self.outputs = {"Y": np.array(xent).reshape(shape + [1])}
        self.attrs = {"soft_label": True}

    def test_check_output(self):
        self.check_output()

    def test_check_grad(self):
        self.check_grad(
            ["X"], "Y", max_relative_error=0.05, numeric_grad_delta=0.001)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class TestCrossEntropyOp6(OpTest):
    """Test high rank tensor cross-entropy with vectorized one-hot representation of labels.

    Index labels are drawn first, expanded to a one-hot matrix, and the op
    is exercised in soft-label mode; the reference result is the negative
    log-likelihood of the indexed class.
    """

    def setUp(self):
        self.op_type = "cross_entropy"
        shape = [4, 3, 2]
        # Number of instances once the leading dims are flattened.
        ins_num = np.prod(np.array(shape))
        class_num = 17

        X_2d = randomize_probability(ins_num, class_num)
        label_index_2d = np.random.randint(
            0, class_num, (ins_num), dtype="int32")
        # One-hot encode the drawn indices.
        label_2d = np.zeros(X_2d.shape)
        label_2d[np.arange(ins_num), label_index_2d] = 1

        # np.array replaces the deprecated np.asmatrix (np.matrix is
        # deprecated and slated for removal in NumPy); values and the
        # (ins_num, 1) shape are identical.
        cross_entropy_2d = np.array(
            [[-np.log(X_2d[i][label_index_2d[i]])]
             for i in range(X_2d.shape[0])],
            dtype="float32")

        X = X_2d.reshape(shape + [class_num])
        label = label_2d.reshape(shape + [class_num])
        cross_entropy = cross_entropy_2d.reshape(shape + [1])

        # Soft-label mode requires a float Label input.
        self.inputs = {"X": X, "Label": label.astype(np.float32)}
        self.outputs = {"Y": cross_entropy}
        self.attrs = {"soft_label": True}

    def test_check_output(self):
        self.check_output()

    def test_check_grad(self):
        self.check_grad(
            ["X"], "Y", max_relative_error=0.05, numeric_grad_delta=0.001)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Run the op tests when this file is executed directly.
if __name__ == "__main__":
    unittest.main()
|
|
|
|
|