@@ -883,8 +883,6 @@ class MarginRankingLoss(fluid.dygraph.Layer):
 
 class CTCLoss(fluid.dygraph.Layer):
     """
-    :alias_main: paddle.nn.CTCLoss
-    :alias: paddle.nn.CTCLoss, paddle.nn.layer.CTCLoss, paddle.nn.layer.loss.CTCLoss
 
     An operator integrating the open source Warp-CTC library (https://github.com/baidu-research/warp-ctc)
     to compute Connectionist Temporal Classification (CTC) loss.
@@ -941,7 +939,6 @@ class CTCLoss(fluid.dygraph.Layer):
             input_lengths = np.array([5, 5]).astype("int64")
             label_lengths = np.array([3, 3]).astype("int64")
 
-            paddle.disable_static()
             log_probs = paddle.to_tensor(log_probs)
             labels = paddle.to_tensor(labels)
             input_lengths = paddle.to_tensor(input_lengths)
@@ -950,12 +947,12 @@ class CTCLoss(fluid.dygraph.Layer):
             loss = paddle.nn.CTCLoss(blank=0, reduction='none')(log_probs, labels,
                 input_lengths,
                 label_lengths)
-            print(loss.numpy()) #[3.9179852 2.9076521]
+            print(loss) #[3.9179852 2.9076521]
 
             loss = paddle.nn.CTCLoss(blank=0, reduction='mean')(log_probs, labels,
                 input_lengths,
                 label_lengths)
-            print(loss.numpy()) #[1.1376063]
+            print(loss) #[1.1376063]
     """
 
     def __init__(self, blank=0, reduction='mean'):
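
For reference, a minimal self-contained sketch of the updated docstring example. The lines that build log_probs and labels fall outside these hunks, so the shapes and values below are illustrative assumptions (max_logit_length=5, batch_size=2, num_classes plus blank = 3), not the elided originals; the printed losses will therefore differ from the values in the docstring comments.

import numpy as np
import paddle

# Assumed placeholder inputs: log_probs of shape [max_logit_length, batch_size, num_classes + 1],
# int32 labels of shape [batch_size, max_label_length], and per-sample int64 lengths.
log_probs = paddle.to_tensor(
    np.random.uniform(0.1, 1.0, [5, 2, 3]).astype("float32"))
labels = paddle.to_tensor(np.array([[1, 2, 2], [1, 2, 2]]).astype("int32"))
input_lengths = paddle.to_tensor(np.array([5, 5]).astype("int64"))
label_lengths = paddle.to_tensor(np.array([3, 3]).astype("int64"))

# reduction='none' returns one loss per sample; reduction='mean' divides each
# loss by its label length and averages over the batch.
loss = paddle.nn.CTCLoss(blank=0, reduction='none')(log_probs, labels,
                                                    input_lengths, label_lengths)
print(loss)

loss = paddle.nn.CTCLoss(blank=0, reduction='mean')(log_probs, labels,
                                                    input_lengths, label_lengths)
print(loss)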