@@ -19,6 +19,7 @@ from ..layer_helper import LayerHelper
from ..initializer import Normal, Constant
from ..framework import Variable
from ..param_attr import ParamAttr
from layer_function_generator import autodoc
from tensor import concat
@@ -57,6 +58,7 @@ __all__ = [
    'warpctc',
    'sequence_reshape',
    'transpose',
    'nce',
]
@@ -2190,6 +2192,61 @@ def sequence_reshape(input, new_dim):
    return out

@autodoc()
def nce(input,
        label,
        num_total_classes,
        sample_weight=None,
        param_attr=None,
        bias_attr=None,
        num_neg_samples=None):
    helper = LayerHelper('nce', **locals())
    assert isinstance(input, Variable)
    dim = input.shape[1]
    assert isinstance(label, Variable)
    num_true_class = label.shape[1]
    w = helper.create_parameter(
        attr=helper.param_attr,
        shape=[num_total_classes, dim],
        is_bias=False,
        dtype=input.dtype)
    b = helper.create_parameter(
        attr=helper.bias_attr,
        shape=[num_total_classes, 1],
        is_bias=True,
        dtype=input.dtype)
    cost = helper.create_tmp_variable(dtype=input.dtype)
    sample_logits = helper.create_tmp_variable(dtype=input.dtype)
    sample_labels = helper.create_tmp_variable(dtype=label.dtype)

    if num_neg_samples is None:
        num_neg_samples = 10
    else:
        num_neg_samples = int(num_neg_samples)

    attrs = {
        'num_total_classes': int(num_total_classes),
        'num_neg_samples': num_neg_samples
    }

    helper.append_op(
        type='nce',
        inputs={
            'Input': input,
            'Label': label,
            'Weight': w,
            'Bias': b,
            'SampleWeight': sample_weight if sample_weight is not None else []
        },
        outputs={
            'Cost': cost,
            'SampleLogits': sample_logits,
            'SampleLabels': sample_labels
        },
        attrs=attrs)
    return cost / (num_neg_samples + 1)
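
For reference, a rough usage sketch of the new nce layer (not part of this patch). It assumes the paddle.v2.fluid-era layers.data, layers.embedding, layers.concat and layers.mean wrappers from the same module; dict_size, window_size, emb_dim and the word_<i> input names are made-up placeholders, so treat this as an illustration of the signature above rather than a tested snippet.

    # Usage sketch only -- names and sizes below are assumptions, not part of the patch.
    import paddle.v2.fluid.layers as layers

    dict_size, window_size, emb_dim = 10000, 5, 32

    # One int64 input per position in the context window.
    words = [
        layers.data(name='word_%d' % i, shape=[1], dtype='int64')
        for i in range(window_size)
    ]
    label_word = window_size // 2  # predict the middle word from its neighbours

    # Embed every context word except the one being predicted.
    embs = []
    for i in range(window_size):
        if i == label_word:
            continue
        embs.append(layers.embedding(input=words[i], size=[dict_size, emb_dim]))

    context = layers.concat(input=embs, axis=1)
    loss = layers.nce(
        input=context,
        label=words[label_word],
        num_total_classes=dict_size,
        num_neg_samples=10)
    avg_loss = layers.mean(x=loss)
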
def transpose(x, perm, name=None):
    """
    **transpose Layer**