@@ -5,7 +5,8 @@ import re
 
 __all__ = [
     'fc', 'data', 'cross_entropy', 'conv2d', 'pool2d', 'embedding', 'concat',
-    'StaticRNN', 'cast', 'sequence_conv', 'sequence_pool', 'accuracy'
+    'StaticRNN', 'cast', 'sequence_conv', 'sequence_pool', 'sums', 'cos_sim',
+    'batch_norm', 'accuracy'
 ]
 
 
@@ -165,18 +166,6 @@ _create_op_func_('dropout')
 _create_op_func_('reshape')
 
 
-def cast(x, data_type, program=None):
-    helper = LayerHelper('cast', **locals())
-    out = helper.create_tmp_variable(dtype=data_type)
-    helper.append_op(
-        type='cast',
-        inputs={'X': [x]},
-        outputs={'Out': [out]},
-        attrs={'in_data_type': x.data_type,
-               'out_data_type': out.data_type})
-    return out
-
-
 def cast(x, data_type, program=None):
     helper = LayerHelper('cast', **locals())
     out = helper.create_tmp_variable(dtype=data_type)
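The hunk above drops a verbatim duplicate of cast() that had crept into the file; the surviving definition is unchanged. For reference, a minimal usage sketch of the kept layer (the paddle.v2.framework.layers import path and the variable names are assumptions, not part of this diff):

    import paddle.v2.framework.layers as layers

    # Cast integer ids to floats; cast() records both the source and the
    # destination dtype as attributes on the generated 'cast' op.
    ids = layers.data(name='ids', shape=[1], data_type='int64')
    ids_float = layers.cast(x=ids, data_type='float32')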
@@ -191,9 +180,7 @@ def cast(x, data_type, program=None):
 
 def concat(input, axis, program=None, init_program=None):
     helper = LayerHelper('concat', **locals())
-    if not isinstance(input, list) and not isinstance(input, tuple):
-        input = [input]
-    out = helper.create_tmp_variable(dtype=input[0].data_type)
+    out = helper.create_tmp_variable(dtype=helper.input_dtype())
     helper.append_op(
         type='concat',
         inputs={'X': input},
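Dropping the isinstance() normalization means concat no longer wraps a bare variable for the caller, and switching to helper.input_dtype() lets the helper derive the output dtype from all inputs rather than trusting input[0]. A sketch of the calling convention this implies (import path and names assumed):

    import paddle.v2.framework.layers as layers

    a = layers.data(name='a', shape=[16], data_type='float32')
    b = layers.data(name='b', shape=[16], data_type='float32')

    # input must now be a list (or tuple) of variables; a bare variable
    # is no longer wrapped automatically.
    ab = layers.concat(input=[a, b], axis=1)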
@@ -202,6 +189,28 @@ def concat(input, axis, program=None, init_program=None):
     return out
 
 
+def sums(input, program=None, init_program=None):
+    helper = LayerHelper('sum', **locals())
+    out = helper.create_tmp_variable(dtype=helper.input_dtype())
+    helper.append_op(type='sum', inputs={'X': [input]}, outputs={'Out': out})
+    return out
+
+
+def cos_sim(X, Y, program=None, init_program=None):
+    helper = LayerHelper('cos_sim', **locals())
+    out = helper.create_tmp_variable(dtype=helper.input_dtype("X"))
+    xnorm = helper.create_tmp_variable(dtype=helper.input_dtype("X"))
+    ynorm = helper.create_tmp_variable(dtype=helper.input_dtype("X"))
+    helper.append_op(
+        type='cos_sim',
+        inputs={'X': [X],
+                'Y': [Y]},
+        outputs={'Out': [out],
+                 'XNorm': [xnorm],
+                 'YNorm': [ynorm]})
+    return out, xnorm, ynorm
+
+
 def cross_entropy(input, label, **kwargs):
     helper = LayerHelper('cross_entropy', **kwargs)
     out = helper.create_tmp_variable(dtype=input.data_type)
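Of the two new layers, sums maps onto the 'sum' op and cos_sim returns the similarity together with the two norms the op produces as side outputs. Note that sums passes inputs={'X': [input]}, handing the op a single-element list, so callers are expected to pass one variable; a Python list would be nested one level deeper. A minimal composition sketch (import path and names assumed):

    import paddle.v2.framework.layers as layers

    x = layers.data(name='x', shape=[32], data_type='float32')
    y = layers.data(name='y', shape=[32], data_type='float32')

    # cos_sim returns three variables: the similarity and both norms.
    sim, xnorm, ynorm = layers.cos_sim(X=x, Y=y)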
@@ -254,9 +263,7 @@ def accuracy(input, label, k=1, **kwargs):
 
 def sequence_conv(input,
                   num_filters,
-                  name=None,
                   filter_size=3,
-                  act=None,
                   stride=1,
                   padding=None,
                   bias_attr=None,
@@ -270,7 +277,7 @@ def sequence_conv(input,
     helper = LayerHelper('sequence_conv', **locals())
     dtype = helper.input_dtype()
 
-    filter_shape = [num_filters, filter_size]
+    filter_shape = [filter_size * input.shape[1], num_filters]
     filter = helper.create_parameter(
         attr=helper.param_attr, shape=filter_shape, dtype=dtype)
     pre_bias = helper.create_tmp_variable(dtype)
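The corrected filter shape follows from how sequence_conv consumes its input: each output row is computed from a context window of filter_size consecutive input rows, so the flattened projection matrix needs filter_size * input.shape[1] rows and one column per output filter. Illustrative arithmetic (the numbers are hypothetical):

    # hidden width H = 128, filter_size = 3, num_filters = 64:
    # each window concatenates 3 rows of width 128 -> 384 inputs,
    # so the filter parameter is 384 x 64, not 64 x 3 as before.
    filter_shape = [3 * 128, 64]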
@@ -279,7 +286,7 @@ def sequence_conv(input,
         type='sequence_conv',
         inputs={
             'X': [input],
-            'Filter': filter,
+            'Filter': [filter],
         },
         outputs={"Out": pre_bias},
         attrs={
@@ -287,7 +294,6 @@ def sequence_conv(input,
             'context_start': 0,
             'context_length': filter_size
         })
-
     pre_act = helper.append_bias_op(pre_bias)
     return helper.append_activation(pre_act)
 
@@ -344,31 +350,32 @@ def conv2d(input,
     return helper.append_activation(pre_act)
 
 
-def sequence_pool(input,
-                  pool_size,
-                  pool_type,
-                  pool_stride=1,
-                  pool_padding=0,
-                  global_pooling=False,
-                  program=None,
-                  init_program=None):
+def sequence_pool(input, pool_type, program=None, init_program=None):
     # FIXME(dzh) : want to unify the argument of python layer
     # function. So we ignore some unecessary attributes
 
-    ENUM_POOL_TYPE = set(["max", "avg", "sqrt", "last", "first"])
-    if pool_type not in ENUM_POOL_TYPE:
+    ENUM_POOL_TYPE = dict({
+        "AVERAGE": 0,
+        "SUM": 1,
+        "SQRT": 2,
+        "MAX": 3,
+        "LAST": 4,
+        "FIRST": 5
+    })
+    if pool_type.upper() not in ENUM_POOL_TYPE:
         raise ValueError("Unknown pool_type: '%s'. It can only be %s.",
-                         str(pool_type), " ".join(ENUM_POOL_TYPE))
+                         str(pool_type), " ".join(ENUM_POOL_TYPE.keys()))
 
     helper = LayerHelper('sequence_pool', **locals())
     dtype = helper.input_dtype()
     pool_out = helper.create_tmp_variable(dtype)
 
+    # FIXME(dzh): strategy
    helper.append_op(
         type="sequence_pool",
         inputs={"X": [input]},
-        outputs={"Out": pool_out},
-        attrs={"strategy": pool_type})
+        outputs={"Out": [pool_out]},
+        attrs={"strategy": ENUM_POOL_TYPE[pool_type.upper()]})
 
     return pool_out
 
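After this rewrite sequence_pool takes only the strategy name, matches it case-insensitively against ENUM_POOL_TYPE, and passes the mapped integer as the op's strategy attribute (e.g. 'max' -> 3). A minimal usage sketch (import path and names assumed; the input must be a LoD sequence, here produced by an embedding lookup):

    import paddle.v2.framework.layers as layers

    words = layers.data(name='words', shape=[1], data_type='int64')
    emb = layers.embedding(input=words, size=[10000, 32])

    # 'max' is upper-cased and looked up in ENUM_POOL_TYPE -> strategy=3.
    pooled = layers.sequence_pool(input=emb, pool_type='max')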