|
|
|
@ -89,6 +89,36 @@ tmp = img_pool_layer(input=tmp,
|
|
|
|
|
padding=1,
|
|
|
|
|
pool_type=MaxPooling())
|
|
|
|
|
|
|
|
|
|
# 3x3 convolution, 32 filters, padding=1 (spatial size preserved).
# Activation is linear and bias is disabled because the batch_norm_layer
# that follows provides the affine shift/scale and the nonlinearity.
tmp = img_conv_layer(
    input=tmp,
    filter_size=3,
    num_filters=32,
    padding=1,
    shared_biases=True,
    act=LinearActivation(),
    bias_attr=False)
|
|
|
|
|
|
|
|
|
|
# Batch-normalize the conv output using mini-batch statistics
# (use_global_stats=False, i.e. training-mode stats), then apply ReLU.
tmp = batch_norm_layer(
    input=tmp,
    use_global_stats=False,
    act=ReluActivation())
|
|
|
|
|
|
|
|
|
|
# Branch 1: 1x1 convolution (no padding needed), 32 filters, ReLU.
# Kept as a separate variable so it can be merged with the 3x3 branch below.
c1 = img_conv_layer(
    input=tmp,
    filter_size=1,
    num_filters=32,
    padding=0,
    shared_biases=True,
    act=ReluActivation())
|
|
|
|
|
|
|
|
|
|
# Branch 2: 3x3 convolution with padding=1 so its output spatial size
# matches branch 1 (required for the element-wise addto merge below).
c2 = img_conv_layer(
    input=tmp,
    filter_size=3,
    num_filters=32,
    padding=1,
    shared_biases=True,
    act=ReluActivation())
|
|
|
|
|
|
|
|
|
|
# Merge the two conv branches element-wise (sum), then ReLU.
# No bias term is added at the merge point.
tmp = addto_layer(
    input=[c1, c2],
    act=ReluActivation(),
    bias_attr=False)
|
|
|
|
|
|
|
|
|
|
# Fully-connected projection to 64 units with tanh activation; bias disabled.
tmp = fc_layer(
    input=tmp,
    size=64,
    bias_attr=False,
    act=TanhActivation())
|
|
|
|
|