fix ModelConfig.proto, addto_layer and concat_layer

Branch: enforce_failed
Author: chengduoZH, 8 years ago
Parent: fb69d38c4b
Commit: eaf23cb1e8

@@ -517,7 +517,7 @@ message LayerConfig {
   optional double delta = 57 [ default = 1.0 ];
   // for 3D data
-  optional double depth = 58 [ default = 1 ];
+  optional uint64 depth = 58 [ default = 1 ];
 }

 message EvaluatorConfig {
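The type fix makes `depth` consistent with its role as a discrete dimension count: a `double` would admit fractional or negative depths that have no meaning for a 3D feature map. A minimal sketch of the shape arithmetic this protects (a hypothetical helper, not code from the repository):

    # The flattened size of a 3D feature map is depth * height * width,
    # which is only well-defined for positive whole-number dimensions.
    def volume_size(depth, height, width):
        for dim in (depth, height, width):
            assert isinstance(dim, int) and dim > 0, \
                'dimensions must be positive integers'
        return depth * height * width

    print(volume_size(2, 4, 8))  # 64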

@@ -2735,9 +2735,21 @@ class AddToLayer(LayerBase):
         super(AddToLayer, self).__init__(
             name, 'addto', 0, inputs=inputs, **xargs)
         config_assert(len(inputs) > 0, 'inputs cannot be empty for AddToLayer')
-        for input_index in xrange(len(self.inputs)):
-            input_layer = self.get_input_layer(input_index)
-            self.set_layer_size(input_layer.size)
+
+        if len(self.inputs) > 1:
+            assert len(self.inputs) == 2
+            assert self.get_input_layer(0).size == self.get_input_layer(1).size
+            assert self.get_input_layer(0).depth == self.get_input_layer(
+                1).depth
+            assert self.get_input_layer(0).height == self.get_input_layer(
+                1).height
+            assert self.get_input_layer(0).width == self.get_input_layer(
+                1).width
+
+        self.set_layer_size(self.get_input_layer(0).size)
+        self.set_layer_height_width(self.get_input_layer(0).height, \
+                                    self.get_input_layer(0).width)
+        self.set_layer_depth(self.get_input_layer(0).depth)
         self.create_bias_parameter(bias, self.config.size)
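The patched AddToLayer now requires its inputs (currently exactly two) to agree in every dimension before propagating the first input's shape, including the new 3D metadata, to the output. A standalone sketch of that rule, using simplified stand-in objects rather than the real LayerBase API:

    from collections import namedtuple

    # Stand-in for a configured input layer (hypothetical, for illustration).
    Shape = namedtuple('Shape', 'size depth height width')

    def addto_output_shape(inputs):
        # Element-wise addition only makes sense when shapes match exactly.
        if len(inputs) > 1:
            assert len(inputs) == 2
            assert inputs[0] == inputs[1], 'addto inputs must have equal shapes'
        # The output inherits the first input's shape, as in the patch.
        return inputs[0]

    a = Shape(size=64, depth=2, height=4, width=8)
    print(addto_output_shape([a, a]))  # Shape(size=64, depth=2, height=4, width=8)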
@@ -3422,6 +3434,9 @@ class ConcatenateLayer(LayerBase):
             if self.config.size == 0:
                 size += input_layer.size
+        self.set_layer_height_width(self.get_input_layer(0).height, \
+                                    self.get_input_layer(0).width)
+        self.set_layer_depth(self.get_input_layer(0).depth)
         self.set_layer_size(size)
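ConcatenateLayer gets the same 3D metadata propagation: sizes still accumulate across inputs, while depth, height, and width are copied from the first input. A sketch of the resulting shape rule (again with stand-in objects, not the real layer classes):

    from collections import namedtuple

    Shape = namedtuple('Shape', 'size depth height width')

    def concat_output_shape(inputs):
        # Sizes add up; spatial metadata comes from the first input.
        first = inputs[0]
        total = sum(inp.size for inp in inputs)
        return Shape(total, first.depth, first.height, first.width)

    a = Shape(size=64, depth=2, height=4, width=8)
    b = Shape(size=32, depth=2, height=4, width=8)
    print(concat_output_shape([a, b]))  # Shape(size=96, depth=2, height=4, width=8)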

@@ -352,6 +352,10 @@ class LayerOutput(object):
     def height(self):
         return cp.g_layer_map[self.full_name].height

+    @property
+    def depth(self):
+        return cp.g_layer_map[self.full_name].depth
+
     def set_input(self, input):
         """
         Set the input for a memory layer. Can only be used for memory layer
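The new `depth` property follows the same pattern as `height`: a read-only lookup into the global layer map keyed by the layer's full name. A self-contained sketch of that pattern, with simplified stand-ins for `cp.g_layer_map` and its records:

    class LayerRecord(object):
        # Simplified stand-in for an entry in cp.g_layer_map.
        def __init__(self, depth, height, width):
            self.depth, self.height, self.width = depth, height, width

    g_layer_map = {'conv3d_1': LayerRecord(depth=2, height=4, width=8)}

    class LayerOutput(object):
        def __init__(self, full_name):
            self.full_name = full_name

        @property
        def depth(self):
            return g_layer_map[self.full_name].depth

    print(LayerOutput('conv3d_1').depth)  # 2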
