@@ -68,14 +68,14 @@ def conv_output_height(height, padding, kernel_size, stride):
         return (height + 2 * padding - kernel_size) / stride + 1


-    def activation(self, block, pre_node_name, input_shape):
+    def activation(self, block, pre_node_name, input_shape, id):
         if block['activation'] != 'linear':
             relu_layer = OrderedDict()
             relu_layer['input'] = [pre_node_name]
             if 'name' in block.keys():
                 relu_layer['name'] = '%s-act' % block['name']
             else:
-                relu_layer['name'] = 'layer%d-act' % i
+                relu_layer['name'] = 'layer%d-act' % id
             relu_layer['type'] = 'ReLU'
             relu_param = OrderedDict()
             if block['activation'] == 'leaky':
@@ -180,7 +180,7 @@ def build(self):

                     pre_node_name = bn_layer['name']

-                pre_node_name = self.activation(block, pre_node_name, input_shape)
+                pre_node_name = self.activation(block, pre_node_name, input_shape, i)


             elif block['type'] == 'maxpool':
@@ -301,7 +301,7 @@ def build(self):
                 self.layer_num_map[i] = shortcut_layer['name']
                 pre_node_name = shortcut_layer['name']

-                pre_node_name = self.activation(block, pre_node_name, input_shape)
+                pre_node_name = self.activation(block, pre_node_name, input_shape, i)

             elif block['type'] == 'connected':
                 fc_layer = OrderedDict()
@@ -321,7 +321,7 @@ def build(self):
                 self.layer_num_map[i] = fc_layer['name']
                 pre_node_name = fc_layer['name']

-                pre_node_name = self.activation(block, pre_node_name, input_shape)
+                pre_node_name = self.activation(block, pre_node_name, input_shape, i)

             elif block['type'] == 'softmax':
                 sm_layer = OrderedDict()
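
For reference, a minimal standalone sketch (not part of the diff) of the naming fallback this change fixes: the layer index is now passed in explicitly instead of referencing a loop variable `i` that is undefined inside the method. The helper name `make_act_layer` and the sample block dict are hypothetical, chosen only to illustrate the fallback.

from collections import OrderedDict

# Hypothetical sketch of the fixed naming logic: the caller passes the block
# index explicitly (the new `id` argument in the diff), so the fallback name
# no longer depends on a variable that is undefined in the helper's scope.
def make_act_layer(block, pre_node_name, layer_id):
    relu_layer = OrderedDict()
    relu_layer['input'] = [pre_node_name]
    if 'name' in block:
        relu_layer['name'] = '%s-act' % block['name']
    else:
        relu_layer['name'] = 'layer%d-act' % layer_id  # was `% i`, a NameError
    relu_layer['type'] = 'ReLU'
    return relu_layer

# Usage: an unnamed darknet block falls back to its index for the layer name.
print(make_act_layer({'activation': 'leaky'}, 'conv3', 3)['name'])  # layer3-act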