
Commit 5ac46ce

"titu1994#14 Fix missing link from last up block to Conv2D"
Including 'x_up' in the upsampling dense blocks allows the 'include_top' layer to receive the full feature map at the end of the upsampling branch. This commit also changes the 'concatenate' inside __dense_block, as the previous method failed to run on the TensorFlow backend. Tested with Keras 2.0.2 and TensorFlow 1.1.0.
1 parent f01ec9c commit 5ac46ce
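
As a rough illustration of the per-layer concatenation this commit switches to, here is a minimal sketch written against tf.keras (the commit itself was tested on Keras 2.0.2 / TensorFlow 1.1.0). simple_conv_block and simple_dense_block are illustrative stand-ins, not the repository's __conv_block / __dense_block.

from tensorflow.keras import layers

def simple_conv_block(x, growth_rate):
    # Simplified BN-ReLU-Conv composite function producing `growth_rate` new feature maps.
    x = layers.BatchNormalization()(x)
    x = layers.Activation('relu')(x)
    x = layers.Conv2D(growth_rate, (3, 3), padding='same', use_bias=False)(x)
    return x

def simple_dense_block(x, nb_layers, growth_rate, concat_axis=-1):
    # Keep every layer's output so callers can still request the full concat list,
    # but grow the running tensor by concatenating only [x, cb] at each step,
    # which is the pattern the commit adopts for the TensorFlow backend.
    x_list = [x]
    for _ in range(nb_layers):
        cb = simple_conv_block(x, growth_rate)
        x_list.append(cb)
        x = layers.concatenate([x, cb], axis=concat_axis)
    return x, x_list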

File tree: 1 file changed (+10 −9 lines)

densenet.py

Lines changed: 10 additions & 9 deletions
@@ -381,10 +381,11 @@ def __dense_block(x, nb_layers, nb_filter, growth_rate, bottleneck=False, dropou
     x_list = [x]
 
     for i in range(nb_layers):
-        x = __conv_block(x, growth_rate, bottleneck, dropout_rate, weight_decay)
-        x_list.append(x)
+        cb = __conv_block(x, growth_rate, bottleneck, dropout_rate, weight_decay)
+        x_list.append(cb)
 
-        x = concatenate(x_list, axis=concat_axis)
+        # x = concatenate(x_list, axis=concat_axis)
+        x = concatenate([x, cb], axis=concat_axis)
 
         if grow_nb_filters:
             nb_filter += growth_rate
@@ -613,14 +614,14 @@ def __create_fcn_dense_net(nb_classes, img_input, include_top, nb_dense_block=5,
         x = concatenate([t, skip_list[block_idx]], axis=concat_axis)
 
         # Dont allow the feature map size to grow in upsampling dense blocks
-        _, nb_filter, concat_list = __dense_block(x, nb_layers[nb_dense_block + block_idx + 1], nb_filter=growth_rate,
-                                                  growth_rate=growth_rate, dropout_rate=dropout_rate,
-                                                  weight_decay=weight_decay,
-                                                  return_concat_list=True, grow_nb_filters=False)
+        x_up, nb_filter, concat_list = __dense_block(x, nb_layers[nb_dense_block + block_idx + 1], nb_filter=growth_rate,
+                                                     growth_rate=growth_rate, dropout_rate=dropout_rate,
+                                                     weight_decay=weight_decay,
+                                                     return_concat_list=True, grow_nb_filters=False)
 
     if include_top:
         x = Conv2D(nb_classes, (1, 1), activation='linear', padding='same', kernel_regularizer=l2(weight_decay),
-                   use_bias=False)(x)
+                   use_bias=False)(x_up)
 
         if K.image_data_format() == 'channels_first':
             channel, row, col = input_shape
@@ -631,4 +632,4 @@ def __create_fcn_dense_net(nb_classes, img_input, include_top, nb_dense_block=5,
         x = Activation(activation)(x)
         x = Reshape((row, col, nb_classes))(x)
 
-    return x
+    return x
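
For context on the second half of the fix: with x_up returned from the final upsampling dense block, the include_top classifier reads the full upsampled feature map instead of the pre-block tensor. Below is a hedged tf.keras sketch of that head; classification_head is an illustrative name, not a function from this repository, and weight_decay is an assumed default.

from tensorflow.keras import layers
from tensorflow.keras.regularizers import l2

def classification_head(x_up, nb_classes, weight_decay=1e-4):
    # 1x1 linear convolution over the full upsampled feature map, mirroring
    # the include_top branch of __create_fcn_dense_net after this commit.
    return layers.Conv2D(nb_classes, (1, 1), activation='linear', padding='same',
                         kernel_regularizer=l2(weight_decay),
                         use_bias=False)(x_up)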
