def dense_block(x, depth=5, growth_rate=3):
    """Build one DenseNet dense block on top of ``x``.

    Runs ``depth`` iterations of ``composite_function`` and concatenates each
    new feature map onto the running stack along the channel axis (axis=3,
    i.e. channels-last layout), so every layer sees all preceding features.

    Args:
        x: 4-D input tensor, assumed channels-last (batch, h, w, c) —
           TODO confirm against the model's data_format.
        depth: number of composite layers in the block.
        growth_rate: channels added by each composite layer.

    Returns:
        Tensor with ``x.shape[3] + depth * growth_rate`` channels.
    """
    stored_features = x
    for _ in range(depth):
        # Each new feature map is appended to everything produced so far.
        feature = composite_function(stored_features, growth_rate=growth_rate)
        stored_features = concatenate([stored_features, feature], axis=3)
    return stored_features
def composite_function(x, growth_rate):
    """BN -> ReLU -> Conv composite layer from the DenseNet paper.

    When the module-level flag ``DenseNetB`` is truthy, a 1x1 "bottleneck"
    convolution with ``4 * growth_rate`` filters is applied first (the
    DenseNet-B variant) before the standard 3x3 convolution.

    Args:
        x: input tensor (channels-last assumed — TODO confirm).
        growth_rate: number of output filters of the final 3x3 convolution.

    Returns:
        Tensor with ``growth_rate`` channels and the same spatial size
        (stride 1, 'same' padding).
    """
    if DenseNetB:  # DenseNet-B: 1x1 bottleneck before the 3x3 conv
        x = BatchNormalization()(x)
        x = Activation('relu')(x)
        x = Conv2D(kernel_size=(1, 1), strides=1,
                   filters=4 * growth_rate, padding='same')(x)
    x = BatchNormalization()(x)
    x = Activation('relu')(x)
    output = Conv2D(kernel_size=(3, 3), strides=1,
                    filters=growth_rate, padding='same')(x)
    return output
def dense_net(input_image, nb_blocks=2):
    """Assemble a small DenseNet classifier over ``input_image``.

    Structure: initial 3x3 conv (8 filters) -> ``nb_blocks`` dense blocks,
    with a transition layer (optional 1x1 compression conv when the
    module-level ``DenseNetC`` flag is set, then 2x2 average pooling)
    between consecutive blocks -> Flatten -> Dense(100) -> Dense(10, softmax).

    Relies on module-level config: ``NB_DEPTH``, ``GROWTH_RATE``,
    ``DenseNetC``, ``COMPRESSION_FACTOR`` — presumably hyperparameter
    constants; verify they are defined before calling.

    Args:
        input_image: 4-D input tensor (batch, h, w, c) — channels-last
            assumed, TODO confirm.
        nb_blocks: number of dense blocks.

    Returns:
        10-way softmax output tensor.
    """
    x = Conv2D(kernel_size=(3, 3), filters=8, strides=1,
               padding='same', activation='relu')(input_image)
    for block in range(nb_blocks):
        x = dense_block(x, depth=NB_DEPTH, growth_rate=GROWTH_RATE)
        # Transition layer between blocks only (not after the last block).
        if block != nb_blocks - 1:
            if DenseNetC:
                # DenseNet-C: compress channel count by COMPRESSION_FACTOR.
                theta = COMPRESSION_FACTOR
                # NOTE(review): .value is TF1-style Dimension access; on TF2
                # this would be plain x.shape[3] — confirm TF version.
                nb_transition_filter = int(x.shape[3].value * theta)
                x = Conv2D(kernel_size=(1, 1), filters=nb_transition_filter,
                           strides=1, padding='same', activation='relu')(x)
            x = AveragePooling2D(pool_size=(2, 2), strides=2)(x)
    x = Flatten()(x)
    x = Dense(100, activation='relu')(x)
    outputs = Dense(10, activation='softmax', kernel_initializer='he_normal')(x)
    return outputs