I am trying to follow a fully convolutional DenseNet in Keras 2. I updated the function calls in some of the layers. The Keras 1 Deconvolution2D layer:

def transition_up(added, wd=0): 
    x = concat(added) 
    _, r, c, ch = x.get_shape().as_list() 
    return Deconvolution2D(ch, 3, 3, (None,r*2,c*2,ch), init='he_uniform', 
          border_mode='same', subsample=(2,2), W_regularizer=l2(wd))(x) 

was updated to the Keras 2 syntax:

def transition_up(added, wd=0): 
    x = concat(added) 
    _, r, c, ch = x.get_shape().as_list() 
    return Deconv2D(ch, (3, 3), input_shape=(r*2, c*2, ch), padding='same', 
        kernel_initializer='he_uniform', kernel_regularizer=l2(wd)) (x) 

I am following this implementation, but after the down path the input is not doubled the way it should be. Below is the full code with Keras 2:

def conv(x, nf, sz, wd, p, stride=1): 
    x = Conv2D(nf, (sz, sz), strides=(stride, stride), padding='same',
               kernel_initializer='he_uniform', kernel_regularizer=l2(wd))(x)
    return dropout(x, p) 

def conv_relu_bn(x, nf, sz=3, wd=0, p=0, stride=1): 
    return conv(relu_bn(x), nf, sz, wd=wd, p=p, stride=stride) 

def dense_block(n, x, growth_rate, p, wd):
    added = []
    for i in range(n):
        b = conv_relu_bn(x, growth_rate, p=p, wd=wd)
        print('x:', x, 'b:', b)
        x = concat([x, b])
        added.append(b)
    return x, added

def transition_dn(x, p, wd): 
    return conv_relu_bn(x, x.get_shape().as_list()[-1], sz=1, p=p, wd=wd, stride=2) 

def down_path(x, nb_layers, growth_rate, p, wd):
    skips = []
    for i, n in enumerate(nb_layers):
        x, added = dense_block(n, x, growth_rate, p, wd)
        skips.append(x)
        x = transition_dn(x, p=p, wd=wd)
    return skips, added

def transition_up(added, wd=0): 
    x = concat(added) 
    _, r, c, ch = x.get_shape().as_list() 
    return Deconv2D(ch, (3, 3), input_shape=(r*2, c*2, ch), padding='same', 
        kernel_initializer='he_uniform', kernel_regularizer=l2(wd)) (x) 

def up_path(added, skips, nb_layers, growth_rate, p, wd):
    for i, n in enumerate(nb_layers):
        x = transition_up(added, wd)
        x = concat([x, skips[i]])
        x, added = dense_block(n, x, growth_rate, p, wd)
    return x

def reverse(x): 
    return list(reversed(x)) 

def create_tiramisu(nb_classes, img_input, nb_dense_block=6, growth_rate=16, nb_filter=48,
                    nb_layers_per_block=5, p=None, wd=0):
    if type(nb_layers_per_block) is list or type(nb_layers_per_block) is tuple:
        nb_layers = list(nb_layers_per_block)
    else:
        nb_layers = [nb_layers_per_block] * nb_dense_block

    x = conv(img_input, nb_filter, 3, wd, 0)
    skips, added = down_path(x, nb_layers, growth_rate, p, wd)
    x = up_path(added, reverse(skips[:-1]), reverse(nb_layers[:-1]), growth_rate, p, wd)
    x = conv(x, nb_classes, 1, wd, 0)
    _, r, c, f = x.get_shape().as_list()
    x = Reshape((-1, nb_classes))(x)
    return Activation('softmax')(x)

input_shape = (224, 224, 3) 
img_input = Input(shape=input_shape) 
x = create_tiramisu(12, img_input, nb_layers_per_block=[4, 5, 7, 10, 12, 15], p=0.2, wd=1e-4) 

The last line creates the model. This is the stack trace when building the model with the Keras 2 syntax: at the start of the upsampling path, one of the inputs to the concatenate layer is not doubled from (None, 7, 7, 656) to (None, 14, 14, 656). This does not happen with the Keras 1 syntax, which I can also post.

--------------------------------------------------------------------------- 
ValueError        Traceback (most recent call last) 
<ipython-input-133-fb96a61666a1> in <module>() 
----> 1 x = create_tiramisu(12, img_input, nb_layers_per_block=[4, 5, 7, 10, 12, 15], p=0.2, wd=1e-4) 

<ipython-input-131-f58faf58b799> in create_tiramisu(nb_classes, img_input, nb_dense_block, growth_rate, nb_filter, nb_layers_per_block, p, wd) 
    62  print('p', p) 
    63  print('wd', wd) 
---> 64  x = up_path(added, reverse(skips[:-1]), reverse(nb_layers[:-1]), growth_rate, p, wd) 
    65  x = conv(x, nb_classes, 1, wd, 0) 
    66  _, r, c, f = x.get_shape().as_list() 

<ipython-input-131-f58faf58b799> in up_path(added, skips, nb_layers, growth_rate, p, wd) 
    38   print('up path', 'i:', i, 'n:', n) 
    39   x = transition_up(added, wd) 
---> 40   x = concat([x, skips[i]]) 
    41   x, added = dense_block(n, x, growth_rate, p, wd) 
    42  return x 

<ipython-input-130-c6d2942be960> in concat(xs) 
     5 def bn(x): return BatchNormalization(axis=-1)(x) 
     6 def relu_bn(x): return relu(bn(x)) 
----> 7 def concat(xs): return concatenate(xs, axis=-1) 

/home/clu/anaconda3/lib/python3.6/site-packages/keras/layers/merge.py in concatenate(inputs, axis, **kwargs) 
    389   A tensor, the concatenation of the inputs alongside axis `axis`. 
    390  """ 
--> 391  return Concatenate(axis=axis, **kwargs)(inputs) 
    392 
    393 

/home/clu/anaconda3/lib/python3.6/site-packages/keras/engine/topology.py in __call__(self, inputs, **kwargs) 
    528      self.build(input_shapes[0]) 
    529     else: 
--> 530      self.build(input_shapes) 
    531     self.built = True 
    532 

/home/clu/anaconda3/lib/python3.6/site-packages/keras/layers/merge.py in build(self, input_shape) 
    158        'inputs with matching shapes ' 
    159        'except for the concat axis. ' 
--> 160        'Got inputs shapes: %s' % (input_shape)) 
    161 
    162  def call(self, inputs): 

ValueError: `Concatenate` layer requires inputs with matching shapes except for the concat axis. Got inputs shapes: [(None, 7, 7, 240), (None, 14, 14, 656)] 

Shouldn't you be using a stride of 2? – marcopah


You're right. Thank you. – Char

Answer


Use a stride of 2 in the deconvolution layer.
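
The Keras 1 version passed subsample=(2, 2), which corresponds to strides=(2, 2) in Keras 2; the updated transition_up dropped it, so the transposed convolution kept the default strides=(1, 1) and never upsampled. Here is a minimal sketch of the fix, assuming the concat helper and the imports from the question are in scope (in Keras 2, Deconv2D is an alias for Conv2DTranspose):

def transition_up(added, wd=0):
    x = concat(added)
    _, r, c, ch = x.get_shape().as_list()
    # strides=(2, 2) is the Keras 2 equivalent of Keras 1's subsample=(2, 2).
    # With padding='same', the output size is the input size times the stride,
    # so (None, 7, 7, ch) becomes (None, 14, 14, ch).
    return Conv2DTranspose(ch, (3, 3), strides=(2, 2), padding='same',
                           kernel_initializer='he_uniform',
                           kernel_regularizer=l2(wd))(x)

The input_shape argument from the question's version can also be dropped; it only has an effect on a model's first layer and does not set the output shape. With strides=(2, 2) the transition-up output doubles its spatial dimensions, so both inputs to the concatenate call match again.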