Python tensorflow.keras.layers.BatchNormalization() Examples

The following are 30 code examples of tensorflow.keras.layers.BatchNormalization(), drawn from open-source projects. The source file, project, and license are noted above each snippet. You may also want to check out all available functions/classes of the module tensorflow.keras.layers.
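Before the project snippets, here is a minimal, self-contained sketch of the layer in action; the layer sizes and random data are illustrative only:

import tensorflow as tf
from tensorflow.keras import layers

# BatchNormalization normalizes activations using batch statistics during
# training and moving averages of mean/variance at inference time.
model = tf.keras.Sequential([
    layers.Dense(64, use_bias=False, input_shape=(16,)),
    layers.BatchNormalization(),    # normalize the pre-activations
    layers.Activation('relu'),
])

x = tf.random.normal((8, 16))
y_train = model(x, training=True)    # uses batch statistics
y_infer = model(x, training=False)   # uses moving averages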
Example #1
Source File: model.py    From DexiNed with MIT License
def __init__(self, out_features, **kwargs):
    super(_DenseLayer, self).__init__(**kwargs)
    k_reg = None if w_decay is None else l2(w_decay)
    self.layers = []
    self.layers.append(tf.keras.Sequential(
        [
            layers.ReLU(),  # this first ReLU may be unnecessary
            layers.Conv2D(
                filters=out_features, kernel_size=(3, 3), strides=(1, 1), padding='same',
                use_bias=True, kernel_initializer=weight_init,
                kernel_regularizer=k_reg),
            layers.BatchNormalization(),
            layers.ReLU(),
            layers.Conv2D(
                filters=out_features, kernel_size=(3, 3), strides=(1, 1), padding='same',
                use_bias=True, kernel_initializer=weight_init,
                kernel_regularizer=k_reg),
            layers.BatchNormalization(),
        ]))
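This __init__ references the module-level globals w_decay and weight_init from DexiNed's model.py. A hypothetical instantiation, assuming the surrounding _DenseLayer class definition and illustrative values for those globals:

w_decay = None                   # assumed: no weight decay
weight_init = 'glorot_uniform'   # assumed initializer
dense_block = _DenseLayer(out_features=64)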
Example #2
Source File: multiclass_3D_CNN.py    From CNNArt with Apache License 2.0
def buildModel(patchShape, numClasses):
    input = Input(shape=patchShape)
    n_base_filter = 32
    _handle_data_format()
    conv = Conv3D(filters=n_base_filter, kernel_size=(7, 7, 7),
                  strides=(2, 2, 2), kernel_initializer="he_normal",
                  )(input)
    norm = BatchNormalization(axis=CHANNEL_AXIS)(conv)
    conv1 = Activation("relu")(norm)
    pool1 = MaxPooling3D(pool_size=(3, 3, 3), strides=(2, 2, 2),
                         padding="same")(conv1)
    flatten1 = Flatten()(pool1)
    dense = Dense(units=numClasses,
                  kernel_initializer="he_normal",
                  activation="softmax",
                  kernel_regularizer=l2(1e-4))(flatten1)
    model = Model(inputs=input, outputs=dense)
    return model 
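A hedged usage sketch; the patch shape and class count are illustrative, and it assumes the module's imports and _handle_data_format() (which sets CHANNEL_AXIS) are in scope:

model = buildModel(patchShape=(32, 32, 32, 1), numClasses=4)
model.compile(optimizer='adam', loss='categorical_crossentropy')
model.summary()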
Example #3
Source File: inception_resnet_v1.py    From TripletLossFace with MIT License
def conv2d_bn(x,
              filters,
              kernel_size,
              strides=1,
              padding='same',
              activation='relu',
              use_bias=False,
              name=None):
    x = Conv2D(filters,
               kernel_size,
               strides=strides,
               padding=padding,
               use_bias=use_bias,
               name=name)(x)
    if not use_bias:
        bn_axis = 1 if K.image_data_format() == 'channels_first' else 3
        bn_name = _generate_layer_name('BatchNorm', prefix=name)
        x = BatchNormalization(axis=bn_axis, momentum=0.995, epsilon=0.001,
                               scale=False, name=bn_name)(x)
    if activation is not None:
        ac_name = _generate_layer_name('Activation', prefix=name)
        x = Activation(activation, name=ac_name)(x)
    return x 
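A hedged usage sketch, assuming the module's _generate_layer_name helper and Keras imports are in scope (shapes are illustrative):

inputs = Input(shape=(160, 160, 3))
x = conv2d_bn(inputs, filters=32, kernel_size=3, strides=2)   # Conv2D + BN + ReLU
x = conv2d_bn(x, filters=64, kernel_size=3, activation=None)  # Conv2D + BN only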
Example #4
Source File: seqtoseq.py    From deepchem with MIT License
def _create_encoder(self, n_layers, dropout):
    """Create the encoder as a tf.keras.Model."""
    input = self._create_features()
    gather_indices = Input(shape=(2,), dtype=tf.int32)
    prev_layer = input
    for i in range(len(self._filter_sizes)):
      filter_size = self._filter_sizes[i]
      kernel_size = self._kernel_sizes[i]
      if dropout > 0.0:
        prev_layer = Dropout(rate=dropout)(prev_layer)
      prev_layer = Conv1D(
          filters=filter_size, kernel_size=kernel_size,
          activation=tf.nn.relu)(prev_layer)
    prev_layer = Flatten()(prev_layer)
    prev_layer = Dense(
        self._decoder_dimension, activation=tf.nn.relu)(prev_layer)
    prev_layer = BatchNormalization()(prev_layer)
    return tf.keras.Model(inputs=[input, gather_indices], outputs=prev_layer) 
Example #5
Source File: model.py    From DexiNed with MIT License
def __init__(self, mid_features, out_features=None, stride=(1, 1),
             use_bn=True, use_act=True, **kwargs):
    super(DoubleConvBlock, self).__init__(**kwargs)
    self.use_bn = use_bn
    self.use_act = use_act
    out_features = mid_features if out_features is None else out_features
    k_reg = None if w_decay is None else l2(w_decay)

    self.conv1 = layers.Conv2D(
        filters=mid_features, kernel_size=(3, 3), strides=stride, padding='same',
        use_bias=True, kernel_initializer=weight_init,
        kernel_regularizer=k_reg)
    self.bn1 = layers.BatchNormalization()

    self.conv2 = layers.Conv2D(
        filters=out_features, kernel_size=(3, 3), strides=(1, 1), padding='same',
        use_bias=True, kernel_initializer=weight_init,
        kernel_regularizer=k_reg)
    self.bn2 = layers.BatchNormalization()
    self.relu = layers.ReLU()
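Only __init__ is shown above. A plausible call() sketched from the layers the block builds; the actual DexiNed implementation may differ in detail:

def call(self, x):
    x = self.conv1(x)
    if self.use_bn:
        x = self.bn1(x)
    x = self.relu(x)
    x = self.conv2(x)
    if self.use_bn:
        x = self.bn2(x)
    if self.use_act:
        x = self.relu(x)
    return x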
Example #6
Source File: residual.py    From MIScnn with GNU General Public License v3.0
def expanding_layer_2D(input, neurons, concatenate_link, ba_norm,
                       ba_norm_momentum):
    up = concatenate([Conv2DTranspose(neurons, (2, 2), strides=(2, 2),
                     padding='same')(input), concatenate_link], axis=-1)
    conv1 = Conv2D(neurons, (3, 3), activation='relu', padding='same')(up)
    if ba_norm: conv1 = BatchNormalization(momentum=ba_norm_momentum)(conv1)
    conv2 = Conv2D(neurons, (3, 3), activation='relu', padding='same')(conv1)
    if ba_norm: conv2 = BatchNormalization(momentum=ba_norm_momentum)(conv2)
    shortcut = Conv2D(neurons, (1, 1), activation='relu', padding='same')(up)
    add_layer = add([shortcut, conv2])
    return add_layer
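A hedged sketch of how this residual expanding layer pairs with a skip connection from the contracting path (net and encoder_features are hypothetical tensors of compatible shape):

net = expanding_layer_2D(net, neurons=128,
                         concatenate_link=encoder_features,
                         ba_norm=True, ba_norm_momentum=0.99)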

Example #7
Source File: multiRes.py    From MIScnn with GNU General Public License v3.0
def trans_conv3d_bn(x, filters, num_row, num_col, num_z, padding='same', strides=(2, 2, 2), name=None):
    '''
    3D transposed convolutional layer (Conv3DTranspose + BatchNormalization)

    Arguments:
        x {keras layer} -- input layer
        filters {int} -- number of filters
        num_row {int} -- number of rows in filters
        num_col {int} -- number of columns in filters
        num_z {int} -- length along z axis in filters

    Keyword Arguments:
        padding {str} -- mode of padding (default: {'same'})
        strides {tuple} -- stride of convolution operation (default: {(2, 2, 2)})
        name {str} -- name of the layer (default: {None})

    Returns:
        [keras layer] -- [output layer]
    '''

    x = Conv3DTranspose(filters, (num_row, num_col, num_z), strides=strides, padding=padding)(x)
    x = BatchNormalization(axis=4, scale=False)(x)

    return x 
Example #8
Source File: train.py    From object-localization with MIT License
def create_model(trainable=False):
    model = MobileNetV2(input_shape=(IMAGE_SIZE, IMAGE_SIZE, 3), include_top=False, alpha=ALPHA, weights="imagenet")

    for layer in model.layers:
        layer.trainable = trainable

    block = model.get_layer("block_16_project_BN").output

    x = Conv2D(112, padding="same", kernel_size=3, strides=1, activation="relu")(block)
    x = Conv2D(112, padding="same", kernel_size=3, strides=1, use_bias=False)(x)
    x = BatchNormalization()(x)
    x = Activation("relu")(x)

    x = Conv2D(5, padding="same", kernel_size=1, activation="sigmoid")(x)

    model = Model(inputs=model.input, outputs=x)

    # divide by 2 since d/dweight learning_rate * weight^2 = 2 * learning_rate * weight
    # see https://arxiv.org/pdf/1711.05101.pdf
    regularizer = l2(WEIGHT_DECAY / 2)
    for weight in model.trainable_weights:
        with tf.keras.backend.name_scope("weight_regularizer"):
            model.add_loss(regularizer(weight)) # in tf2.0: lambda: regularizer(weight)

    return model 
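A hedged usage sketch, with assumed illustrative values for the module constants the snippet relies on:

IMAGE_SIZE = 224      # assumed input resolution
ALPHA = 1.0           # assumed MobileNetV2 width multiplier
WEIGHT_DECAY = 5e-4   # assumed decay coefficient
model = create_model(trainable=False)
model.compile(optimizer='adam', loss='mean_squared_error')  # loss is illustrative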
Example #9
Source File: dense.py    From MIScnn with GNU General Public License v3.0
def expanding_layer_2D(input, neurons, concatenate_link, ba_norm,
                       ba_norm_momentum):
    up = concatenate([Conv2DTranspose(neurons, (2, 2), strides=(2, 2),
                     padding='same')(input), concatenate_link], axis=-1)
    conv1 = Conv2D(neurons, (3, 3), activation='relu', padding='same')(up)
    if ba_norm: conv1 = BatchNormalization(momentum=ba_norm_momentum)(conv1)
    conc1 = concatenate([up, conv1], axis=-1)
    conv2 = Conv2D(neurons, (3, 3), activation='relu', padding='same')(conc1)
    if ba_norm: conv2 = BatchNormalization(momentum=ba_norm_momentum)(conv2)
    conc2 = concatenate([up, conv2], axis=-1)
    return conc2

Example #10
Source File: model.py    From Advanced-Deep-Learning-with-Keras with MIT License
def conv_layer(inputs,
               filters=32,
               kernel_size=3,
               strides=1,
               use_maxpool=True,
               postfix=None,
               activation=None):

    x = conv2d(inputs,
               filters=filters,
               kernel_size=kernel_size,
               strides=strides,
               name='conv'+postfix)
    x = BatchNormalization(name="bn"+postfix)(x)
    x = ELU(name='elu'+postfix)(x)
    if use_maxpool:
        x = MaxPooling2D(name='pool'+postfix)(x)
    return x 
Example #11
Source File: model.py    From Advanced-Deep-Learning-with-Keras with MIT License
def conv_layer(inputs,
               filters=32,
               kernel_size=3,
               strides=1,
               use_maxpool=True,
               postfix=None,
               activation=None):
    """Helper function to build Conv2D-BN-ReLU layer
        with optional MaxPooling2D.
    """

    x = Conv2D(filters=filters,
               kernel_size=kernel_size,
               strides=strides,
               kernel_initializer='he_normal',
               name="conv_"+postfix,
               padding='same')(inputs)
    x = BatchNormalization(name="bn_"+postfix)(x)
    x = Activation('relu', name='relu_'+postfix)(x)
    if use_maxpool:
        x = MaxPooling2D(name='pool'+postfix)(x)
    return x 
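A hedged usage sketch for this helper (input shape illustrative); note that postfix must be a string, since it is concatenated into layer names:

inputs = Input(shape=(64, 64, 3))
x = conv_layer(inputs, filters=32, kernel_size=3, postfix='1')                # Conv-BN-ReLU + maxpool
x = conv_layer(x, filters=64, kernel_size=3, postfix='2', use_maxpool=False)  # Conv-BN-ReLU only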
Example #12
Source File: model.py    From Advanced-Deep-Learning-with-Keras with MIT License
def tconv_layer(inputs,
                filters=32,
                kernel_size=3,
                strides=2,
                postfix=None):
    """Helper function to build Conv2DTranspose-BN-ReLU 
        layer
    """
    x = Conv2DTranspose(filters=filters,
                        kernel_size=kernel_size,
                        strides=strides,
                        padding='same',
                        kernel_initializer='he_normal',
                        name='tconv_'+postfix)(inputs)
    x = BatchNormalization(name="bn_"+postfix)(x)
    x = Activation('relu', name='relu_'+postfix)(x)
    return x 
Example #13
Source File: multiRes.py    From MIScnn with GNU General Public License v3.0
def MultiResBlock_3D(U, inp, alpha = 1.67):
    '''
    MultiRes Block

    Arguments:
        U {int} -- Number of filters in a corresponding UNet stage
        inp {keras layer} -- input layer

    Returns:
        [keras layer] -- [output layer]
    '''

    W = alpha * U

    shortcut = inp

    shortcut = conv3d_bn(shortcut, int(W*0.167) + int(W*0.333) + int(W*0.5), 1, 1, 1, activation=None, padding='same')

    conv3x3 = conv3d_bn(inp, int(W*0.167), 3, 3, 3, activation='relu', padding='same')

    conv5x5 = conv3d_bn(conv3x3, int(W*0.333), 3, 3, 3, activation='relu', padding='same')

    conv7x7 = conv3d_bn(conv5x5, int(W*0.5), 3, 3, 3, activation='relu', padding='same')

    out = concatenate([conv3x3, conv5x5, conv7x7], axis=4)
    out = BatchNormalization(axis=4)(out)

    out = add([shortcut, out])
    out = Activation('relu')(out)
    out = BatchNormalization(axis=4)(out)

    return out 
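The 1x1 shortcut's filter count is chosen to match the channels of the concatenated branches; a quick arithmetic check with, say, U=32:

U, alpha = 32, 1.67
W = alpha * U                                              # 53.44
branch_filters = [int(W*0.167), int(W*0.333), int(W*0.5)]  # [8, 17, 26]
print(sum(branch_filters))  # 51 -> same count as the 1x1 shortcut convolution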
Example #14
Source File: multiRes.py    From MIScnn with GNU General Public License v3.0
def ResPath_3D(filters, length, inp):
    '''
    ResPath

    Arguments:
        filters {int} -- number of filters
        length {int} -- length of ResPath
        inp {keras layer} -- input layer

    Returns:
        [keras layer] -- [output layer]
    '''

    shortcut = inp
    shortcut = conv3d_bn(shortcut, filters, 1, 1, 1, activation=None, padding='same')

    out = conv3d_bn(inp, filters, 3, 3, 3, activation='relu', padding='same')

    out = add([shortcut, out])
    out = Activation('relu')(out)
    out = BatchNormalization(axis=4)(out)

    for i in range(length-1):

        shortcut = out
        shortcut = conv3d_bn(shortcut, filters, 1, 1, 1, activation=None, padding='same')

        out = conv3d_bn(out, filters, 3, 3, 3, activation='relu', padding='same')

        out = add([shortcut, out])
        out = Activation('relu')(out)
        out = BatchNormalization(axis=4)(out)

    return out

Example #15
Source File: standard.py    From MIScnn with GNU General Public License v3.0
def contracting_layer_2D(input, neurons, ba_norm, ba_norm_momentum):
    conv1 = Conv2D(neurons, (3, 3), activation='relu', padding='same')(input)
    if ba_norm: conv1 = BatchNormalization(momentum=ba_norm_momentum)(conv1)
    conv2 = Conv2D(neurons, (3, 3), activation='relu', padding='same')(conv1)
    if ba_norm: conv2 = BatchNormalization(momentum=ba_norm_momentum)(conv2)
    pool = MaxPooling2D(pool_size=(2, 2))(conv2)
    return pool, conv2

Example #16
Source File: compact.py    From MIScnn with GNU General Public License v3.0
def middle_layer_2D(input, neurons, ba_norm, ba_norm_momentum):
    conv_m1 = Conv2D(neurons, (3, 3), activation='relu', padding='same')(input)
    if ba_norm: conv_m1 = BatchNormalization(momentum=ba_norm_momentum)(conv_m1)
    conv_m2 = Conv2D(neurons, (3, 3), activation='relu', padding='same')(conv_m1)
    if ba_norm: conv_m2 = BatchNormalization(momentum=ba_norm_momentum)(conv_m2)
    conc = concatenate([input, conv_m2], axis=-1)
    return conc

Example #17
Source File: compact.py    From MIScnn with GNU General Public License v3.0
def contracting_layer_2D(input, neurons, ba_norm, ba_norm_momentum):
    conv1 = Conv2D(neurons, (3, 3), activation='relu', padding='same')(input)
    if ba_norm: conv1 = BatchNormalization(momentum=ba_norm_momentum)(conv1)
    conv2 = Conv2D(neurons, (3, 3), activation='relu', padding='same')(conv1)
    if ba_norm: conv2 = BatchNormalization(momentum=ba_norm_momentum)(conv2)
    conc = concatenate([input, conv2], axis=-1)
    pool = MaxPooling2D(pool_size=(2, 2))(conc)
    return pool, conc

Example #18
Source File: dense.py    From MIScnn with GNU General Public License v3.0
def middle_layer_3D(input, neurons, ba_norm, ba_norm_momentum):
    conv_m1 = Conv3D(neurons, (3, 3, 3), activation='relu', padding='same')(input)
    if ba_norm: conv_m1 = BatchNormalization(momentum=ba_norm_momentum)(conv_m1)
    conc1 = concatenate([input, conv_m1], axis=-1)
    conv_m2 = Conv3D(neurons, (3, 3, 3), activation='relu', padding='same')(conc1)
    if ba_norm: conv_m2 = BatchNormalization(momentum=ba_norm_momentum)(conv_m2)
    conc2 = concatenate([input, conv_m2], axis=-1)
    return conc2

Example #19
Source File: dense.py    From MIScnn with GNU General Public License v3.0
def contracting_layer_3D(input, neurons, ba_norm, ba_norm_momentum):
    conv1 = Conv3D(neurons, (3, 3, 3), activation='relu', padding='same')(input)
    if ba_norm: conv1 = BatchNormalization(momentum=ba_norm_momentum)(conv1)
    conc1 = concatenate([input, conv1], axis=-1)
    conv2 = Conv3D(neurons, (3, 3, 3), activation='relu', padding='same')(conc1)
    if ba_norm: conv2 = BatchNormalization(momentum=ba_norm_momentum)(conv2)
    conc2 = concatenate([input, conv2], axis=-1)
    pool = MaxPooling3D(pool_size=(2, 2, 2))(conc2)
    return pool, conc2

Example #20
Source File: standard.py    From MIScnn with GNU General Public License v3.0
def contracting_layer_3D(input, neurons, ba_norm, ba_norm_momentum):
    conv1 = Conv3D(neurons, (3, 3, 3), activation='relu', padding='same')(input)
    if ba_norm: conv1 = BatchNormalization(momentum=ba_norm_momentum)(conv1)
    conv2 = Conv3D(neurons, (3, 3, 3), activation='relu', padding='same')(conv1)
    if ba_norm: conv2 = BatchNormalization(momentum=ba_norm_momentum)(conv2)
    pool = MaxPooling3D(pool_size=(2, 2, 2))(conv2)
    return pool, conv2

Example #21
Source File: multiRes.py    From MIScnn with GNU General Public License v3.0
def conv2d_bn(x, filters, num_row, num_col, padding='same', strides=(1, 1), activation='relu', name=None):
    '''
    2D Convolutional layers

    Arguments:
        x {keras layer} -- input layer
        filters {int} -- number of filters
        num_row {int} -- number of rows in filters
        num_col {int} -- number of columns in filters

    Keyword Arguments:
        padding {str} -- mode of padding (default: {'same'})
        strides {tuple} -- stride of convolution operation (default: {(1, 1)})
        activation {str} -- activation function (default: {'relu'})
        name {str} -- name of the layer (default: {None})

    Returns:
        [keras layer] -- [output layer]
    '''

    x = Conv2D(filters, (num_row, num_col), strides=strides, padding=padding, use_bias=False)(x)
    x = BatchNormalization(axis=3, scale=False)(x)

    if activation is None:
        return x

    x = Activation(activation, name=name)(x)

    return x 
Example #22
Source File: standard.py    From MIScnn with GNU General Public License v3.0
def middle_layer_3D(input, neurons, ba_norm, ba_norm_momentum):
    conv_m1 = Conv3D(neurons, (3, 3, 3), activation='relu', padding='same')(input)
    if ba_norm: conv_m1 = BatchNormalization(momentum=ba_norm_momentum)(conv_m1)
    conv_m2 = Conv3D(neurons, (3, 3, 3), activation='relu', padding='same')(conv_m1)
    if ba_norm: conv_m2 = BatchNormalization(momentum=ba_norm_momentum)(conv_m2)
    return conv_m2

Example #23
Source File: dense.py    From MIScnn with GNU General Public License v3.0
def middle_layer_2D(input, neurons, ba_norm, ba_norm_momentum):
    conv_m1 = Conv2D(neurons, (3, 3), activation='relu', padding='same')(input)
    if ba_norm: conv_m1 = BatchNormalization(momentum=ba_norm_momentum)(conv_m1)
    conc1 = concatenate([input, conv_m1], axis=-1)
    conv_m2 = Conv2D(neurons, (3, 3), activation='relu', padding='same')(conc1)
    if ba_norm: conv_m2 = BatchNormalization(momentum=ba_norm_momentum)(conv_m2)
    conc2 = concatenate([input, conv_m2], axis=-1)
    return conc2

Example #24
Source File: dense.py    From MIScnn with GNU General Public License v3.0
def contracting_layer_2D(input, neurons, ba_norm, ba_norm_momentum):
    conv1 = Conv2D(neurons, (3, 3), activation='relu', padding='same')(input)
    if ba_norm: conv1 = BatchNormalization(momentum=ba_norm_momentum)(conv1)
    conc1 = concatenate([input, conv1], axis=-1)
    conv2 = Conv2D(neurons, (3, 3), activation='relu', padding='same')(conc1)
    if ba_norm: conv2 = BatchNormalization(momentum=ba_norm_momentum)(conv2)
    conc2 = concatenate([input, conv2], axis=-1)
    pool = MaxPooling2D(pool_size=(2, 2))(conc2)
    return pool, conc2

Example #25
Source File: train_keras_model.py    From gym-2048 with MIT License
def build_model(board_size=4, board_layers=16, outputs=4, filters=64, residual_blocks=4):
  # Functional API model
  inputs = layers.Input(shape=(board_size * board_size * board_layers,))
  x = layers.Reshape((board_size, board_size, board_layers))(inputs)

  # Initial convolutional block
  x = layers.Conv2D(filters=filters, kernel_size=(3, 3), padding='same')(x)
  x = layers.BatchNormalization()(x)
  x = layers.Activation('relu')(x)

  # residual blocks
  for i in range(residual_blocks):
    # x at the start of a block
    temp_x = layers.Conv2D(filters=filters, kernel_size=(3, 3), padding='same')(x)
    temp_x = layers.BatchNormalization()(temp_x)
    temp_x = layers.Activation('relu')(temp_x)
    temp_x = layers.Conv2D(filters=filters, kernel_size=(3, 3), padding='same')(temp_x)
    temp_x = layers.BatchNormalization()(temp_x)
    x = layers.add([x, temp_x])
    x = layers.Activation('relu')(x)

  # policy head
  x = layers.Conv2D(filters=2, kernel_size=(1, 1), padding='same')(x)
  x = layers.BatchNormalization()(x)
  x = layers.Activation('relu')(x)
  x = layers.Flatten()(x)
  predictions = layers.Dense(outputs, activation='softmax')(x)

  # Create model
  return models.Model(inputs=inputs, outputs=predictions) 
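A usage sketch for this model builder (the training configuration is illustrative only):

from tensorflow.keras import layers, models

model = build_model(board_size=4, board_layers=16, outputs=4)
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
model.summary()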
Example #26
Source File: mv2_cpm.py    From tf2-mobile-pose-estimation with Apache License 2.0
def _separable_conv(input, channels, kernel_size, strides):
    # 3x3 separable_conv2d
    x = layers.DepthwiseConv2D(kernel_size=kernel_size, strides=strides, padding="SAME",
                               kernel_regularizer=l2_regularizer_00004)(input)
    # activation
    x = layers.ReLU()(x)

    # 1x1 conv2d
    x = layers.Conv2D(filters=channels, kernel_size=(1, 1), strides=(1, 1), padding='SAME')(x)
    x = layers.BatchNormalization(momentum=0.999)(x)
    x = layers.ReLU(max_value=6)(x)

    return x 
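The snippet references a module-level l2_regularizer_00004. A hedged usage sketch with an assumed definition matching that name:

from tensorflow.keras import layers, regularizers

l2_regularizer_00004 = regularizers.l2(4e-5)  # assumed from the identifier
inputs = layers.Input(shape=(128, 128, 3))
x = _separable_conv(inputs, channels=64, kernel_size=(3, 3), strides=(1, 1))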
Example #27
Source File: mv2_cpm.py    From tf2-mobile-pose-estimation with Apache License 2.0
def _inverted_bottleneck(input, up_channel_rate, channels, is_subsample, kernel_size):
    if is_subsample:
        strides = (2, 2)
    else:
        strides = (1, 1)

    kernel_size = (kernel_size, kernel_size)

    # 1x1 conv2d
    x = layers.Conv2D(filters=up_channel_rate * input.shape[-1], kernel_size=(1, 1), strides=(1, 1), padding='SAME')(input)
    x = layers.BatchNormalization(momentum=0.999)(x)
    x = layers.ReLU(max_value=6)(x)

    # activation
    x = layers.ReLU()(x)

    # 3x3 separable_conv2d
    x = layers.DepthwiseConv2D(kernel_size=kernel_size, strides=strides, padding="SAME",
                               kernel_regularizer=l2_regularizer_00004)(x)
    # activation
    x = layers.ReLU()(x)

    # 1x1 conv2d
    x = layers.Conv2D(filters=channels, kernel_size=(1, 1), strides=(1, 1), padding='SAME')(x)
    x = layers.BatchNormalization(momentum=0.999)(x)
    x = layers.ReLU(max_value=6)(x)

    if input.shape[-1] == channels:
        x = input + x

    return x 
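A hedged usage sketch. Note that the residual add checks only the channel count, so calling with is_subsample=True while the input channels equal channels would attempt to add tensors of different spatial sizes:

inputs = layers.Input(shape=(96, 96, 64))
x = _inverted_bottleneck(inputs, up_channel_rate=6, channels=64,
                         is_subsample=False, kernel_size=3)  # residual add fires: 64 == 64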
Example #28
Source File: mv2_hourglass.py    From tf2-mobile-pose-estimation with Apache License 2.0
def _inverted_bottleneck(input, up_channel_rate, channels, is_subsample, kernel_size):
    if is_subsample:
        strides = (2, 2)
    else:
        strides = (1, 1)

    kernel_size = (kernel_size, kernel_size)

    # 1x1 conv2d
    x = layers.Conv2D(filters=up_channel_rate * input.shape[-1], kernel_size=(1, 1), strides=(1, 1), padding='SAME')(input)
    x = layers.BatchNormalization(momentum=0.999)(x)
    x = layers.ReLU(max_value=6)(x)

    # activation
    x = layers.ReLU()(x)

    # 3x3 separable_conv2d
    x = layers.DepthwiseConv2D(kernel_size=kernel_size, strides=strides, padding="SAME",
                               kernel_regularizer=l2_regularizer_00004)(x)
    # activation
    x = layers.ReLU()(x)

    # 1x1 conv2d
    x = layers.Conv2D(filters=channels, kernel_size=(1, 1), strides=(1, 1), padding='SAME')(x)
    x = layers.BatchNormalization(momentum=0.999)(x)
    x = layers.ReLU(max_value=6)(x)

    if input.shape[-1] == channels:
        x = input + x

    return x 
Example #29
Source File: standard.py    From MIScnn with GNU General Public License v3.0
def expanding_layer_3D(input, neurons, concatenate_link, ba_norm,
                       ba_norm_momentum):
    up = concatenate([Conv3DTranspose(neurons, (2, 2, 2), strides=(2, 2, 2),
                     padding='same')(input), concatenate_link], axis=4)
    conv1 = Conv3D(neurons, (3, 3, 3), activation='relu', padding='same')(up)
    if ba_norm: conv1 = BatchNormalization(momentum=ba_norm_momentum)(conv1)
    conv2 = Conv3D(neurons, (3, 3, 3), activation='relu', padding='same')(conv1)
    if ba_norm: conv2 = BatchNormalization(momentum=ba_norm_momentum)(conv2)
    return conv2 
Example #30
Source File: compact.py    From MIScnn with GNU General Public License v3.0
def expanding_layer_2D(input, neurons, concatenate_link, ba_norm,
                       ba_norm_momentum):
    up = concatenate([Conv2DTranspose(neurons, (2, 2), strides=(2, 2),
                     padding='same')(input), concatenate_link], axis=-1)
    conv1 = Conv2D(neurons, (3, 3), activation='relu', padding='same')(up)
    if ba_norm: conv1 = BatchNormalization(momentum=ba_norm_momentum)(conv1)
    conv2 = Conv2D(neurons, (3, 3), activation='relu', padding='same')(conv1)
    if ba_norm: conv2 = BatchNormalization(momentum=ba_norm_momentum)(conv2)
    conc = concatenate([up, conv2], axis=-1)
    return conc
