Python keras.regularizers Examples

The following code examples show how to use the keras.regularizers module. They are taken from open-source Python projects. You can vote up the examples you like or vote down the ones you don't.

Example 1
Project: fancy-cnn   Author: textclf   File: timedistributed.py    MIT License 6 votes vote down vote up
def build(self):
        try:
            self.input_ndim = len(self.previous.input_shape)
        except AttributeError:
            self.input_ndim = len(self.input_shape)

        self.layer.set_input_shape((None, ) + self.input_shape[2:])

        if hasattr(self.layer, 'regularizers'):
            self.regularizers = self.layer.regularizers

        if hasattr(self.layer, 'constraints'):
            self.constraints = self.layer.constraints
        
        if hasattr(self.layer, 'trainable_weights'):
            self.trainable_weights = self.layer.trainable_weights

            if self.initial_weights is not None:
                self.layer.set_weights(self.initial_weights)
                del self.initial_weights 
Example 2
Project: sisy   Author: qorrect   File: build.py    Apache License 2.0 6 votes vote down vote up
def _build_layer_parameters(layer):
    parameters = copy.copy(layer.parameters)
    regularizers = [
        'activity_regularizer',
        'bias_regularizer',
        'kernel_regularizer',
        'kernel_initializer'
        'gamma_regularizer',
        'beta_regularizer']
    for regularizer in regularizers:
        if regularizer in parameters:
            parameters[regularizer] = _get_regularizer(parameters[regularizer])
    activation = parameters.get('activation', None)
    if activation:
        if is_custom_activation(activation):
            parameters['activation'] = get_custom_activation(activation)
    return parameters 
Example 3
Project: dense_tensor   Author: bstriner   File: utils.py    MIT License 5 votes vote down vote up
def add_activity_regularizer(layer):
    """Attach `layer.activity_regularizer` to the layer's regularizer list.

    Only needed on Keras 1 (`keras_2` falsy); Keras 2 handles activity
    regularizers itself.  No-op when the layer has no activity regularizer.

    :param layer: layer whose `activity_regularizer` (if any) should be
        registered in `layer.regularizers`.
    """
    if layer.activity_regularizer and not keras_2:
        layer.activity_regularizer.set_layer(layer)
        if not hasattr(layer, 'regularizers'):
            layer.regularizers = []
        # BUG FIX: the append was nested inside the `if not hasattr` guard,
        # so a layer that already had a `regularizers` list never received
        # its activity regularizer.  It must run unconditionally.
        layer.regularizers.append(layer.activity_regularizer)
Example 4
Project: dense_tensor   Author: bstriner   File: utils.py    MIT License 5 votes vote down vote up
def l1l2(l1_weight=0, l2_weight=0):
    """Build a combined L1/L2 regularizer portably across Keras versions.

    Keras 2 exposes the `L1L2` class; Keras 1 exposes a lowercase `l1l2`
    factory.  Both are called with the same positional weights.
    """
    if keras_2:
        from keras.regularizers import L1L2 as _factory
    else:
        from keras.regularizers import l1l2 as _factory
    return _factory(l1_weight, l2_weight)
Example 5
Project: dense_tensor   Author: bstriner   File: utils.py    MIT License 5 votes vote down vote up
def add_weight(layer,
               shape,
               name,
               initializer='random_uniform',
               regularizer=None,
               constraint=None):
    """Create a trainable weight on `layer`, portably across Keras versions.

    On Keras 2 this delegates to `layer.add_weight`; on Keras 1 it creates
    the variable by hand and registers it (and its regularizer, if any) in
    the layer's bookkeeping lists.

    :param layer: layer that owns the new weight.
    :param shape: weight tensor shape.
    :param name: weight name.
    :param initializer: initializer spec, resolved via `get_initializer`.
    :param regularizer: optional regularizer to attach to the weight.
    :param constraint: optional constraint (Keras 2 path only).
    :return: the created weight variable.
    """
    init_fn = get_initializer(initializer)
    if keras_2:
        return layer.add_weight(initializer=init_fn,
                                shape=shape,
                                name=name,
                                regularizer=regularizer,
                                constraint=constraint)

    # Keras 1: do the bookkeeping manually.
    weight = init_fn(shape, name=name)
    if not hasattr(layer, 'trainable_weights'):
        layer.trainable_weights = []
    layer.trainable_weights.append(weight)
    if regularizer:
        if not hasattr(layer, 'regularizers'):
            layer.regularizers = []
        regularizer.set_param(weight)
        layer.regularizers.append(regularizer)
    return weight
Example 6
Project: knowledgeflow   Author: 3rduncle   File: lcd.py    MIT License 5 votes vote down vote up
def buildConvolution(self, name):
        """Build one Convolution1D layer per configured filter width.

        Reads 'filters' (list of widths) and 'nb_filter' from self.params
        and stores the resulting layers under '<name>-convolution'.
        """
        filter_widths = self.params.get('filters')
        filter_count = self.params.get('nb_filter')
        assert filter_widths
        assert filter_count
        # maxnorm(...) is constructed per layer on purpose: each conv gets
        # its own constraint object.
        conv_layers = [
            Convolution1D(
                nb_filter=filter_count,
                filter_length=width,
                border_mode='valid',
                subsample_length=1,
                init='glorot_uniform',
                W_constraint=maxnorm(self.params.get('w_maxnorm')),
                b_constraint=maxnorm(self.params.get('b_maxnorm')),
                name='%s-conv-%d' % (name, width),
            )
            for width in filter_widths
        ]
        self.layers['%s-convolution' % name] = conv_layers
Example 7
Project: knowledgeflow   Author: 3rduncle   File: lcd.py    MIT License 5 votes vote down vote up
def buildConvolution(self, name):
        """Create the 1-D convolution stack for branch `name`.

        One Convolution1D per width listed in self.params['filters'], each
        with self.params['nb_filter'] output filters; the list is stored in
        self.layers under '<name>-convolution'.
        """
        widths = self.params.get('filters')
        n_filters = self.params.get('nb_filter')
        assert widths
        assert n_filters
        built = []
        for width in widths:
            # Fresh maxnorm constraints for every layer — they are distinct
            # objects in the original, so keep it that way.
            conv = Convolution1D(
                nb_filter=n_filters,
                filter_length=width,
                border_mode='valid',
                subsample_length=1,
                init='glorot_uniform',
                W_constraint=maxnorm(self.params.get('w_maxnorm')),
                b_constraint=maxnorm(self.params.get('b_maxnorm')),
                name='%s-conv-%d' % (name, width))
            built.append(conv)
        self.layers['%s-convolution' % name] = built
Example 8
Project: GewitterGefahr   Author: thunderhoser   File: architecture_utils.py    MIT License 5 votes vote down vote up
def get_weight_regularizer(l1_weight=DEFAULT_L1_WEIGHT,
                           l2_weight=DEFAULT_L2_WEIGHT):
    """Creates regularizer for network weights.

    :param l1_weight: L1 regularization weight (not to be confused with the
        network weights being regularized).  Must be >= 0.
    :param l2_weight: L2 regularization weight.  Must be >= 0.
    :return: regularizer_object: Instance of `keras.regularizers.l1_l2`.
    """

    # Validate in the same order as before (l1 first), so any error raised
    # is identical.
    for this_weight in [l1_weight, l2_weight]:
        error_checking.assert_is_geq(this_weight, 0.)

    return keras.regularizers.l1_l2(l1=l1_weight, l2=l2_weight)
Example 9
Project: GewitterGefahr   Author: thunderhoser   File: architecture_utils.py    MIT License 5 votes vote down vote up
def get_1d_conv_layer(
        num_kernel_rows, num_rows_per_stride, num_filters,
        padding_type_string=NO_PADDING_STRING, weight_regularizer=None):
    """Creates layer for 1-D convolution.

    :param num_kernel_rows: See doc for `_check_convolution_options`.
    :param num_rows_per_stride: Same.
    :param num_filters: Same.
    :param padding_type_string: Same.
    :param weight_regularizer: Regularizer applied to both kernel and bias
        weights of the new layer.  May be an instance of `keras.regularizers`
        or None (no regularization).
    :return: layer_object: Instance of `keras.layers.Conv1D`.
    """

    _check_convolution_options(
        num_kernel_rows=num_kernel_rows,
        num_rows_per_stride=num_rows_per_stride,
        padding_type_string=padding_type_string,
        num_filters=num_filters, num_kernel_dimensions=1)

    conv_kwargs = dict(
        filters=num_filters,
        kernel_size=(num_kernel_rows,),
        strides=(num_rows_per_stride,),
        padding=padding_type_string,
        dilation_rate=(1,),
        activation=None,
        use_bias=True,
        kernel_initializer=KERNEL_INITIALIZER_NAME,
        bias_initializer=BIAS_INITIALIZER_NAME,
        kernel_regularizer=weight_regularizer,
        bias_regularizer=weight_regularizer)
    return keras.layers.Conv1D(**conv_kwargs)
Example 10
Project: recurrent-attention-for-QA-SQUAD-based-on-keras   Author: wentaozhu   File: attentionlayer.py    MIT License 5 votes vote down vote up
def __init__(self, h, output_dim,
                 init='glorot_uniform', **kwargs):
        """Attention layer constructor (regularizers/dropout intentionally
        removed by the original author).

        :param h: attention context size (semantics defined by the layer body).
        :param output_dim: output dimensionality.
        :param init: weight-initializer name, resolved via `initializations.get`.
        """
        self.h = h
        self.output_dim = output_dim
        self.init = initializations.get(init)
        super(AttenLayer, self).__init__(**kwargs)
        # Declare rank-3 input — (samples, timesteps, features) — which is
        # needed for the layer to accept 3-D tensors.
        self.input_spec = [InputSpec(ndim=3)]