Python keras.regularizers.WeightRegularizer() Examples

The following code examples show how to use keras.regularizers.WeightRegularizer(). They are drawn from open-source Python projects. You can vote up the examples you find helpful or vote down those you don't.

Example 1
Project: bayesian_dense   Author: bstriner   File: example.py    MIT License 6 votes vote down vote up
def model(hidden_dim=512, input_dim=28*28, sigma_regularization=1e-3, mu_regularization=1e-5, k=10,
          activation = lambda x: K.relu(x, 1.0 / 5.5)):
    """Build a two-layer Bayesian MLP with a k-way softmax output head.

    Each BayesianDense layer carries variational (sigma) regularization on
    both weights and biases, plus an L1 penalty on the weight means. The
    returned model is compiled with Adam (lr=1e-3) and categorical
    cross-entropy loss.
    """
    def make_layer(units, act):
        # Factory for a BayesianDense layer sharing the common regularizers.
        return BayesianDense(
            units,
            activation=act,
            W_sigma_regularizer=VariationalRegularizer(weight=sigma_regularization),
            b_sigma_regularizer=VariationalRegularizer(weight=sigma_regularization),
            W_regularizer=WeightRegularizer(l1=mu_regularization))

    inputs = Input(shape=(input_dim,))
    hidden1 = make_layer(hidden_dim, activation)
    hidden2 = make_layer(hidden_dim, activation)
    head = make_layer(k, 'softmax')
    outputs = head(hidden2(hidden1(inputs)))

    net = Model(inputs, outputs)
    net.compile(Adam(1e-3), loss='categorical_crossentropy')
    return net
Example 2
Project: dnnwsd   Author: crscardellino   File: mlp.py    BSD 3-Clause "New" or "Revised" License 6 votes vote down vote up
def _fine_tuning(self, X, y, encoders):
        """Fine-tune the network: stack the pre-trained encoder layers and
        train with a fresh softmax classification head on (X, y).

        X: training inputs; y: one-hot targets (categorical cross-entropy).
        encoders: pre-trained Keras layers, added in the given order.
        """
        self._model = models.Sequential()

        logger.info(u"Fine tuning of the neural network")

        # Reuse the pre-trained encoders as the body of the network.
        for encoder in encoders:
            self._model.add(encoder)

        # Softmax head sized to the last hidden layer, with L1/L2
        # penalties on both weights and activations (legacy Keras 1.x API).
        self._model.add(
            core.Dense(
                input_dim=self._hidden_layers[-1],
                output_dim=self.output_dim,
                activation='softmax',
                init=self._weight_init,
                W_regularizer=regularizers.WeightRegularizer(l1=self._l1_regularizer, l2=self._l2_regularizer),
                activity_regularizer=regularizers.ActivityRegularizer(l1=self._l1_regularizer, l2=self._l2_regularizer)
            )
        )

        self._model.compile(optimizer=self._optimizer, loss='categorical_crossentropy')

        self._model.fit(X, y, batch_size=self._batch_size, nb_epoch=self._fine_tune_epochs, show_accuracy=True) 
Example 3
Project: distnet   Author: ssamot   File: neuralnetworks.py    GNU General Public License v2.0 5 votes vote down vote up
def reg():
    """Return the weight regularizer to use (currently disabled).

    Regularization is intentionally switched off; a
    WeightRegularizer(l2=0.002, l1=0) was used here previously.
    """
    return None
Example 4
Project: conv-match   Author: ssamot   File: neuralnetworks.py    GNU General Public License v2.0 5 votes vote down vote up
def reg():
    """Weight-regularizer factory; disabled for now.

    Returning None means layers get no weight penalty. The earlier
    setting was WeightRegularizer(l2=0.002, l1=0).
    """
    return None
Example 5
Project: dnnwsd   Author: crscardellino   File: mlp.py    BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def _fit(self, X, y):
        """Build, compile, and train an MLP classifier on (X, y).

        Hidden layers follow self._hidden_layers (each followed by
        dropout); the output layer is a regularized softmax head.
        X: training inputs; y: one-hot targets.
        """
        logger.info(u"Building the network architecture")

        self._model = models.Sequential()

        # Track the previous layer's width so each Dense gets the right input_dim.
        previous_layer_size = self.input_dim

        for layer_size in self._hidden_layers:
            self._model.add(
                core.Dense(
                    input_dim=previous_layer_size,
                    output_dim=layer_size,
                    init=self._weight_init,
                    activation=self._activation
                )
            )
            # Dropout after every hidden layer.
            self._model.add(
                core.Dropout(self._dropout_ratio, input_shape=(layer_size,))
            )
            previous_layer_size = layer_size

        # Softmax output head with L1/L2 penalties on both weights and
        # activations (legacy Keras 1.x regularizer API).
        self._model.add(
            core.Dense(
                input_dim=previous_layer_size,
                output_dim=self.output_dim,
                activation='softmax',
                init=self._weight_init,
                W_regularizer=regularizers.WeightRegularizer(l1=self._l1_regularizer, l2=self._l2_regularizer),
                activity_regularizer=regularizers.ActivityRegularizer(l1=self._l1_regularizer, l2=self._l2_regularizer)
            )
        )

        logger.info(u"Compiling the network")

        self._model.compile(optimizer=self._optimizer, loss='categorical_crossentropy')

        logger.info(u"Fitting the data to the network")

        self._model.fit(X, y, batch_size=self._batch_size, nb_epoch=self._fine_tune_epochs, show_accuracy=True) 
Example 6
Project: shallow   Author: nfoti   File: shallow_fun.py    BSD 3-Clause "New" or "Revised" License 4 votes vote down vote up
def construct_model(model_spec, input_dim, output_dim):
    """
    Helper to construct a Keras model based on dict of specs and input size

    Parameters
    ----------
    model_spec: dict
        Dict containing keys: arch, activation, dropout, optimizer, loss,
            w_reg, metrics
    input_dim: int
        Size of input dimension
    output_dim: int
        Size of output dimension

    Returns
    -------
    model: Compiled keras.models.Sequential

    """

    model = Sequential()

    for li, layer_size in enumerate(model_spec['arch']):
        # The literal string 'None' in the arch spec marks the output layer;
        # substitute the real output dimension.
        if layer_size == 'None':
            layer_size = output_dim

        # Only the first layer declares input_dim; Keras infers the rest.
        if li == 0:
            model.add(Dense(layer_size,
                            input_dim=input_dim,
                            activation=model_spec['activation'],
                            W_regularizer=weight_reg(model_spec['w_reg'][0],
                                                     model_spec['w_reg'][1]),
                            name='Input'))
        else:
            model.add(Dense(layer_size,
                            activation=model_spec['activation'],
                            W_regularizer=weight_reg(model_spec['w_reg'][0],
                                                     model_spec['w_reg'][1]),
                            name='Layer_%i' % li))

        # Optional dropout after every layer (skipped when rate is 0).
        if model_spec['dropout'] > 0.:
            model.add(Dropout(model_spec['dropout'], name='Dropout_%i' % li))

    model.compile(optimizer=model_spec['optimizer'],
                  loss=model_spec['loss'],
                  metrics=model_spec['metrics'])

    return model