Python keras.losses.mean_squared_error() Examples

The following are 13 code examples showing how to use keras.losses.mean_squared_error(). These examples are extracted from open source projects; the project, author, file, and license are listed above each example.

You may also want to check out all available functions and classes of the module keras.losses.
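
For orientation, here is a minimal sketch of calling the loss directly on backend tensors; the values are illustrative:

from keras import backend as K
from keras.losses import mean_squared_error

# mean_squared_error averages the squared differences over the last axis,
# returning one loss value per sample.
y_true = K.constant([[0., 1., 1.], [1., 0., 0.]])
y_pred = K.constant([[0.1, 0.9, 0.8], [0.9, 0.2, 0.1]])
print(K.eval(mean_squared_error(y_true, y_pred)))  # -> approx. [0.02 0.02]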

Example 1
Project: pyod   Author: yzhao062   File: auto_encoder.py    License: BSD 2-Clause "Simplified" License
def __init__(self, hidden_neurons=None,
                 hidden_activation='relu', output_activation='sigmoid',
                 loss=mean_squared_error, optimizer='adam',
                 epochs=100, batch_size=32, dropout_rate=0.2,
                 l2_regularizer=0.1, validation_size=0.1, preprocessing=True,
                 verbose=1, random_state=None, contamination=0.1):
        super(AutoEncoder, self).__init__(contamination=contamination)
        self.hidden_neurons = hidden_neurons
        self.hidden_activation = hidden_activation
        self.output_activation = output_activation
        self.loss = loss
        self.optimizer = optimizer
        self.epochs = epochs
        self.batch_size = batch_size
        self.dropout_rate = dropout_rate
        self.l2_regularizer = l2_regularizer
        self.validation_size = validation_size
        self.preprocessing = preprocessing
        self.verbose = verbose
        self.random_state = random_state

        # default values
        if self.hidden_neurons is None:
            self.hidden_neurons = [64, 32, 32, 64]

        # Verify the network design is valid
        if not self.hidden_neurons == self.hidden_neurons[::-1]:
            print(self.hidden_neurons)
            raise ValueError("Hidden units should be symmetric")

        self.hidden_neurons_ = self.hidden_neurons

        check_parameter(dropout_rate, 0, 1, param_name='dropout_rate',
                        include_left=True) 
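
A hedged usage sketch for the constructor above, assuming the Keras-based pyod AutoEncoder; the data and layer sizes are illustrative:

import numpy as np
from keras.losses import mean_squared_error
from pyod.models.auto_encoder import AutoEncoder

X_train = np.random.rand(200, 16)  # synthetic feature matrix

# Passing mean_squared_error explicitly mirrors the default loss argument.
clf = AutoEncoder(hidden_neurons=[16, 8, 8, 16],
                  loss=mean_squared_error,
                  epochs=10, verbose=0)
clf.fit(X_train)
scores = clf.decision_scores_  # higher score = more anomalous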
Example 2
Project: voxelmorph   Author: voxelmorph   File: metrics.py    License: GNU General Public License v3.0
def l2(y_true, y_pred):
    """ L2 metric (MSE) """
    return losses.mean_squared_error(y_true, y_pred)


###############################################################################
# Helper Functions
############################################################################### 
Example 3
Project: alphazero   Author: witchu   File: keras_model.py    License: Apache License 2.0
def build(args):
    model = build_model(args)
    model.compile(loss=['categorical_crossentropy', 'mean_squared_error'],
                    optimizer=SGD(lr=args['learning_rate'], momentum = args['momentum']),
                    #optimizer='adam',
                    loss_weights=[0.5, 0.5])
    return model 
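
The two losses map to the two heads of an AlphaZero-style network (policy and value), combined as 0.5*crossentropy + 0.5*MSE via loss_weights. A self-contained sketch of the same compile pattern with a toy two-headed model (layer sizes are illustrative, not alphazero's):

from keras.models import Model
from keras.layers import Input, Dense
from keras.optimizers import SGD

inp = Input(shape=(64,))
hidden = Dense(32, activation='relu')(inp)
policy = Dense(4, activation='softmax', name='policy')(hidden)  # move probabilities
value = Dense(1, activation='tanh', name='value')(hidden)       # position evaluation

model = Model(inputs=inp, outputs=[policy, value])
# One loss per output; loss_weights sets each head's share of the total.
model.compile(loss=['categorical_crossentropy', 'mean_squared_error'],
              optimizer=SGD(lr=0.01, momentum=0.9),
              loss_weights=[0.5, 0.5])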
Example 4
Project: coremltools   Author: apple   File: test_keras2.py    License: BSD 3-Clause "New" or "Revised" License
def test_updatable_model_flag_mse_adam(self):
        """
        Test to ensure that respect_trainable is honored during convert of a
        model with mean squared error loss and the Adam optimizer.
        """
        import coremltools
        from keras.layers import Dense
        from keras.losses import mean_squared_error
        from keras.optimizers import Adam

        input = ["data"]
        output = ["output"]

        # Again, this should give an updatable model.
        updatable = Sequential()
        updatable.add(Dense(128, input_shape=(16,)))
        updatable.add(Dense(10, name="foo", activation="softmax", trainable=True))
        updatable.compile(
            loss=mean_squared_error,
            optimizer=Adam(lr=1.0, beta_1=0.5, beta_2=0.75, epsilon=0.25),
            metrics=["accuracy"],
        )
        cml = coremltools.converters.keras.convert(
            updatable, input, output, respect_trainable=True
        )
        spec = cml.get_spec()
        self.assertTrue(spec.isUpdatable)
        layers = spec.neuralNetwork.layers
        self.assertIsNotNone(layers[1].innerProduct)
        self.assertTrue(layers[1].innerProduct)
        self.assertTrue(layers[1].isUpdatable)
        self.assertEqual(len(spec.neuralNetwork.updateParams.lossLayers), 1)
        adopt = spec.neuralNetwork.updateParams.optimizer.adamOptimizer
        self.assertEqual(adopt.learningRate.defaultValue, 1.0)
        self.assertEqual(adopt.beta1.defaultValue, 0.5)
        self.assertEqual(adopt.beta2.defaultValue, 0.75)
        self.assertEqual(adopt.eps.defaultValue, 0.25) 
Example 5
Project: connect4-alpha-zero   Author: Zeta36   File: model_connect4.py    License: MIT License
def objective_function_for_value(y_true, y_pred):
    return mean_squared_error(y_true, y_pred) 
Example 6
Project: DLWP   Author: jweyn   File: custom.py    License: MIT License
def latitude_weighted_loss(loss_function=mean_squared_error, lats=None, output_shape=(), axis=-2, weighting='cosine'):
    """
    Create a loss function that weights inputs by a function of latitude before calculating the loss.

    :param loss_function: method: Keras loss function to apply after the weighting
    :param lats: ndarray: 1-dimensional array of latitude coordinates
    :param output_shape: tuple: shape of expected model output
    :param axis: int: latitude axis in model output shape
    :param weighting: str: type of weighting to apply. Options are:
            cosine: weight by the cosine of the latitude (default)
            midlatitude: weight by the cosine of the latitude but also apply a 25% reduction to the equator and boost
                to the mid-latitudes
    :return: callable loss function
    """
    if weighting not in ['cosine', 'midlatitude']:
        raise ValueError("'weighting' must be one of 'cosine' or 'midlatitude'")
    if lats is not None:
        lat_tensor = K.zeros(lats.shape)
        lat_tensor.assign(K.cast_to_floatx(lats[:]))

        weights = K.cos(lat_tensor * np.pi / 180.)
        if weighting == 'midlatitude':
            weights = weights + 0.5 * K.pow(K.sin(lat_tensor * 2 * np.pi / 180.), 2.)

        weight_shape = output_shape[axis:]
        for d in weight_shape[1:]:
            weights = K.expand_dims(weights, axis=-1)
            weights = K.repeat_elements(weights, d, axis=-1)

    else:
        weights = K.ones(output_shape)

    def lat_loss(y_true, y_pred):
        return loss_function(y_true * weights, y_pred * weights)

    return lat_loss 
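
A hedged usage sketch, reusing the imports from DLWP's custom.py (numpy as np, keras.backend as K, mean_squared_error); the 1-degree grid and output shape are illustrative:

import numpy as np

lats = np.arange(-90., 91., 1.)  # 181 latitude points
loss_fn = latitude_weighted_loss(loss_function=mean_squared_error,
                                 lats=lats,
                                 output_shape=(181, 360),  # (lat, lon)
                                 axis=-2,
                                 weighting='cosine')
model.compile(optimizer='adam', loss=loss_fn)  # model is hypothetical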
Example 7
Project: DLWP   Author: jweyn   File: custom.py    License: MIT License
def anomaly_correlation(y_true, y_pred, mean=0., regularize_mean='mse', reverse=True):
    """
    Calculate the anomaly correlation. FOR NOW, ASSUMES THAT THE CLIMATOLOGICAL MEAN IS 0, AND THEREFORE REQUIRES DATA
    TO BE SCALED TO REMOVE SPATIALLY-DEPENDENT MEAN.

    :param y_true: Tensor: target values
    :param y_pred: Tensor: model-predicted values
    :param mean: float: subtract this global mean from all predicted and target array values. IGNORED FOR NOW.
    :param regularize_mean: str or None: if not None, also penalizes a form of mean squared error:
        global: penalize differences in the global mean
        spatial: penalize differences in spatially-averaged mean (last two dimensions)
        mse: penalize the mean squared error
        mae: penalize the mean absolute error
    :param reverse: bool: if True, inverts the loss so that -1 is the target score
    :return: float: anomaly correlation loss
    """
    if regularize_mean is not None:
        assert regularize_mean in ['global', 'spatial', 'mse', 'mae']
    a = (K.mean(y_pred * y_true)
         / K.sqrt(K.mean(K.square(y_pred)) * K.mean(K.square(y_true))))
    if regularize_mean is not None:
        if regularize_mean == 'global':
            m = K.abs((K.mean(y_true) - K.mean(y_pred)) / K.mean(y_true))
        elif regularize_mean == 'spatial':
            m = K.mean(K.abs((K.mean(y_true, axis=[-2, -1]) - K.mean(y_pred, axis=[-2, -1]))
                             / K.mean(y_true, axis=[-2, -1])))
        elif regularize_mean == 'mse':
            m = mean_squared_error(y_true, y_pred)
        elif regularize_mean == 'mae':
            m = mean_absolute_error(y_true, y_pred)
    if reverse:
        if regularize_mean is not None:
            return m - a
        else:
            return -a
    else:
        if regularize_mean:
            return a - m
        else:
            return a 
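
Because every keyword argument has a default, the function can be passed to compile unchanged; a minimal sketch (model is hypothetical):

# With reverse=True and regularize_mean='mse' (the defaults), the loss is
# mse - correlation, so minimizing it pushes the correlation toward +1.
model.compile(optimizer='adam', loss=anomaly_correlation)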
Example 8
Project: faceswap   Author: deepfakes   File: _base.py    License: GNU General Public License v3.0
def loss_dict(self):
        """ Return the loss dict """
        loss_dict = dict(mae=losses.mean_absolute_error,
                         mse=losses.mean_squared_error,
                         logcosh=losses.logcosh,
                         smooth_loss=generalized_loss,
                         l_inf_norm=l_inf_norm,
                         ssim=DSSIMObjective(),
                         gmsd=gmsd_loss,
                         pixel_gradient_diff=gradient_loss)
        return loss_dict 
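
A hedged sketch of how such a mapping is typically consumed, assuming loss_dict is exposed as a property and the short name comes from user configuration:

loss_func = trainer.loss_dict["mse"]  # trainer is hypothetical; "mse" -> losses.mean_squared_error
model.compile(optimizer="adam", loss=loss_func)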
Example 9
Project: cyclegan_keras   Author: alecGraves   File: losses.py    License: The Unlicense
def discriminator_loss(y_true, y_pred):
    loss = mean_squared_error(y_true, y_pred)
    is_large = k.greater(loss, k.constant(_disc_train_thresh)) # threshold
    is_large = k.cast(is_large, k.floatx())
    return loss * is_large # binary threshold the loss to prevent overtraining the discriminator 
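
The module-level constant _disc_train_thresh is defined elsewhere in the project's losses.py; a self-contained sketch of the same idea, with an illustrative threshold value:

import keras.backend as k
from keras.losses import mean_squared_error

_disc_train_thresh = 0.2  # illustrative; the real value lives in losses.py

def thresholded_disc_loss(y_true, y_pred):
    loss = mean_squared_error(y_true, y_pred)
    # Once the discriminator's MSE drops below the threshold, the mask is 0,
    # zeroing the loss and its gradient so the discriminator stops improving.
    mask = k.cast(k.greater(loss, k.constant(_disc_train_thresh)), k.floatx())
    return loss * mask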
Example 10
Project: reversi-alpha-zero   Author: mokemokechicken   File: model.py    License: MIT License
def objective_function_for_value(y_true, y_pred):
    return mean_squared_error(y_true, y_pred) 
Example 11
Project: alpha_zero_othello   Author: bhansconnect   File: aiplayer.py    License: MIT License
def objective_function_for_value(y_true, y_pred):
        return mean_squared_error(y_true, y_pred) 
Example 12
Project: Benchmarks   Author: ECP-CANDLE   File: helper.py    License: MIT License
def combined_loss(y_true, y_pred):
    '''
    Uses a combination of mean_squared_error and an L1 penalty on the output of AE
    '''
    return mse(y_true, y_pred) + 0.01*mae(0, y_pred) 
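
Here mse and mae are presumably module-level aliases for the Keras losses; note that mae(0, y_pred) reduces to the mean absolute value of the output, i.e. an L1 activity penalty. A self-contained reading of the same function:

from keras.losses import mean_squared_error as mse, mean_absolute_error as mae

def combined_loss(y_true, y_pred):
    # mae(0, y_pred) == mean(|y_pred - 0|) == mean(|y_pred|)
    return mse(y_true, y_pred) + 0.01 * mae(0, y_pred)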
Example 13
Project: DLWP   Author: jweyn   File: custom.py    License: MIT License
def anomaly_correlation_loss(mean=None, regularize_mean='mse', reverse=True):
    """
    Create a Keras loss function for anomaly correlation.

    :param mean: ndarray or None: if not None, must be an array with the same shape as the expected prediction, except
        that the first (batch) axis should have a dimension of 1.
    :param regularize_mean: str or None: if not None, also penalizes a form of mean squared error:
        global: penalize differences in the global mean
        spatial: penalize differences in spatially-averaged mean (last two dimensions)
        mse: penalize the mean squared error
        mae: penalize the mean absolute error
    :param reverse: bool: if True, inverts the loss so that -1 is the (minimized) target score. Must be True if
        regularize_mean is not None.
    :return: method: anomaly correlation loss function
    """
    if mean is not None:
        assert len(mean.shape) > 1
        assert mean.shape[0] == 1
        mean_tensor = K.variable(mean, name='anomaly_correlation_mean')

    if regularize_mean is not None:
        assert regularize_mean in ['global', 'spatial', 'mse', 'mae']
        reverse = True

    def acc_loss(y_true, y_pred):
        if mean is not None:
            a = (K.mean((y_pred - mean_tensor) * (y_true - mean_tensor))
                 / K.sqrt(K.mean(K.square((y_pred - mean_tensor))) * K.mean(K.square((y_true - mean_tensor)))))
        else:
            a = (K.mean(y_pred * y_true)
                 / K.sqrt(K.mean(K.square(y_pred)) * K.mean(K.square(y_true))))
        if regularize_mean is not None:
            if regularize_mean == 'global':
                m = K.abs((K.mean(y_true) - K.mean(y_pred)) / K.mean(y_true))
            elif regularize_mean == 'spatial':
                m = K.mean(K.abs((K.mean(y_true, axis=[-2, -1]) - K.mean(y_pred, axis=[-2, -1]))
                                 / K.mean(y_true, axis=[-2, -1])))
            elif regularize_mean == 'mse':
                m = mean_squared_error(y_true, y_pred)
            elif regularize_mean == 'mae':
                m = mean_absolute_error(y_true, y_pred)
        if reverse:
            if regularize_mean is not None:
                return m - a
            else:
                return -a
        else:
            if regularize_mean:
                return a - m
            else:
                return a

    return acc_loss
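
A hedged usage sketch of the factory above (the climatological-mean array shape and model are illustrative):

import numpy as np

clim_mean = np.zeros((1, 181, 360), dtype='float32')  # leading batch axis must be 1
loss = anomaly_correlation_loss(mean=clim_mean, regularize_mean='mse')
model.compile(optimizer='adam', loss=loss)  # model is hypothetical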

