Python tensorflow.keras.losses Examples

The following are 8 code examples of the tensorflow.keras.losses module, each taken from an open-source project; the source file and project are noted above each example. You may also want to check out all available functions and classes of the tensorflow.keras module.
Example #1
Source File: utils.py    From MultiPlanarUNet with MIT License
def _init(string_list, tf_funcs, custom_funcs, logger=None, **kwargs):
    """
    Helper for 'init_losses' or 'init_metrics'.
    Please refer to their docstrings.

    Args:
        string_list:  (list)   List of strings, each giving a name of a metric
                               or loss to use for training. The name should
                               refer to a function or class in either tf_funcs
                               or custom_funcs modules.
        tf_funcs:     (module) A Tensorflow.keras module of losses or metrics,
                               or a list of various modules to look through.
        custom_funcs: (module) A custom module of losses or metrics
        logger:       (Logger) A Logger object
        **kwargs:     (dict)   Parameters passed to all losses or metrics which
                               are represented by a class (i.e. not a function)

    Returns:
        A list of length len(string_list) of initialized loss or metric classes,
        or references to loss or metric functions.
    """
    initialized = []
    tf_funcs = ensure_list_or_tuple(tf_funcs)
    for func_or_class in ensure_list_or_tuple(string_list):
        modules_found = list(filter(None, [getattr(m, func_or_class, None)
                                           for m in tf_funcs]))
        if modules_found:
            initialized.append(modules_found[0])  # use the first match found
        else:
            # Fall back to look in custom module
            initialized.append(getattr(custom_funcs, func_or_class))
    return initialized 
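The lookup in _init amounts to scanning one or more tf.keras modules for an attribute with the given name and falling back to a custom module. Below is a minimal standalone sketch of that pattern; the custom_losses namespace and my_dice_loss are hypothetical stand-ins, not part of MultiPlanarUNet:

import types
from tensorflow.keras import losses, metrics

# Hypothetical stand-in for a project-specific losses module.
custom_losses = types.SimpleNamespace(my_dice_loss=lambda y_true, y_pred: 0.0)

def resolve(name, tf_modules, custom_module):
    # Return the first attribute called `name` found in tf_modules,
    # falling back to custom_module (mirrors the lookup in _init above).
    for module in tf_modules:
        found = getattr(module, name, None)
        if found is not None:
            return found
    return getattr(custom_module, name)

print(resolve("sparse_categorical_crossentropy", [losses, metrics], custom_losses))
print(resolve("my_dice_loss", [losses, metrics], custom_losses))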
Example #2
Source File: utils.py    From MultiPlanarUNet with MIT License
def init_losses(loss_string_list, logger=None, **kwargs):
    """
    Takes a list of strings each naming a loss function to return. The string
    name should correspond to a function or class that is an attribute of
    either the tensorflow.keras.losses or mpunet.evaluate.losses
    modules.

    The returned values are either references to the loss functions to use, or
    initialized loss classes for some custom losses (used when the loss
    requires certain parameters to be set).

    Args:
        loss_string_list: (list)   A list of strings each naming a loss to
                                   return
        logger:           (Logger) An optional Logger object
        **kwargs:         (dict)   Parameters passed to all losses implemented
                                   as classes (i.e. not to plain functions)

    Returns:
        A list of length len(loss_string_list) of loss functions or initialized
        classes
    """
    return _init(
        loss_string_list, losses, custom_losses, logger, **kwargs
    ) 
Example #3
Source File: trainer.py    From U-Time with MIT License
def compile_model(self, optimizer, optimizer_kwargs, loss, metrics, **kwargs):
        """
        Compile the tf.keras Model instance stored in self.model.
        Sets the loss function, optimizer and metrics.

        Args:
            optimizer:        (string) The name of a tf.keras.optimizers Optimizer
            optimizer_kwargs: (dict)   Key-word arguments passed to the Optimizer
            loss:             (string) The name of a tf.keras.losses or
                                       MultiPlanarUnet loss function
            metrics:          (list)   List of tf.keras.metrics or
                                       MultiPlanarUNet metrics.
            **kwargs:         (dict)   Key-word arguments passed to losses
                                       and/or metrics that accept such.
        """
        # Make sure sparse metrics and loss are specified as sparse
        metrics = ensure_list_or_tuple(metrics)
        losses = ensure_list_or_tuple(loss)
        ensure_sparse(metrics+losses)

        # Initialize optimizer
        optimizer = optimizers.__dict__[optimizer]
        optimizer = optimizer(**optimizer_kwargs)

        # Initialize loss(es) and metrics from tf.keras or MultiPlanarUNet
        losses = init_losses(losses, self.logger, **kwargs)
        metrics = init_metrics(metrics, self.logger, **kwargs)

        # Compile the model
        self.model.compile(optimizer=optimizer, loss=losses, metrics=metrics)
        self.logger("Optimizer:   %s" % optimizer)
        self.logger("Loss funcs:  %s" % losses)
        self.logger("Metrics:     %s" % init_metrics)
        return self 
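A hypothetical call to compile_model might look as follows; the argument names and values are illustrative only and assume a Trainer instance (here called trainer) built around a U-Time model:

trainer.compile_model(
    optimizer="Adam",
    optimizer_kwargs={"learning_rate": 1e-4},
    loss="sparse_categorical_crossentropy",
    metrics=["sparse_categorical_accuracy"],
)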
Example #4
Source File: keras_classification_model.py    From DeepPavlov with Apache License 2.0
def compile(self, model: Model, optimizer_name: str, loss_name: str,
                learning_rate: Optional[Union[float, List[float]]],
                learning_rate_decay: Optional[Union[float, str]]) -> Model:
        """
        Compile model with given optimizer and loss

        Args:
            model: uncompiled Keras model
            optimizer_name: name of optimizer from keras.optimizers
            loss_name: loss function name (from keras.losses)
            learning_rate: learning rate.
            learning_rate_decay: learning rate decay.

        Returns:
            compiled model with the given optimizer and loss
        """
        optimizer_func = getattr(tensorflow.keras.optimizers, optimizer_name, None)
        if callable(optimizer_func):
            if isinstance(learning_rate, float) and isinstance(learning_rate_decay, float):
                # in this case decay will be either given in config or, by default, learning_rate_decay=0.
                self.optimizer = optimizer_func(lr=learning_rate, decay=learning_rate_decay)
            else:
                self.optimizer = optimizer_func()
        else:
            raise AttributeError("Optimizer {} is not defined in `tensorflow.keras.optimizers`".format(optimizer_name))

        loss_func = getattr(tensorflow.keras.losses, loss_name, None)
        if callable(loss_func):
            loss = loss_func
        else:
            raise AttributeError("Loss {} is not defined".format(loss_name))

        model.compile(optimizer=self.optimizer,
                      loss=loss)
        return model 
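The same resolve-by-name pattern can be exercised on its own: look up the optimizer and loss via getattr and compile a small model. A minimal sketch, assuming TensorFlow 2.x (where the optimizer argument is learning_rate rather than lr); the layer sizes and names are arbitrary:

import tensorflow as tf

# Resolve optimizer and loss by name, as in the compile() method above.
optimizer_func = getattr(tf.keras.optimizers, "Adam", None)
loss_func = getattr(tf.keras.losses, "categorical_crossentropy", None)
assert callable(optimizer_func) and callable(loss_func)

model = tf.keras.Sequential([
    tf.keras.Input(shape=(4,)),
    tf.keras.layers.Dense(3, activation="softmax"),
])
model.compile(optimizer=optimizer_func(learning_rate=1e-3), loss=loss_func)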
Example #5
Source File: metrics_tracking.py    From keras-tuner with Apache License 2.0
def infer_metric_direction(metric):
    # Handle str input and get canonical object.
    if isinstance(metric, six.string_types):
        metric_name = metric

        if metric_name.startswith('val_'):
            metric_name = metric_name.replace('val_', '', 1)

        if metric_name.startswith('weighted_'):
            metric_name = metric_name.replace('weighted_', '', 1)

        # Special-cases (from `keras/engine/training_utils.py`)
        if metric_name in {'loss', 'crossentropy', 'ce'}:
            return 'min'
        elif metric_name == 'acc':
            return 'max'

        try:
            metric = keras.metrics.get(metric_name)
        except ValueError:
            try:
                metric = keras.losses.get(metric_name)
            except ValueError:
                # Direction can't be inferred.
                return None

    # Metric class, Loss class, or function.
    if isinstance(metric, (keras.metrics.Metric, keras.losses.Loss)):
        name = metric.__class__.__name__
        if name == 'MeanMetricWrapper':
            name = metric._fn.__name__
    else:
        name = metric.__name__

    if name in _MAX_METRICS or name in _MAX_METRIC_FNS:
        return 'max'
    elif hasattr(keras.metrics, name) or hasattr(keras.losses, name):
        return 'min'

    # Direction can't be inferred.
    return None 
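For illustration, the directions this helper infers for a few common inputs (assuming the keras-tuner constants _MAX_METRICS and _MAX_METRIC_FNS are in scope as in the original file):

print(infer_metric_direction('loss'))                # 'min' (special case)
print(infer_metric_direction('val_loss'))            # 'min' ('val_' prefix stripped first)
print(infer_metric_direction('acc'))                 # 'max' (special case)
print(infer_metric_direction('mean_squared_error'))  # 'min' (resolved via keras.metrics/keras.losses)
print(infer_metric_direction('my_custom_metric'))    # None (direction cannot be inferred)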
Example #6
Source File: __init__.py    From segmentation_models with MIT License
def inject_global_losses(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        kwargs['losses'] = _KERAS_LOSSES
        return func(*args, **kwargs)

    return wrapper 
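The decorator simply forces a losses keyword argument pointing at whichever Keras losses module is currently selected (see set_framework in Example #8). A toy sketch; show_losses is a hypothetical function, and set_framework must have been called first so that _KERAS_LOSSES is populated:

@inject_global_losses
def show_losses(**kwargs):
    # kwargs['losses'] is injected by the decorator.
    print(kwargs['losses'].categorical_crossentropy)

show_losses()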
Example #7
Source File: multinet.py    From deepimpute with MIT License
def build(self, inputdims):
        if self.NN_parameters['architecture'] is None:
            self.loadDefaultArchitecture()

        print(self.NN_parameters['architecture'])

        inputs = [ Input(shape=(inputdim,)) for inputdim in inputdims ]
        outputs = inputs

        for layer in self.NN_parameters['architecture']:
            if layer['type'].lower() == 'dense':
                outputs = [ Dense(layer['neurons'], activation=layer['activation'])(output)
                            for output in outputs ]
            elif layer['type'].lower() == 'dropout':
                outputs = [ Dropout(layer['rate'], seed=self.seed)(output)
                            for output in outputs] 
            else:
                print("Unknown layer type.")

        outputs = [Dense(self.sub_outputdim, activation="softplus")(output)
                   for output in outputs]
                
        model = Model(inputs=inputs, outputs=outputs)

        loss = self.NN_parameters['loss']

        if loss in [k for k, v in globals().items() if callable(v)]:
            # if loss is a defined function
            loss = eval(self.NN_parameters['loss'])
            
        if not callable(loss):
            # it is defined in Keras
            if hasattr(keras.losses, loss):
                loss = getattr(keras.losses, loss)                
            else:
                print('Unknown loss: {}. Aborting.'.format(loss))
                exit(1)

        model.compile(optimizer=keras.optimizers.Adam(lr=self.NN_parameters['learning_rate']),
                      loss=loss)

        return model 
Example #8
Source File: __init__.py    From segmentation_models with MIT License
def set_framework(name):
    """Set framework for Segmentation Models

    Args:
        name (str): one of ``keras``, ``tf.keras``, case insensitive.

    Raises:
        ValueError: in case of incorrect framework name.
        ImportError: in case framework is not installed.

    """
    name = name.lower()

    if name == _KERAS_FRAMEWORK_NAME:
        import keras
        import efficientnet.keras  # init custom objects
    elif name == _TF_KERAS_FRAMEWORK_NAME:
        from tensorflow import keras
        import efficientnet.tfkeras  # init custom objects
    else:
        raise ValueError('Not correct module name `{}`, use `{}` or `{}`'.format(
            name, _KERAS_FRAMEWORK_NAME, _TF_KERAS_FRAMEWORK_NAME))

    global _KERAS_BACKEND, _KERAS_LAYERS, _KERAS_MODELS
    global _KERAS_UTILS, _KERAS_LOSSES, _KERAS_FRAMEWORK

    _KERAS_FRAMEWORK = name
    _KERAS_BACKEND = keras.backend
    _KERAS_LAYERS = keras.layers
    _KERAS_MODELS = keras.models
    _KERAS_UTILS = keras.utils
    _KERAS_LOSSES = keras.losses

    # allow losses/metrics get keras submodules
    base.KerasObject.set_submodules(
        backend=keras.backend,
        layers=keras.layers,
        models=keras.models,
        utils=keras.utils,
    )


# set default framework
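The snippet is cut off here, before the default framework is actually applied. A short usage sketch of set_framework itself, assuming segmentation_models and TensorFlow are installed:

import segmentation_models as sm

# Select the tf.keras backend explicitly; passing 'keras' would select standalone Keras instead.
sm.set_framework('tf.keras')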