Python tensorflow.keras.backend() Examples
The following are 26
code examples of tensorflow.keras.backend().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions/classes of the module tensorflow.keras, or try the search function.
Example #1
Source File: operation_layers.py From onnx2keras with MIT License | 6 votes |
def convert_floor(node, params, layers, lambda_func, node_name, keras_name):
    """
    Convert Floor layer
    :param node: current operation node
    :param params: operation attributes
    :param layers: available keras layers
    :param lambda_func: function for keras Lambda layer
    :param node_name: internal converter name
    :param keras_name: resulting layer name
    :return: None
    """
    # BUG FIX: the original used `assert AttributeError(...)`, which asserts a
    # truthy exception instance and therefore never fires; raise explicitly.
    if len(node.input) != 1:
        raise AttributeError('More than 1 input for floor layer.')

    input_0 = ensure_tf_type(layers[node.input[0]], name="%s_const" % keras_name)

    def target_layer(x):
        # Floor is absent in keras.backend, so fall back to raw TF inside
        # the Lambda (import kept local so the lambda stays serializable).
        import tensorflow as tf
        return tf.floor(x)

    lambda_layer = keras.layers.Lambda(target_layer, name=keras_name)
    layers[node_name] = lambda_layer(input_0)
    lambda_func[keras_name] = target_layer
Example #2
Source File: operation_layers.py From onnx2keras with MIT License | 6 votes |
def convert_pow(node, params, layers, lambda_func, node_name, keras_name):
    """
    Convert Pow layer
    :param node: current operation node
    :param params: operation attributes
    :param layers: available keras layers
    :param lambda_func: function for keras Lambda layer
    :param node_name: internal converter name
    :param keras_name: resulting layer name
    :return: None
    """
    # BUG FIX: the original used `assert AttributeError(...)`, which asserts a
    # truthy exception instance and therefore never fires; raise explicitly.
    if len(node.input) != 2:
        raise AttributeError('More than 2 inputs for pow layer.')

    input_0 = ensure_tf_type(layers[node.input[0]], name="%s_const" % keras_name)
    power = ensure_numpy_type(layers[node.input[1]])

    def target_layer(x, a=power):
        # Local import keeps the Lambda layer self-contained/serializable.
        import tensorflow.keras.backend as K
        return K.pow(x, a)

    lambda_layer = keras.layers.Lambda(target_layer, name=keras_name)
    layers[node_name] = lambda_layer(input_0)
    lambda_func[keras_name] = target_layer
Example #3
Source File: operation_layers.py From onnx2keras with MIT License | 6 votes |
def convert_reduce_mean(node, params, layers, lambda_func, node_name, keras_name):
    """
    Convert reduce mean.
    :param node: current operation node
    :param params: operation attributes
    :param layers: available keras layers
    :param lambda_func: function for keras Lambda layer
    :param node_name: internal converter name
    :param keras_name: resulting layer name
    :return: None
    """
    # BUG FIX: the original used `assert AttributeError(...)`, which asserts a
    # truthy exception instance and therefore never fires; raise explicitly.
    if len(node.input) != 1:
        raise AttributeError('More than 1 input for reduce mean layer.')

    input_0 = ensure_tf_type(layers[node.input[0]], name="%s_const" % keras_name)

    def target_layer(x, axis=params['axes'], keepdims=params['keepdims']):
        # ONNX encodes keepdims as 0/1; keras wants a bool.
        import tensorflow.keras.backend as K
        return K.mean(x, keepdims=(keepdims == 1), axis=axis)

    lambda_layer = keras.layers.Lambda(target_layer, name=keras_name)
    layers[node_name] = lambda_layer(input_0)
    # Re-assert the static shape so downstream converters can rely on it.
    layers[node_name].set_shape(layers[node_name].shape)
    lambda_func[keras_name] = target_layer
Example #4
Source File: operation_layers.py From onnx2keras with MIT License | 6 votes |
def convert_exp(node, params, layers, lambda_func, node_name, keras_name):
    """
    Convert Exp layer
    :param node: current operation node
    :param params: operation attributes
    :param layers: available keras layers
    :param lambda_func: function for keras Lambda layer
    :param node_name: internal converter name
    :param keras_name: resulting layer name
    :return: None
    """
    # BUG FIX: the original used `assert AttributeError(...)`, which asserts a
    # truthy exception instance and never fires; raise explicitly. The message
    # also said "log layer" (copy-paste from convert_log) — corrected.
    if len(node.input) != 1:
        raise AttributeError('More than 1 input for exp layer.')

    input_0 = ensure_tf_type(layers[node.input[0]], name="%s_const" % keras_name)

    def target_layer(x):
        # Local import keeps the Lambda layer self-contained/serializable.
        import tensorflow.keras.backend as K
        return K.exp(x)

    lambda_layer = keras.layers.Lambda(target_layer, name=keras_name)
    layers[node_name] = lambda_layer(input_0)
    lambda_func[keras_name] = target_layer
Example #5
Source File: operation_layers.py From onnx2keras with MIT License | 6 votes |
def convert_log(node, params, layers, lambda_func, node_name, keras_name):
    """
    Convert Log layer
    :param node: current operation node
    :param params: operation attributes
    :param layers: available keras layers
    :param lambda_func: function for keras Lambda layer
    :param node_name: internal converter name
    :param keras_name: resulting layer name
    :return: None
    """
    # BUG FIX: the original used `assert AttributeError(...)`, which asserts a
    # truthy exception instance and therefore never fires; raise explicitly.
    if len(node.input) != 1:
        raise AttributeError('More than 1 input for log layer.')

    input_0 = ensure_tf_type(layers[node.input[0]], name="%s_const" % keras_name)

    def target_layer(x):
        # Local import keeps the Lambda layer self-contained/serializable.
        import tensorflow.keras.backend as K
        return K.log(x)

    lambda_layer = keras.layers.Lambda(target_layer, name=keras_name)
    layers[node_name] = lambda_layer(input_0)
    lambda_func[keras_name] = target_layer
Example #6
Source File: train.py From EfficientDet with Apache License 2.0 | 6 votes |
def check_args(parsed_args):
    """Validate parsed CLI arguments for inherent contradictions.

    For example, batch_size < num_gpus. Intended to raise errors prior to
    backend initialisation.

    Args
        parsed_args: parser.parse_args()

    Returns
        parsed_args
    """
    if parsed_args.gpu:
        num_gpus = len(parsed_args.gpu.split(','))
        if parsed_args.batch_size < num_gpus:
            raise ValueError(
                "Batch size ({}) must be equal to or higher than the number of GPUs ({})".format(
                    parsed_args.batch_size, num_gpus))
    return parsed_args
Example #7
Source File: imagenet_densenet.py From DeepPoseKit with Apache License 2.0 | 6 votes |
def transition_block(x, reduction, name, pool=True):
    """A transition block.

    # Arguments
        x: input tensor.
        reduction: float, compression rate at transition layers.
        name: string, block label.

    # Returns
        output tensor for the block.
    """
    channel_axis = 3 if backend.image_data_format() == "channels_last" else 1

    out = layers.BatchNormalization(
        axis=channel_axis, epsilon=1.001e-5, name=name + "_bn")(x)
    out = layers.Activation("relu", name=name + "_relu")(out)

    # Compress the channel count by `reduction`.
    compressed_channels = int(backend.int_shape(out)[channel_axis] * reduction)
    out = layers.Conv2D(
        compressed_channels,
        1,
        use_bias=False,
        name=name + "_conv",
    )(out)

    if pool:
        out = layers.AveragePooling2D(2, strides=2, name=name + "_pool")(out)
    return out
Example #8
Source File: imagenet_utils.py From DeepPoseKit with Apache License 2.0 | 6 votes |
def correct_pad(backend, inputs, kernel_size):
    """Returns a tuple for zero-padding for 2D convolution with downsampling.

    # Arguments
        input_size: An integer or tuple/list of 2 integers.
        kernel_size: An integer or tuple/list of 2 integers.

    # Returns
        A tuple.
    """
    # Spatial dims start at axis 2 for channels_first, axis 1 otherwise.
    first_spatial = 2 if backend.image_data_format() == 'channels_first' else 1
    rows, cols = backend.int_shape(inputs)[first_spatial:first_spatial + 2]

    if isinstance(kernel_size, int):
        kernel_size = (kernel_size, kernel_size)

    # Unknown static size: pad symmetrically by the "even size" rule.
    adjust = (1, 1) if rows is None else (1 - rows % 2, 1 - cols % 2)

    half_h, half_w = kernel_size[0] // 2, kernel_size[1] // 2
    return ((half_h - adjust[0], half_h), (half_w - adjust[1], half_w))
Example #9
Source File: __init__.py From garbage_classify with Apache License 2.0 | 5 votes |
def get_submodules_from_kwargs(kwargs):
    """Extract the keras submodules (backend, layers, models, utils) from
    kwargs, falling back to the module-level defaults.

    :param kwargs: dict of keyword arguments forwarded by the caller.
    :return: tuple (backend, layers, models, utils).
    :raises TypeError: if kwargs contains any unexpected key.
    """
    backend = kwargs.get('backend', _KERAS_BACKEND)
    layers = kwargs.get('layers', _KERAS_LAYERS)
    models = kwargs.get('models', _KERAS_MODELS)
    utils = kwargs.get('utils', _KERAS_UTILS)
    for key in kwargs.keys():
        if key not in ['backend', 'layers', 'models', 'utils']:
            # BUG FIX: TypeError('...%s', key) never interpolated the key into
            # the message (it became a two-element args tuple); format it.
            raise TypeError('Invalid keyword argument: %s' % key)
    return backend, layers, models, utils
Example #10
Source File: operation_layers.py From onnx2keras with MIT License | 5 votes |
def convert_reduce_sum(node, params, layers, lambda_func, node_name, keras_name):
    """
    Convert reduce sum.
    :param node: current operation node
    :param params: operation attributes
    :param layers: available keras layers
    :param lambda_func: function for keras Lambda layer
    :param node_name: internal converter name
    :param keras_name: resulting layer name
    :return: None
    """
    # BUG FIX: the original used `assert AttributeError(...)`, which asserts a
    # truthy exception instance and therefore never fires; raise explicitly.
    if len(node.input) != 1:
        raise AttributeError('More than 1 input for reduce sum layer.')

    input_0 = ensure_tf_type(layers[node.input[0]], name="%s_const" % keras_name)

    axis = params['axes']

    def target_layer(x, axis=axis):
        # Local import keeps the Lambda layer self-contained/serializable.
        import tensorflow.keras.backend as K
        return K.sum(x, keepdims=True, axis=axis)

    lambda_layer = keras.layers.Lambda(target_layer, name=keras_name)
    layers[node_name] = lambda_layer(input_0)
    # Re-assert the static shape so downstream converters can rely on it.
    layers[node_name].set_shape(layers[node_name].shape)
    lambda_func[keras_name] = target_layer
Example #11
Source File: __init__.py From segmentation_models with MIT License | 5 votes |
def get_preprocessing(name):
    """Return the preprocessing function for backbone ``name``, wrapped so it
    receives the global keras submodules and ignores any other kwargs."""
    fn = Backbones.get_preprocessing(name)
    # inject backend, models, layers, utils submodules into kwargs
    fn = inject_global_submodules(fn)
    # keras-applications preprocessing raises an error if anything except
    # `backend`, `layers`, `models`, `utils` is passed in kwargs, so strip
    # every other kwarg before the call.
    return filter_kwargs(fn)
Example #12
Source File: __init__.py From segmentation_models with MIT License | 5 votes |
def filter_kwargs(func):
    """Decorator that drops every keyword argument except the keras
    submodule ones (``backend``, ``layers``, ``models``, ``utils``)."""
    allowed = ('backend', 'layers', 'models', 'utils')

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        kept = {key: value for key, value in kwargs.items() if key in allowed}
        return func(*args, **kept)

    return wrapper
Example #13
Source File: __init__.py From segmentation_models with MIT License | 5 votes |
def inject_global_submodules(func):
    """Decorator that forces the globally configured keras submodules into
    ``func``'s kwargs (overriding any caller-supplied values)."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        kwargs.update(
            backend=_KERAS_BACKEND,
            layers=_KERAS_LAYERS,
            models=_KERAS_MODELS,
            utils=_KERAS_UTILS,
        )
        return func(*args, **kwargs)

    return wrapper
Example #14
Source File: __init__.py From garbage_classify with Apache License 2.0 | 5 votes |
def inject_tfkeras_modules(func):
    """Decorator that fills kwargs with the tf.keras submodules
    (backend/layers/models/utils) before delegating to ``func``."""
    import tensorflow.keras as tfkeras

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        kwargs.update(
            backend=tfkeras.backend,
            layers=tfkeras.layers,
            models=tfkeras.models,
            utils=tfkeras.utils,
        )
        return func(*args, **kwargs)

    return wrapper
Example #15
Source File: __init__.py From garbage_classify with Apache License 2.0 | 5 votes |
def inject_keras_modules(func):
    """Decorator that fills kwargs with the standalone keras submodules
    (backend/layers/models/utils) before delegating to ``func``."""
    import keras

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        kwargs.update(
            backend=keras.backend,
            layers=keras.layers,
            models=keras.models,
            utils=keras.utils,
        )
        return func(*args, **kwargs)

    return wrapper
Example #16
Source File: tfkeras.py From classification_models with MIT License | 5 votes |
def get_kwargs():
    """Return the tf.keras submodules in the kwargs shape expected by
    keras_applications-style model builders."""
    submodule_names = ('backend', 'layers', 'models', 'utils')
    return {name: getattr(tfkeras, name) for name in submodule_names}
Example #17
Source File: imagenet_utils.py From DeepPoseKit with Apache License 2.0 | 5 votes |
def preprocess_input(x, data_format=None, mode='caffe', **kwargs):
    """Preprocesses a tensor or Numpy array encoding a batch of images.

    # Arguments
        x: Input Numpy or symbolic tensor, 3D or 4D.
            The preprocessed data is written over the input data
            if the data types are compatible. To avoid this
            behaviour, `numpy.copy(x)` can be used.
        data_format: Data format of the image tensor/array.
        mode: One of "caffe", "tf" or "torch".
            - caffe: will convert the images from RGB to BGR,
                then will zero-center each color channel with
                respect to the ImageNet dataset,
                without scaling.
            - tf: will scale pixels between -1 and 1,
                sample-wise.
            - torch: will scale pixels between 0 and 1 and then
                will normalize each channel with respect to the
                ImageNet dataset.

    # Returns
        Preprocessed tensor or Numpy array.

    # Raises
        ValueError: In case of unknown `data_format` argument.
    """
    if data_format is None:
        data_format = backend.image_data_format()
    if data_format not in {'channels_first', 'channels_last'}:
        raise ValueError('Unknown data_format ' + str(data_format))

    # Numpy arrays take the eager path; everything else is treated as a
    # symbolic backend tensor.
    handler = (_preprocess_numpy_input if isinstance(x, np.ndarray)
               else _preprocess_symbolic_input)
    return handler(x, data_format=data_format, mode=mode, **kwargs)
Example #18
Source File: imagenet_densenet.py From DeepPoseKit with Apache License 2.0 | 5 votes |
def conv_block(x, growth_rate, name, dilation=1):
    """A building block for a dense block.

    # Arguments
        x: input tensor.
        growth_rate: float, growth rate at dense layers.
        name: string, block label.

    # Returns
        Output tensor for the block.
    """
    channel_axis = 3 if backend.image_data_format() == "channels_last" else 1

    # Bottleneck: BN -> ReLU -> 1x1 conv widening to 4 * growth_rate.
    branch = layers.BatchNormalization(
        axis=channel_axis, epsilon=1.001e-5, name=name + "_0_bn")(x)
    branch = layers.Activation("relu", name=name + "_0_relu")(branch)
    branch = layers.Conv2D(
        4 * growth_rate, 1, use_bias=False, name=name + "_1_conv")(branch)

    # Main conv: BN -> ReLU -> 3x3 (optionally dilated) conv to growth_rate.
    branch = layers.BatchNormalization(
        axis=channel_axis, epsilon=1.001e-5, name=name + "_1_bn")(branch)
    branch = layers.Activation("relu", name=name + "_1_relu")(branch)
    branch = layers.Conv2D(
        growth_rate,
        3,
        padding="same",
        use_bias=False,
        dilation_rate=dilation,
        name=name + "_2_conv",
    )(branch)

    # Dense connectivity: concatenate the input with the new features.
    return layers.Concatenate(axis=channel_axis, name=name + "_concat")([x, branch])
Example #19
Source File: __init__.py From EfficientDet with Apache License 2.0 | 5 votes |
def inject_tfkeras_modules(func):
    """Decorator that supplies the tf.keras submodules
    (backend/layers/models/utils) as kwargs to ``func``."""
    import tensorflow.keras as tfkeras

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        for attr in ('backend', 'layers', 'models', 'utils'):
            kwargs[attr] = getattr(tfkeras, attr)
        return func(*args, **kwargs)

    return wrapper
Example #20
Source File: __init__.py From EfficientDet with Apache License 2.0 | 5 votes |
def inject_keras_modules(func):
    """Decorator that supplies the standalone keras submodules
    (backend/layers/models/utils) as kwargs to ``func``."""
    import keras

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        for attr in ('backend', 'layers', 'models', 'utils'):
            kwargs[attr] = getattr(keras, attr)
        return func(*args, **kwargs)

    return wrapper
Example #21
Source File: __init__.py From EfficientDet with Apache License 2.0 | 5 votes |
def get_submodules_from_kwargs(kwargs):
    """Extract the keras submodules (backend, layers, models, utils) from
    kwargs, falling back to the module-level defaults.

    :param kwargs: dict of keyword arguments forwarded by the caller.
    :return: tuple (backend, layers, models, utils).
    :raises TypeError: if kwargs contains any unexpected key.
    """
    backend = kwargs.get('backend', _KERAS_BACKEND)
    layers = kwargs.get('layers', _KERAS_LAYERS)
    models = kwargs.get('models', _KERAS_MODELS)
    utils = kwargs.get('utils', _KERAS_UTILS)
    for key in kwargs.keys():
        if key not in ['backend', 'layers', 'models', 'utils']:
            # BUG FIX: TypeError('...%s', key) never interpolated the key into
            # the message (it became a two-element args tuple); format it.
            raise TypeError('Invalid keyword argument: %s' % key)
    return backend, layers, models, utils
Example #22
Source File: __init__.py From efficientnet with Apache License 2.0 | 5 votes |
def inject_tfkeras_modules(func):
    """Decorator that supplies the tf.keras submodules
    (backend/layers/models/utils) as kwargs to ``func``."""
    import tensorflow.keras as tfkeras

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        kwargs.update(
            backend=tfkeras.backend,
            layers=tfkeras.layers,
            models=tfkeras.models,
            utils=tfkeras.utils,
        )
        return func(*args, **kwargs)

    return wrapper
Example #23
Source File: __init__.py From efficientnet with Apache License 2.0 | 5 votes |
def inject_keras_modules(func):
    """Decorator that supplies the standalone keras submodules
    (backend/layers/models/utils) as kwargs to ``func``."""
    import keras

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        kwargs.update(
            backend=keras.backend,
            layers=keras.layers,
            models=keras.models,
            utils=keras.utils,
        )
        return func(*args, **kwargs)

    return wrapper
Example #24
Source File: __init__.py From efficientnet with Apache License 2.0 | 5 votes |
def get_submodules_from_kwargs(kwargs):
    """Extract the keras submodules (backend, layers, models, utils) from
    kwargs, falling back to the module-level defaults.

    :param kwargs: dict of keyword arguments forwarded by the caller.
    :return: tuple (backend, layers, models, utils).
    :raises TypeError: if kwargs contains any unexpected key.
    """
    backend = kwargs.get('backend', _KERAS_BACKEND)
    layers = kwargs.get('layers', _KERAS_LAYERS)
    models = kwargs.get('models', _KERAS_MODELS)
    utils = kwargs.get('utils', _KERAS_UTILS)
    for key in kwargs.keys():
        if key not in ['backend', 'layers', 'models', 'utils']:
            # BUG FIX: TypeError('...%s', key) never interpolated the key into
            # the message (it became a two-element args tuple); format it.
            raise TypeError('Invalid keyword argument: %s' % key)
    return backend, layers, models, utils
Example #25
Source File: imagenet_utils.py From DeepPoseKit with Apache License 2.0 | 4 votes |
def _preprocess_symbolic_input(x, data_format, mode, **kwargs):
    """Preprocesses a tensor encoding a batch of images.

    # Arguments
        x: Input tensor, 3D or 4D.
        data_format: Data format of the image tensor.
        mode: One of "caffe", "tf" or "torch".
            - caffe: will convert the images from RGB to BGR,
                then will zero-center each color channel with
                respect to the ImageNet dataset,
                without scaling.
            - tf: will scale pixels between -1 and 1,
                sample-wise.
            - torch: will scale pixels between 0 and 1 and then
                will normalize each channel with respect to the
                ImageNet dataset.

    # Returns
        Preprocessed tensor.
    """
    if mode == 'tf':
        # Sample-wise scaling to [-1, 1]; no channel statistics involved.
        x /= 127.5
        x -= 1.
        return x

    if mode == 'torch':
        # Scale to [0, 1]; ImageNet mean/std (RGB order) applied below.
        x /= 255.
        mean = [0.485, 0.456, 0.406]
        std = [0.229, 0.224, 0.225]
    else:
        # 'caffe' mode: convert to BGR and zero-center with ImageNet means
        # (values below are in BGR order); no scaling.
        if data_format == 'channels_first':
            # 'RGB'->'BGR'
            if backend.ndim(x) == 3:
                x = x[::-1, ...]
            else:
                x = x[:, ::-1, ...]
        else:
            # 'RGB'->'BGR'
            x = x[..., ::-1]
        mean = [103.939, 116.779, 123.68]
        std = None

    # Mean is negated so bias_add effectively subtracts the channel means.
    mean_tensor = backend.constant(-np.array(mean))

    # Zero-center by mean pixel
    if backend.dtype(x) != backend.dtype(mean_tensor):
        x = backend.bias_add(
            x, backend.cast(mean_tensor, backend.dtype(x)),
            data_format=data_format)
    else:
        x = backend.bias_add(x, mean_tensor, data_format)
    if std is not None:
        # NOTE(review): dividing by a plain Python list here relies on the
        # backend broadcasting it along the channel axis — appears to assume
        # channels_last for 'torch' mode; confirm against callers.
        x /= std
    return x
Example #26
Source File: __init__.py From segmentation_models with MIT License | 4 votes |
def set_framework(name):
    """Set framework for Segmentation Models

    Args:
        name (str): one of ``keras``, ``tf.keras``, case insensitive.

    Raises:
        ValueError: in case of incorrect framework name.
        ImportError: in case framework is not installed.
    """
    name = name.lower()

    if name == _KERAS_FRAMEWORK_NAME:
        import keras
        import efficientnet.keras  # init custom objects
    elif name == _TF_KERAS_FRAMEWORK_NAME:
        from tensorflow import keras
        import efficientnet.tfkeras  # init custom objects
    else:
        raise ValueError('Not correct module name `{}`, use `{}` or `{}`'.format(
            name, _KERAS_FRAMEWORK_NAME, _TF_KERAS_FRAMEWORK_NAME))

    global _KERAS_BACKEND, _KERAS_LAYERS, _KERAS_MODELS
    global _KERAS_UTILS, _KERAS_LOSSES, _KERAS_FRAMEWORK

    # Record the chosen framework and cache its submodules globally.
    _KERAS_FRAMEWORK = name
    _KERAS_BACKEND, _KERAS_LAYERS = keras.backend, keras.layers
    _KERAS_MODELS, _KERAS_UTILS = keras.models, keras.utils
    _KERAS_LOSSES = keras.losses

    # allow losses/metrics get keras submodules
    base.KerasObject.set_submodules(
        backend=keras.backend,
        layers=keras.layers,
        models=keras.models,
        utils=keras.utils,
    )


# set default framework