Python lasagne.layers.Layer() Examples
The following are 5 code examples of lasagne.layers.Layer(), drawn from open-source projects. Each example names its source file, project, and license. You may also want to check out the other available functions and classes of the lasagne.layers module.
Example #1
Source File: padded.py From reseg with GNU General Public License v3.0
def __init__(self, incoming, pool_size, stride=None, pad=(0, 0),
             ignore_border=True, centered=True, **kwargs):
    """A padded pooling layer

    Parameters
    ----------
    incoming : lasagne.layers.Layer
        The input layer
    pool_size : int
        The size of the pooling
    stride : int or iterable of int
        The stride or subsampling of the pooling
    pad : int, iterable of int, ``full``, ``same`` or ``valid``
        **Ignored!** Kept for compatibility with the
        :class:``lasagne.layers.Pool2DLayer``
    ignore_border : bool
        See :class:``lasagne.layers.Pool2DLayer``
    centered : bool
        If True, the padding will be added on both sides. If False,
        the zero padding will be applied on the upper left side.
    **kwargs
        Any additional keyword arguments are passed to the Layer
        superclass
    """
    self.centered = centered
    if pad not in [0, (0, 0), [0, 0]]:
        warnings.warn('The specified padding will be ignored',
                      RuntimeWarning)
    super(PaddedPool2DLayer, self).__init__(incoming, pool_size, stride,
                                            pad, ignore_border, **kwargs)
    if self.input_shape[2:] != (None, None):
        warnings.warn('This Layer should only be used when the size of '
                      'the image is not known', RuntimeWarning)
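For context, a minimal construction sketch for this layer. The import path (the project's padded.py) is an assumption, and the spatial dimensions are deliberately left unknown, as the layer expects:

import lasagne
from padded import PaddedPool2DLayer  # import path is an assumption

# Spatial dimensions are left as None: the layer warns when the image
# size is known in advance.
l_in = lasagne.layers.InputLayer((None, 3, None, None))
l_pool = PaddedPool2DLayer(l_in, pool_size=2, stride=2, centered=True)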
Example #2
Source File: padded.py From reseg with GNU General Public License v3.0
def __init__(self, l_in, patch_size, stride, data_format='bc01',
             centered=True, name='', **kwargs):
    """A Layer that zero-pads the input

    Parameters
    ----------
    l_in : lasagne.layers.Layer
        The input layer
    patch_size : iterable of int
        The patch size
    stride : iterable of int
        The stride
    data_format : string
        The format of l_in, either `b01c` (batch, rows, cols, channels)
        or `bc01` (batch, channels, rows, cols)
    centered : bool
        If True, the padding will be added on both sides. If False,
        the zero padding will be applied on the upper left side.
    name : string
        The name of the layer, optional
    """
    super(DynamicPaddingLayer, self).__init__(l_in, name, **kwargs)
    self.l_in = l_in
    self.patch_size = patch_size
    self.stride = stride
    self.data_format = data_format
    self.centered = centered
    self.name = name
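A minimal construction sketch, under the same assumption that DynamicPaddingLayer is importable from the project's padded.py:

import lasagne
from padded import DynamicPaddingLayer  # import path is an assumption

l_in = lasagne.layers.InputLayer((None, 3, None, None))
# Zero-pad so that 7x7 patches with stride 2 tile the unknown input size
l_pad = DynamicPaddingLayer(l_in, patch_size=(7, 7), stride=(2, 2),
                            data_format='bc01', centered=True)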
Example #3
Source File: padded.py From reseg with GNU General Public License v3.0
def get_equivalent_input_padding(layer, layers_args=None):
    """Compute the equivalent padding in the input layer

    A function to compute the equivalent padding of a sequence of
    convolutional and pooling layers. It memorizes the padding of all the
    Layers up to the first InputLayer. It then computes what would be the
    equivalent padding in the Layer immediately before the chain of Layers
    that is being taken into account.
    """
    # Avoid a shared mutable default argument
    if layers_args is None:
        layers_args = []
    # Initialize the DynamicPadding layers
    lasagne.layers.get_output(layer)
    # Loop through conv and pool layers to collect their pad and stride
    all_layers = get_all_layers(layer)
    for layer in all_layers:
        # Note: stride is numerical, but pad *could* be symbolic
        try:
            pad, stride = (layer.pad, layer.stride)
            if isinstance(pad, int):
                pad = pad, pad
            if isinstance(stride, int):
                stride = stride, stride
            layers_args.append((pad, stride))
        except AttributeError:
            pass

    # Loop backward to compute the equivalent padding in the input layer
    tot_pad = T.zeros(2)
    pad_factor = T.ones(2)
    while layers_args:
        pad, stride = layers_args.pop()
        tot_pad += pad * pad_factor
        pad_factor *= stride
    return tot_pad
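Since the function returns a symbolic Theano expression, a caller would typically compile and evaluate it. A sketch, assuming the function is imported from the project's padded.py and that every pad and stride in the chain is numeric, so the expression has no free inputs:

import lasagne
import theano
from padded import get_equivalent_input_padding  # path is an assumption

l_in = lasagne.layers.InputLayer((None, 3, None, None))
l_conv = lasagne.layers.Conv2DLayer(l_in, num_filters=8, filter_size=3,
                                    stride=2, pad=1)
pad_expr = get_equivalent_input_padding(l_conv)
pad_fn = theano.function([], pad_expr)
print(pad_fn())  # equivalent (rows, cols) padding at the input layer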
Example #4
Source File: padded.py From reseg with GNU General Public License v3.0
def __init__(self, incoming, num_filters, filter_size, stride=(1, 1),
             pad=0, untie_biases=False, W=init.GlorotUniform(),
             b=init.Constant(0.), nonlinearity=nonlinearities.rectify,
             flip_filters=True, convolution=theano.tensor.nnet.conv2d,
             centered=True, **kwargs):
    """A padded convolutional layer

    Note
    ----
    If used in place of a :class:``lasagne.layers.Conv2DLayer`` be
    sure to specify `flip_filters=False`, which is the default for
    that layer

    Parameters
    ----------
    incoming : lasagne.layers.Layer
        The input layer
    num_filters : int
        The number of filters or kernels of the convolution
    filter_size : int or iterable of int
        The size of the filters
    stride : int or iterable of int
        The stride or subsampling of the convolution
    pad : int, iterable of int, ``full``, ``same`` or ``valid``
        **Ignored!** Kept for compatibility with the
        :class:``lasagne.layers.Conv2DLayer``
    untie_biases : bool
        See :class:``lasagne.layers.Conv2DLayer``
    W : Theano shared variable, expression, numpy array or callable
        See :class:``lasagne.layers.Conv2DLayer``
    b : Theano shared variable, expression, numpy array, callable or None
        See :class:``lasagne.layers.Conv2DLayer``
    nonlinearity : callable or None
        See :class:``lasagne.layers.Conv2DLayer``
    flip_filters : bool
        See :class:``lasagne.layers.Conv2DLayer``
    convolution : callable
        See :class:``lasagne.layers.Conv2DLayer``
    centered : bool
        If True, the padding will be added on both sides. If False,
        the zero padding will be applied on the upper left side.
    **kwargs
        Any additional keyword arguments are passed to the
        :class:``lasagne.layers.Layer`` superclass
    """
    self.centered = centered
    if pad not in [0, (0, 0), [0, 0]]:
        warnings.warn('The specified padding will be ignored',
                      RuntimeWarning)
    super(PaddedConv2DLayer, self).__init__(incoming, num_filters,
                                            filter_size, stride, pad,
                                            untie_biases, W, b,
                                            nonlinearity, flip_filters,
                                            **kwargs)
    if self.input_shape[2:] != (None, None):
        warnings.warn('This Layer should only be used when the size of '
                      'the image is not known', RuntimeWarning)
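As with the pooling variant, a minimal construction sketch (import path assumed). Per the note above, flip_filters=False is passed explicitly when the layer stands in for a plain Conv2DLayer:

import lasagne
from padded import PaddedConv2DLayer  # import path is an assumption

l_in = lasagne.layers.InputLayer((None, 3, None, None))
l_conv = PaddedConv2DLayer(l_in, num_filters=64, filter_size=3,
                           flip_filters=False, centered=True)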
Example #5
Source File: batch_norms.py From u24_lymphocyte with BSD 3-Clause "New" or "Revised" License
def batch_norm(layer, **kwargs):
    """
    Apply batch normalization to an existing layer.

    This is a convenience function modifying an existing layer to include
    batch normalization: It will steal the layer's nonlinearity if there
    is one (effectively introducing the normalization right before the
    nonlinearity), remove the layer's bias if there is one (because it
    would be redundant), and add a :class:`BatchNormLayer` and
    :class:`NonlinearityLayer` on top.

    Parameters
    ----------
    layer : A :class:`Layer` instance
        The layer to apply the normalization to; note that it will be
        irreversibly modified as specified above
    **kwargs
        Any additional keyword arguments are passed on to the
        :class:`BatchNormLayer` constructor.

    Returns
    -------
    BatchNormLayer or NonlinearityLayer instance
        A batch normalization layer stacked on the given modified `layer`,
        or a nonlinearity layer stacked on top of both if `layer` was
        nonlinear.

    Examples
    --------
    Just wrap any layer into a :func:`batch_norm` call on creating it:

    >>> from lasagne.layers import InputLayer, DenseLayer, batch_norm
    >>> from lasagne.nonlinearities import tanh
    >>> l1 = InputLayer((64, 768))
    >>> l2 = batch_norm(DenseLayer(l1, num_units=500, nonlinearity=tanh))

    This introduces batch normalization right before its nonlinearity:

    >>> from lasagne.layers import get_all_layers
    >>> [l.__class__.__name__ for l in get_all_layers(l2)]
    ['InputLayer', 'DenseLayer', 'BatchNormLayer', 'NonlinearityLayer']
    """
    nonlinearity = getattr(layer, 'nonlinearity', None)
    if nonlinearity is not None:
        layer.nonlinearity = nonlinearities.identity
    if hasattr(layer, 'b') and layer.b is not None:
        del layer.params[layer.b]
        layer.b = None
    layer = BatchNormLayer(layer, **kwargs)
    if nonlinearity is not None:
        from lasagne.layers import NonlinearityLayer
        layer = NonlinearityLayer(layer, nonlinearity)
    return layer