Python lasagne.nonlinearities.elu() Examples
The following are 22 code examples of lasagne.nonlinearities.elu().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions/classes of the module lasagne.nonlinearities, or try the search function.
Example #1
Source File: lasagne_net.py From BirdCLEF-Baseline with MIT License | 6 votes |
def initialization(name):
    """Return a He-normal weight initializer whose gain matches *name*.

    *name* is the short name of the nonlinearity the initialized layer
    will use (e.g. 'relu', 'elu', 'sigmoid').  Raises KeyError for an
    unknown name.
    """
    relu_gain = math.sqrt(2)
    gain_table = {
        'sigmoid': 1.0,
        'softmax': 1.0,
        'elu': 1.0,
        'relu': relu_gain,
        'lrelu': math.sqrt(2 / (1 + 0.01 ** 2)),
        'vlrelu': math.sqrt(2 / (1 + 0.33 ** 2)),
        'rectify': relu_gain,
        'identity': relu_gain,
    }
    return init.HeNormal(gain=gain_table[name])

#################### BASELINE MODEL #####################
Example #2
Source File: layers.py From Neural-Photo-Editor with MIT License | 5 votes |
def pd(num_layers=2, num_filters=32, filter_size=(3, 3), pad=1, stride=(1, 1),
       nonlinearity=elu, style='convolutional', bnorm=1, **kwargs):
    """Broadcast per-layer hyperparameters into per-layer lists.

    Every argument except *num_layers* is returned in a dict keyed by its
    parameter name; scalar values are repeated ``num_layers`` times, while
    values that are already lists are passed through unchanged.
    """
    # Snapshot the arguments before any other locals are created.
    input_args = locals()
    input_args.pop('num_layers')
    # FIX: .iteritems() is Python-2-only and raises AttributeError on
    # Python 3; .items() behaves identically on both.  isinstance() is
    # also preferred over `type(entry) is list` (accepts list subclasses).
    return {key: entry if isinstance(entry, list) else [entry] * num_layers
            for key, entry in input_args.items()}

# Possible Conv2DDNN convenience function. Remember to delete the C2D import at the top if you use this
# def C2D(incoming = None, num_filters = 32, filter_size= [3,3],pad = 'same',stride = [1,1], W = initmethod('relu'),nonlinearity = elu,name = None):
#     return lasagne.layers.dnn.Conv2DDNNLayer(incoming,num_filters,filter_size,stride,pad,False,W,None,nonlinearity,False)

# Shape-Preserving Gaussian Sample layer for latent vectors with spatial dimensions.
# This is a holdover from an "old" (i.e. I abandoned it last month) idea.
Example #3
Source File: lasagne_net.py From BirdCLEF-Baseline with MIT License | 5 votes |
def nonlinearity(name):
    """Look up a Lasagne nonlinearity object by its short name.

    Raises KeyError for a name not in the table.
    """
    table = {
        'rectify': nl.rectify,
        'relu': nl.rectify,
        'lrelu': nl.LeakyRectify(0.01),
        'vlrelu': nl.LeakyRectify(0.33),
        'elu': nl.elu,
        'softmax': nl.softmax,
        'sigmoid': nl.sigmoid,
        'identity': nl.identity,
    }
    return table[name]
Example #4
Source File: VRN.py From Generative-and-Discriminative-Voxel-Modeling with MIT License | 5 votes |
def ResDropNoPre(incoming, IB, p):
    """Residual block with stochastic depth and no pre-activation.

    The inner block *IB* is wrapped in an IfElseDropLayer (survival
    probability *p*), summed with the identity path, then passed
    through an elu nonlinearity.
    """
    dropped_branch = IfElseDropLayer(IB, survival_p=p)
    merged = ESL([dropped_branch, incoming])
    return NL(merged, elu)
Example #5
Source File: VRN.py From Generative-and-Discriminative-Voxel-Modeling with MIT License | 5 votes |
def __init__(self, incoming, nonlinearity=elu, survival_p=0.5, **kwargs):
    """Initialize a stochastic-depth drop layer.

    Falls back to the identity nonlinearity when *nonlinearity* is None.
    Stores the complement of *survival_p* in ``self.p``.
    """
    super(IfElseDropLayer, self).__init__(incoming, **kwargs)
    if nonlinearity is None:
        self.nonlinearity = identity
    else:
        self.nonlinearity = nonlinearity
    seed = lasagne.random.get_rng().randint(1, 2147462579)
    self._srng = RandomStreams(seed)
    self.p = 1 - survival_p
Example #6
Source File: VRN.py From Generative-and-Discriminative-Voxel-Modeling with MIT License | 5 votes |
def ResLayer(incoming, IB):
    """Plain residual connection: elu applied to IB + incoming."""
    residual_sum = ESL([IB, incoming])
    return NL(residual_sum, elu)

# If-else Drop Layer, adopted from Christopher Beckham's recipe:
# https://github.com/Lasagne/Recipes/pull/67
Example #7
Source File: ensemble_model4.py From Generative-and-Discriminative-Voxel-Modeling with MIT License | 5 votes |
def ResDropNoPre(incoming, IB, p):
    """Stochastic-depth residual block (no pre-activation).

    Wraps *IB* in an IfElseDropLayer with survival probability *p*,
    adds the identity path, and applies elu.
    """
    stochastic_branch = IfElseDropLayer(IB, survival_p=p)
    summed = ESL([stochastic_branch, incoming])
    return NL(summed, elu)
Example #8
Source File: ensemble_model4.py From Generative-and-Discriminative-Voxel-Modeling with MIT License | 5 votes |
def __init__(self, incoming, nonlinearity=elu, survival_p=0.5, **kwargs):
    """Set up the stochastic-depth drop layer.

    A None *nonlinearity* is replaced by the identity function; the
    complement of *survival_p* is cached on ``self.p``.
    """
    super(IfElseDropLayer, self).__init__(incoming, **kwargs)
    if nonlinearity is None:
        self.nonlinearity = identity
    else:
        self.nonlinearity = nonlinearity
    rng_seed = lasagne.random.get_rng().randint(1, 2147462579)
    self._srng = RandomStreams(rng_seed)
    self.p = 1 - survival_p
Example #9
Source File: ensemble_model4.py From Generative-and-Discriminative-Voxel-Modeling with MIT License | 5 votes |
def ResLayer(incoming, IB):
    """Residual connection: sum the block output with its input, then elu."""
    skip_sum = ESL([IB, incoming])
    return NL(skip_sum, elu)
Example #10
Source File: ensemble_model5.py From Generative-and-Discriminative-Voxel-Modeling with MIT License | 5 votes |
def ResDropNoPre(incoming, IB, p):
    """Residual block with stochastic depth (no pre-activation).

    *IB* survives with probability *p*; its (possibly dropped) output
    is added to the identity path and passed through elu.
    """
    gated = IfElseDropLayer(IB, survival_p=p)
    combined = ESL([gated, incoming])
    return NL(combined, elu)
Example #11
Source File: ensemble_model5.py From Generative-and-Discriminative-Voxel-Modeling with MIT License | 5 votes |
def get_output_for(self, input, deterministic=False, **kwargs):
    # Stochastic-depth forward pass.
    #
    # Deterministic (test-time) path: scale the input by self.p so its
    # expected magnitude matches the stochastic training path.
    # NOTE(review): __init__ elsewhere in this file sets
    # self.p = 1 - survival_p, so self.p reads as a *drop* probability
    # while being used here as the keep/scale factor -- confirm against
    # the upstream Lasagne recipe before relying on this.
    if deterministic:
        return self.p*input
    else:
        # Training path: draw one uniform sample per call; if it falls
        # below self.p the input passes through unchanged, otherwise the
        # whole layer is dropped (zeros of the same symbolic shape).
        return theano.ifelse.ifelse(
            T.lt(self._srng.uniform( (1,), 0, 1)[0], self.p),
            input,
            T.zeros(input.shape)
            )
# def ResDrop(incoming, IB, p):
#     return NL(ESL([IfElseDropLayer(IB,survival_p=p),incoming]),elu)
Example #12
Source File: ensemble_model5.py From Generative-and-Discriminative-Voxel-Modeling with MIT License | 5 votes |
def __init__(self, incoming, nonlinearity=elu, survival_p=0.5, **kwargs):
    """Construct the stochastic-depth drop layer.

    Stores *nonlinearity* (identity when None) and the complement of
    *survival_p* on ``self.p``; seeds a Theano RandomStreams instance.
    """
    super(IfElseDropLayer, self).__init__(incoming, **kwargs)
    if nonlinearity is None:
        self.nonlinearity = identity
    else:
        self.nonlinearity = nonlinearity
    seed = lasagne.random.get_rng().randint(1, 2147462579)
    self._srng = RandomStreams(seed)
    self.p = 1 - survival_p
Example #13
Source File: ensemble_model1.py From Generative-and-Discriminative-Voxel-Modeling with MIT License | 5 votes |
def ResDrop(incoming, IB, p):
    """Residual block with stochastic depth.

    The inner block *IB* is kept with survival probability *p*; its
    output joins the identity path before the elu nonlinearity.
    """
    dropped = IfElseDropLayer(IB, survival_p=p)
    merged = ESL([dropped, incoming])
    return NL(merged, elu)
Example #14
Source File: ensemble_model1.py From Generative-and-Discriminative-Voxel-Modeling with MIT License | 5 votes |
def __init__(self, incoming, nonlinearity=elu, survival_p=0.5, **kwargs):
    """Initialize the if-else drop layer (stochastic depth).

    None for *nonlinearity* means identity; ``self.p`` is set to
    ``1 - survival_p``.
    """
    super(IfElseDropLayer, self).__init__(incoming, **kwargs)
    if nonlinearity is None:
        self.nonlinearity = identity
    else:
        self.nonlinearity = nonlinearity
    rng_seed = lasagne.random.get_rng().randint(1, 2147462579)
    self._srng = RandomStreams(rng_seed)
    self.p = 1 - survival_p
Example #15
Source File: ensemble_model1.py From Generative-and-Discriminative-Voxel-Modeling with MIT License | 5 votes |
def ResLayer(incoming, IB):
    """Residual connection: elu of the element-wise sum of IB and incoming."""
    summed = ESL([IB, incoming])
    return NL(summed, elu)
Example #16
Source File: ensemble_model6.py From Generative-and-Discriminative-Voxel-Modeling with MIT License | 5 votes |
def ResDropNoPre(incoming, IB, p):
    """Stochastic-depth residual block without pre-activation.

    *IB* passes through an IfElseDropLayer (survival probability *p*),
    is summed with the skip path, and finishes with elu.
    """
    branch = IfElseDropLayer(IB, survival_p=p)
    total = ESL([branch, incoming])
    return NL(total, elu)
Example #17
Source File: ensemble_model6.py From Generative-and-Discriminative-Voxel-Modeling with MIT License | 5 votes |
def get_output_for(self, input, deterministic=False, **kwargs):
    # Stochastic-depth forward pass.
    #
    # Deterministic (test-time) path: scale the input by self.p so its
    # expected magnitude matches the stochastic training path.
    # NOTE(review): __init__ elsewhere in this file sets
    # self.p = 1 - survival_p, so self.p reads as a *drop* probability
    # while being used here as the keep/scale factor -- confirm against
    # the upstream Lasagne recipe before relying on this.
    if deterministic:
        return self.p*input
    else:
        # Training path: draw one uniform sample per call; if it falls
        # below self.p the input passes through unchanged, otherwise the
        # whole layer is dropped (zeros of the same symbolic shape).
        return theano.ifelse.ifelse(
            T.lt(self._srng.uniform( (1,), 0, 1)[0], self.p),
            input,
            T.zeros(input.shape)
            )
# def ResDrop(incoming, IB, p):
#     return NL(ESL([IfElseDropLayer(IB,survival_p=p),incoming]),elu)
Example #18
Source File: ensemble_model6.py From Generative-and-Discriminative-Voxel-Modeling with MIT License | 5 votes |
def ResLayer(incoming, IB):
    """Residual connection: add the inner block to the skip path, apply elu."""
    skip_plus_block = ESL([IB, incoming])
    return NL(skip_plus_block, elu)
Example #19
Source File: ensemble_model3.py From Generative-and-Discriminative-Voxel-Modeling with MIT License | 5 votes |
def ResDropNoPre(incoming, IB, p):
    """Residual block with stochastic depth, no pre-activation.

    Drops *IB* with survival probability *p*, adds the identity path,
    and applies elu to the sum.
    """
    maybe_dropped = IfElseDropLayer(IB, survival_p=p)
    joined = ESL([maybe_dropped, incoming])
    return NL(joined, elu)
Example #20
Source File: ensemble_model3.py From Generative-and-Discriminative-Voxel-Modeling with MIT License | 5 votes |
def get_output_for(self, input, deterministic=False, **kwargs):
    # Stochastic-depth forward pass.
    #
    # Deterministic (test-time) path: scale the input by self.p so its
    # expected magnitude matches the stochastic training path.
    # NOTE(review): __init__ elsewhere in this file sets
    # self.p = 1 - survival_p, so self.p reads as a *drop* probability
    # while being used here as the keep/scale factor -- confirm against
    # the upstream Lasagne recipe before relying on this.
    if deterministic:
        return self.p*input
    else:
        # Training path: draw one uniform sample per call; if it falls
        # below self.p the input passes through unchanged, otherwise the
        # whole layer is dropped (zeros of the same symbolic shape).
        return theano.ifelse.ifelse(
            T.lt(self._srng.uniform( (1,), 0, 1)[0], self.p),
            input,
            T.zeros(input.shape)
            )
# def ResDrop(incoming, IB, p):
#     return NL(ESL([IfElseDropLayer(IB,survival_p=p),incoming]),elu)
Example #21
Source File: ensemble_model3.py From Generative-and-Discriminative-Voxel-Modeling with MIT License | 5 votes |
def __init__(self, incoming, nonlinearity=elu, survival_p=0.5, **kwargs):
    """Build the stochastic-depth drop layer.

    Uses identity when *nonlinearity* is None and records
    ``1 - survival_p`` on ``self.p``.
    """
    super(IfElseDropLayer, self).__init__(incoming, **kwargs)
    if nonlinearity is None:
        self.nonlinearity = identity
    else:
        self.nonlinearity = nonlinearity
    seed = lasagne.random.get_rng().randint(1, 2147462579)
    self._srng = RandomStreams(seed)
    self.p = 1 - survival_p
Example #22
Source File: ensemble_model3.py From Generative-and-Discriminative-Voxel-Modeling with MIT License | 5 votes |
def ResLayer(incoming, IB):
    """Residual connection: elu(IB + incoming)."""
    merged = ESL([IB, incoming])
    return NL(merged, elu)