Python keras.backend.random_binomial() Examples

The following are code examples showing how to use keras.backend.random_binomial(). They are drawn from open source Python projects.
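Throughout these examples the Keras backend is imported as K (from keras import backend as K). As a minimal standalone sketch of the call itself, not taken from any of the projects below: each element of the returned tensor is 1 with probability p and 0 otherwise.

from keras import backend as K

# Each element is 1 with probability p and 0 otherwise (an elementwise Bernoulli draw).
samples = K.random_binomial(shape=(3, 4), p=0.3, dtype=K.floatx())

# Evaluate to concrete values; K.eval runs the tensor on the current backend.
print(K.eval(samples))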

Example 1
Project: keras_bn_library   Author: bnsnapper   File: layers.py    MIT License
def call(self, x, mask=None):
		if self.mode == 'maximum_likelihood':
			# draw maximum likelihood sample from Bernoulli distribution
			#    x* = argmax_x p(x) = 1         if p(x=1) >= 0.5
			#                         0         otherwise
			return K.round(x)
		elif self.mode == 'random':
			# draw random sample from Bernoulli distribution
			#    x* = x ~ p(x) = 1              if p(x=1) > uniform(0, 1)
			#                    0              otherwise
			#return self.srng.binomial(size=x.shape, n=1, p=x, dtype=K.floatx())
			return K.random_binomial(x.shape, p=x, dtype=K.floatx())
		elif self.mode == 'mean_field':
			# draw mean-field approximation sample from Bernoulli distribution
			#    x* = E[p(x)] = E[Bern(x; p)] = p
			return x
		elif self.mode == 'nrlu':
			return nrlu(x)
		else:
			raise NotImplementedError('Unknown sample mode!') 
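The 'random' branch works because K.random_binomial(x.shape, p=x) draws a 1 with probability x elementwise, i.e. a Bernoulli sample of the activation probabilities. The same sampling can be written as a uniform threshold; the sketch below is that standard construction, not code from keras_bn_library:

def bernoulli_sample(p):
    # 1 where a uniform(0, 1) draw falls below p, else 0: an elementwise Bernoulli(p) sample.
    u = K.random_uniform(K.shape(p), minval=0.0, maxval=1.0)
    return K.cast(u <= p, K.floatx())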
Example 2
Project: keras_bn_library   Author: bnsnapper   File: rbm.py    MIT License
def sample_h_given_x(self, x):
		h_pre = K.dot(x, self.Wrbm) + self.bh	
		h_sigm = self.activation(self.scaling_h_given_x * h_pre)

		# drop out noise
		#if(0.0 < self.p < 1.0):
		#	noise_shape = self._get_noise_shape(h_sigm)
		#	h_sigm = K.in_train_phase(K.dropout(h_sigm, self.p, noise_shape), h_sigm)
		
		if(self.hidden_unit_type == 'binary'):
			# random sample
			#   \hat{h} = 1,      if p(h=1|x) > uniform(0, 1)
			#             0,      otherwise
			h_samp = K.random_binomial(shape=h_sigm.shape, p=h_sigm)
		elif(self.hidden_unit_type == 'nrlu'):
			h_samp = nrlu(h_pre)
		else:
			h_samp = h_sigm

		if(0.0 < self.p < 1.0):
			noise_shape = self._get_noise_shape(h_samp)
			h_samp = K.in_train_phase(K.dropout(h_samp, self.p, noise_shape), h_samp)

		return h_samp, h_pre, h_sigm 
Example 3
Project: Keras-DropBlock   Author: MLearing   File: drop_block.py    MIT License
def _compute_drop_mask(self, shape):
        seq_length = shape[1]
        mask = K.random_binomial(shape, p=self._get_gamma(seq_length))
        mask *= self._compute_valid_seed_region(seq_length)
        mask = keras.layers.MaxPool1D(
            pool_size=self.block_size,
            padding='same',
            strides=1,
            data_format='channels_last',
        )(mask)
        return 1.0 - mask 
Example 4
Project: Keras-DropBlock   Author: MLearing   File: drop_block.py    MIT License
def _compute_drop_mask(self, shape):
        height, width = shape[1], shape[2]
        mask = K.random_binomial(shape, p=self._get_gamma(height, width))
        mask *= self._compute_valid_seed_region(height, width)
        mask = keras.layers.MaxPool2D(
            pool_size=(self.block_size, self.block_size),
            padding='same',
            strides=1,
            data_format='channels_last',
        )(mask)
        return 1.0 - mask 
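In both DropBlock variants the binomial draw only seeds the centres of the blocks to drop (with probability gamma from _get_gamma); the stride-1 MaxPool with 'same' padding then dilates each seed into a block_size-wide block of ones, and 1.0 - mask flips the result into a keep mask that is 0 inside dropped blocks. The surrounding layer is not shown; below is a sketch of how such a keep mask is typically applied, rescaling by the kept fraction as described in the DropBlock paper (an assumption, not the project's code):

def apply_drop_mask(inputs, mask):
    # Rescale so the expected activation magnitude matches the undropped input:
    # multiply by (total mask elements / number of kept elements).
    total = K.cast(K.prod(K.shape(mask)), K.floatx())
    kept = K.sum(mask) + K.epsilon()
    return inputs * mask * (total / kept)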
Example 5
Project: keras_bn_library   Author: bnsnapper   File: rbm.py    MIT License
def sample_x_given_h(self, h):
		x_pre = K.dot(h, self.Wrbm.T) + self.bx 

		if(self.visible_unit_type == 'gaussian'):
			x_samp = self.scaling_x_given_h  * x_pre
			return x_samp, x_samp, x_samp
		else:       
			x_sigm = K.sigmoid(self.scaling_x_given_h  * x_pre)             
			x_samp = K.random_binomial(shape=x_sigm.shape, p=x_sigm)
			return x_samp, x_pre, x_sigm 
Example 6
Project: multilabel-nn   Author: cambridgeltl   File: layers.py    MIT License
def call(self, x, mask=None):
        if 0. < self.dropout < 1.:
            retain_p = 1. - self.dropout
            B = K.random_binomial((self.input_dim,), p=retain_p) * (1. / retain_p)
            B = K.expand_dims(B)
            W = K.in_train_phase(self.W * B, self.W)
        else:
            W = self.W
        out = K.gather(W, x)
        return out 
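Here the binomial mask has shape (input_dim,), one 0/1 draw per vocabulary row, so whole embedding vectors are dropped rather than individual components, and the 1. / retain_p factor keeps the expected embedding unchanged (inverted dropout). A standalone sketch of the same row-dropout idea (the function name and signature are illustrative, not from multilabel-nn):

def row_dropout(weights, vocab_size, drop_prob):
    # weights: a (vocab_size, embed_dim) embedding matrix.
    retain_p = 1.0 - drop_prob
    # One Bernoulli(retain_p) draw per row, rescaled so the mask has mean 1.
    mask = K.random_binomial((vocab_size,), p=retain_p) / retain_p
    # expand_dims gives shape (vocab_size, 1), which broadcasts across embed_dim.
    return weights * K.expand_dims(mask)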
Example 7
Project: keras-drop-block   Author: CyberZHG   File: drop_block.py    MIT License
def _compute_drop_mask(self, shape):
        seq_length = shape[1]
        mask = K.random_binomial(shape, p=self._get_gamma(seq_length))
        mask *= self._compute_valid_seed_region(seq_length)
        mask = keras.layers.MaxPool1D(
            pool_size=self.block_size,
            padding='same',
            strides=1,
            data_format='channels_last',
        )(mask)
        return 1.0 - mask 
Example 8
Project: keras-drop-block   Author: CyberZHG   File: drop_block.py    MIT License
def _compute_drop_mask(self, shape):
        height, width = shape[1], shape[2]
        mask = K.random_binomial(shape, p=self._get_gamma(height, width))
        mask *= self._compute_valid_seed_region(height, width)
        mask = keras.layers.MaxPool2D(
            pool_size=(self.block_size, self.block_size),
            padding='same',
            strides=1,
            data_format='channels_last',
        )(mask)
        return 1.0 - mask 
Example 9
Project: TF_PG_GANS   Author: naykun   File: layers.py    MIT License
def call(self, input, deterministic=False, **kwargs):
        if self.gain is not None:
            input = input * self.gain
        if deterministic or not self.strength:
            return input

        in_shape  = self.input_shape
        in_axes   = range(len(in_shape))
        in_shape  = [in_shape[axis] if in_shape[axis] is not None else input.shape[axis] for axis in in_axes] # None => Theano expr
        rnd_shape = [in_shape[axis] for axis in self.axes]
        broadcast = [self.axes.index(axis) if axis in self.axes else 'x' for axis in in_axes]
        one       = K.constant(1)

        if self.mode == 'drop':
            p = one - self.strength
            rnd = K.random_binomial(tuple(rnd_shape), p=p, dtype=input.dtype) / p

        elif self.mode == 'mul':
            rnd = (one + self.strength) ** K.random_normal(tuple(rnd_shape), dtype=input.dtype)

        elif self.mode == 'prop':
            coef = self.strength * K.constant(np.sqrt(np.float32(self.input_shape[1])))
            rnd = K.random_normal(tuple(rnd_shape), dtype=input.dtype) * coef + one

        else:
            raise ValueError('Invalid GDropLayer mode', self.mode)

        if self.normalize:
            rnd = rnd / K.sqrt(K.mean(rnd ** 2, axis=3, keepdims=True))
        return input * K.permute_dimensions(rnd, broadcast) 
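In the 'drop' branch each element of rnd is 1/p with probability p = 1 - strength and 0 otherwise, so its expectation is 1 and the layer leaves the input unchanged in expectation, exactly like inverted dropout. A small NumPy check of that scaling (illustrative only):

import numpy as np

strength = 0.2
p = 1.0 - strength
# Bernoulli(p) samples rescaled by 1/p, as in the 'drop' branch above.
rnd = np.random.binomial(1, p, size=1000000) / p
print(rnd.mean())  # approximately 1.0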
Example 10
Project: Keras-progressive_growing_of_gans   Author: MSC-BUAA   File: layers.py    MIT License
def call(self, input, deterministic=False, **kwargs):
        if self.gain is not None:
            input = input * self.gain
        if deterministic or not self.strength:
            return input

        in_shape  = self.input_shape
        in_axes   = range(len(in_shape))
        in_shape  = [in_shape[axis] if in_shape[axis] is not None else input.shape[axis] for axis in in_axes] # None => Theano expr
        rnd_shape = [in_shape[axis] for axis in self.axes]
        broadcast = [self.axes.index(axis) if axis in self.axes else 'x' for axis in in_axes]
        one       = K.constant(1)

        if self.mode == 'drop':
            p = one - self.strength
            rnd = K.random_binomial(tuple(rnd_shape), p=p, dtype=input.dtype) / p

        elif self.mode == 'mul':
            rnd = (one + self.strength) ** K.random_normal(tuple(rnd_shape), dtype=input.dtype)

        elif self.mode == 'prop':
            coef = self.strength * K.constant(np.sqrt(np.float32(self.input_shape[1])))
            rnd = K.random_normal(tuple(rnd_shape), dtype=input.dtype) * coef + one

        else:
            raise ValueError('Invalid GDropLayer mode', self.mode)

        if self.normalize:
            rnd = rnd / K.sqrt(K.mean(rnd ** 2, axis=3, keepdims=True))
        return input * K.permute_dimensions(rnd, broadcast) 
Example 11
Project: ikelos   Author: braingineer   File: embeddings.py    MIT License
def call(self, x, mask=None):
        if isinstance(x, list): 
            x,_ = x
        if mask is not None and isinstance(mask, list):
            mask,_ = mask
        if 0. < self.dropout < 1.:
            retain_p = 1. - self.dropout
            dims = self.W._keras_shape[:-1]
            B = K.random_binomial(dims, p=retain_p) * (1. / retain_p)
            B = K.expand_dims(B)
            W = K.in_train_phase(self.W * B, self.W)
        else:
            W = self.W
        
        if self.mode == 'matrix':
            return K.gather(W,x)
        elif self.mode == 'tensor':
            # quick and dirty: only allowing for 3dim inputs when it's tensor mode
            assert K.ndim(x) == 3
            # put sequence on first; gather; take diagonal across shared batch dimension
            # in other words, W is (B, S, F)
            # incoming x is (B, S, A)
            inds = K.arange(self.W._keras_shape[0])
            #out = K.gather(K.permute_dimensions(W, (1,0,2)), x).diagonal(axis1=0, axis2=3)
            #return K.permute_dimensions(out, (3,0,1,2))
            ### method above doesn't do grads =.=
            # tensor abc goes to bac, indexed onto with xyz, goes to xyzac, 
            # x == a, so shape to xayzc == xxyzc
            # take diagonal on first two: xyzc 
            #out = K.colgather()
            out = K.gather(K.permute_dimensions(W, (1,0,2)), x) 
            out = K.permute_dimensions(out, (0,3,1,2,4))
            out = K.gather(out, (inds, inds))
            return out
        else:
            raise Exception('sanity check. should not be here.')

        #all_dims = T.arange(len(self.W._keras_shape))
        #first_shuffle = [all_dims[self.embed_dim]] + all_dims[:self.embed_dim] + all_dims[self.embed_dim+1:]
        ## 1. take diagonal from 0th to
        ## chang eof tactics
        ## embed on time or embed on batch. that's all I'm supporting.  
        ## if it's embed on time, then, x.ndim+1 is where batch will be, and is what
        ## i need to take the diagonal over. 
        ## now dim shuffle the xdims + 1 to the front.
        #todo: get second shuffle or maybe find diagonal calculations
        #out = K.gather(W, x)
        #return out

        ### reference
        #A = S(np.arange(60).reshape(3,4,5))
        #x = S(np.random.randint(0, 4, (3,4,10)))
        #x_emb = A.dimshuffle(1,0,2)[x].dimshuffle(0,3,1,2,4)[T.arange(A.shape[0]), T.arange(A.shape[0])]