Python random.lognormvariate() Examples

The following are 17 code examples of random.lognormvariate(), drawn from open-source projects. The source file, project, and license for each example are listed above it. You may also want to check out the other functions and classes of the random module.
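Before the project examples, a short standalone sketch may help as orientation (the seed and parameter values below are arbitrary): random.lognormvariate(mu, sigma) draws from a log-normal distribution whose underlying normal distribution has mean mu and standard deviation sigma, so the parameters describe the logarithm of the result rather than the result itself.

import random

random.seed(42)  # fixed seed so the sketch is reproducible

# mu and sigma parameterize the log of the result, so the median is exp(mu) = 1.0 here
samples = [random.lognormvariate(0.0, 0.5) for _ in range(5)]
print(samples)  # five positive floats clustered around 1.0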
Example #1
Source File: populationdistribution.py    From CAMISIM with Apache License 2.0
def _add_timeseries_lognorm(list_population, mu, sigma):
		"""
			each abundance profile is produced by
			- draw new value from lognorm distribution
			- add old and new value and divide by 2

			@attention:

			@param list_population: Main list for all distributions
			@type : list[list[float]]
			@param mu: Mean
			@type mu: float
			@param sigma: standard deviation
			@type sigma: float

			@return: Nothing
			@rtype: None
		"""
		assert isinstance(list_population, list)
		assert isinstance(mu, (float, int, long))
		assert isinstance(sigma, (float, int, long))
		for index_p in xrange(len(list_population)):
			for index_i in xrange(len(list_population[index_p])-1):
				list_population[index_p][index_i+1] = (list_population[index_p][index_i] + random.lognormvariate(mu, sigma))/2 
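The docstring above describes the timeseries rule: each new timepoint is the average of the previous value and a fresh log-normal draw, which keeps successive abundances correlated. A minimal Python 3 sketch of that single step on a toy profile (the profile and parameter values here are hypothetical, not CAMISIM data):

import random

mu, sigma = 0.0, 0.5
# one population: an initial abundance at index 0 followed by three empty timepoints
profile = [random.lognormvariate(mu, sigma), 0.0, 0.0, 0.0]

for i in range(len(profile) - 1):
    # average the previous timepoint with a new log-normal draw, as in _add_timeseries_lognorm
    profile[i + 1] = (profile[i] + random.lognormvariate(mu, sigma)) / 2
print(profile)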
Example #2
Source File: populationdistribution.py    From CAMISIM with Apache License 2.0
def _add_differential(list_population, mu, sigma):
		"""
			Abundance is drawn independently from previous lognorm distributions

			@attention:

			@param list_population: Main list for all distributions
			@type : list[list[float]]
			@param mu: Mean
			@type mu: float
			@param sigma: standard deviation
			@type sigma: float

			@return: Nothing
			@rtype: None
		"""
		assert isinstance(list_population, list)
		assert isinstance(mu, (float, int, long))
		assert isinstance(sigma, (float, int, long))
		for index_p in xrange(len(list_population)):
			for index_i in xrange(len(list_population[index_p])-1):
				list_population[index_p][index_i+1] = random.lognormvariate(mu, sigma) 
Example #3
Source File: populationdistribution.py    From CAMISIM with Apache License 2.0
def _add_initial_log_distribution(list_population, mu, sigma):
		"""
			Adding an initial distribution

			@attention: Values for first sample

			@param list_population: Main list for all distributions
			@type : list[list[float]]
			@param mu: Mean
			@type mu: float
			@param sigma: standard deviation
			@type sigma: float

			@return: Nothing
			@rtype: None
		"""
		assert isinstance(list_population, list)
		assert isinstance(mu, (float, int, long))
		assert isinstance(sigma, (float, int, long))
		for index in xrange(len(list_population)):
			list_population[index][0] = random.lognormvariate(mu, sigma) 
Example #4
Source File: Density.py    From biskit with GNU General Public License v3.0
def test_Density(self):
        """Statistics.Density test"""
        import random

        ## a lognormal density distribution the log of which has mean 1.0
        ## and stdev 0.5
        self.X = [ (x, p_lognormal(x, 1.0, 0.5))
                   for x in N0.arange(0.00001, 50, 0.001)]

        alpha = 2.
        beta = 0.6

        self.R = [ random.lognormvariate( alpha, beta )
                   for i in range( 10000 )]

        p = logConfidence( 6.0, self.R )[0]#, area(6.0, alpha, beta) 
Example #5
Source File: __main__.py    From slicesim with MIT License
def get_dist(d):
    return {
        'randrange': random.randrange, # start, stop, step
        'randint': random.randint, # a, b
        'random': random.random,
        'uniform': random.uniform, # a, b
        'triangular': random.triangular, # low, high, mode
        'beta': random.betavariate, # alpha, beta
        'expo': random.expovariate, # lambda
        'gamma': random.gammavariate, # alpha, beta
        'gauss': random.gauss, # mu, sigma
        'lognorm': random.lognormvariate, # mu, sigma
        'normal': random.normalvariate, # mu, sigma
        'vonmises': random.vonmisesvariate, # mu, kappa
        'pareto': random.paretovariate, # alpha
        'weibull': random.weibullvariate # alpha, beta
    }.get(d) 
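The dispatch table above maps a configuration string to the corresponding function from the random module, with each function's expected arguments noted in the trailing comments; unknown keys fall through .get(d) and return None. A hedged usage sketch, assuming get_dist from the example above is in scope (the key and argument values are only illustrative):

dist = get_dist('lognorm')   # returns random.lognormvariate
if dist is not None:
    value = dist(0.0, 0.5)   # called with mu, sigma, per the comment in the table
    print(value)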
Example #6
Source File: test_multiprocessing.py    From CTFCrackTools with GNU General Public License v3.0
def test_heap(self):
        iterations = 5000
        maxblocks = 50
        blocks = []

        # create and destroy lots of blocks of different sizes
        for i in xrange(iterations):
            size = int(random.lognormvariate(0, 1) * 1000)
            b = multiprocessing.heap.BufferWrapper(size)
            blocks.append(b)
            if len(blocks) > maxblocks:
                i = random.randrange(maxblocks)
                del blocks[i]

        # get the heap object
        heap = multiprocessing.heap.BufferWrapper._heap

        # verify the state of the heap
        all = []
        occupied = 0
        heap._lock.acquire()
        self.addCleanup(heap._lock.release)
        for L in heap._len_to_seq.values():
            for arena, start, stop in L:
                all.append((heap._arenas.index(arena), start, stop,
                            stop-start, 'free'))
        for arena, start, stop in heap._allocated_blocks:
            all.append((heap._arenas.index(arena), start, stop,
                        stop-start, 'occupied'))
            occupied += (stop-start)

        all.sort()

        for i in range(len(all)-1):
            (arena, start, stop) = all[i][:3]
            (narena, nstart, nstop) = all[i+1][:3]
            self.assertTrue((arena != narena and nstart == 0) or
                            (stop == nstart)) 
Example #7
Source File: lognormal.py    From biskit with GNU General Public License v3.0
def test_lognormal(self):
        """Statistics.lognormal test"""
        import random
        import Biskit.gnuplot as gnuplot
        import Biskit.hist as H

        cr = []
        for i in range( 10000 ):
            ## Some random values drawn from the same lognormal distribution 

            alpha = 1.5
            beta = .7
            x = 10.

            R = [ random.lognormvariate( alpha, beta ) for j in range( 10 ) ]

            cr += [ logConfidence( x, R )[0] ]


        ca = logArea( x, alpha, beta )

        if self.local:
            gnuplot.plot( H.density( N0.array(cr) - ca, 100 ) )

            globals().update( locals() )

        self.assertAlmostEqual( ca,  0.86877651432955771, 7) 
Example #8
Source File: test_multiprocessing.py    From CTFCrackTools-V2 with GNU General Public License v3.0
def test_heap(self):
        iterations = 5000
        maxblocks = 50
        blocks = []

        # create and destroy lots of blocks of different sizes
        for i in xrange(iterations):
            size = int(random.lognormvariate(0, 1) * 1000)
            b = multiprocessing.heap.BufferWrapper(size)
            blocks.append(b)
            if len(blocks) > maxblocks:
                i = random.randrange(maxblocks)
                del blocks[i]

        # get the heap object
        heap = multiprocessing.heap.BufferWrapper._heap

        # verify the state of the heap
        all = []
        occupied = 0
        heap._lock.acquire()
        self.addCleanup(heap._lock.release)
        for L in heap._len_to_seq.values():
            for arena, start, stop in L:
                all.append((heap._arenas.index(arena), start, stop,
                            stop-start, 'free'))
        for arena, start, stop in heap._allocated_blocks:
            all.append((heap._arenas.index(arena), start, stop,
                        stop-start, 'occupied'))
            occupied += (stop-start)

        all.sort()

        for i in range(len(all)-1):
            (arena, start, stop) = all[i][:3]
            (narena, nstart, nstop) = all[i+1][:3]
            self.assertTrue((arena != narena and nstart == 0) or
                            (stop == nstart)) 
Example #9
Source File: _test_multiprocessing.py    From Project-New-Reign---Nemesis-Main with GNU General Public License v3.0
def test_heap(self):
        iterations = 5000
        maxblocks = 50
        blocks = []

        # create and destroy lots of blocks of different sizes
        for i in range(iterations):
            size = int(random.lognormvariate(0, 1) * 1000)
            b = multiprocessing.heap.BufferWrapper(size)
            blocks.append(b)
            if len(blocks) > maxblocks:
                i = random.randrange(maxblocks)
                del blocks[i]

        # get the heap object
        heap = multiprocessing.heap.BufferWrapper._heap

        # verify the state of the heap
        all = []
        occupied = 0
        heap._lock.acquire()
        self.addCleanup(heap._lock.release)
        for L in list(heap._len_to_seq.values()):
            for arena, start, stop in L:
                all.append((heap._arenas.index(arena), start, stop,
                            stop-start, 'free'))
        for arena, start, stop in heap._allocated_blocks:
            all.append((heap._arenas.index(arena), start, stop,
                        stop-start, 'occupied'))
            occupied += (stop-start)

        all.sort()

        for i in range(len(all)-1):
            (arena, start, stop) = all[i][:3]
            (narena, nstart, nstop) = all[i+1][:3]
            self.assertTrue((arena != narena and nstart == 0) or
                            (stop == nstart)) 
Example #10
Source File: test_multiprocessing.py    From gcblue with BSD 3-Clause "New" or "Revised" License
def test_heap(self):
        iterations = 5000
        maxblocks = 50
        blocks = []

        # create and destroy lots of blocks of different sizes
        for i in xrange(iterations):
            size = int(random.lognormvariate(0, 1) * 1000)
            b = multiprocessing.heap.BufferWrapper(size)
            blocks.append(b)
            if len(blocks) > maxblocks:
                i = random.randrange(maxblocks)
                del blocks[i]

        # get the heap object
        heap = multiprocessing.heap.BufferWrapper._heap

        # verify the state of the heap
        all = []
        occupied = 0
        heap._lock.acquire()
        self.addCleanup(heap._lock.release)
        for L in heap._len_to_seq.values():
            for arena, start, stop in L:
                all.append((heap._arenas.index(arena), start, stop,
                            stop-start, 'free'))
        for arena, start, stop in heap._allocated_blocks:
            all.append((heap._arenas.index(arena), start, stop,
                        stop-start, 'occupied'))
            occupied += (stop-start)

        all.sort()

        for i in range(len(all)-1):
            (arena, start, stop) = all[i][:3]
            (narena, nstart, nstop) = all[i+1][:3]
            self.assertTrue((arena != narena and nstart == 0) or
                            (stop == nstart)) 
Example #11
Source File: stats.py    From ccs-calendarserver with Apache License 2.0
def sample(self):
        result = self._scale * random.lognormvariate(self._mu, self._sigma)
        if self._maximum is not None and result > self._maximum:
            for _ignore in range(10):
                result = self._scale * random.lognormvariate(self._mu, self._sigma)
                if result <= self._maximum:
                    break
            else:
                raise ValueError("Unable to generate LogNormalDistribution sample within required range")
        return result 
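The surrounding class is not shown in this excerpt; sample() scales a log-normal draw by self._scale and, when a maximum is configured, retries up to ten more times before raising. A minimal self-contained sketch with the same retry logic (the class name and constructor below are illustrative assumptions, not the project's actual API):

import random

class BoundedLogNormal:
    """Illustrative stand-in for the distribution object used above."""
    def __init__(self, mu, sigma, scale=1.0, maximum=None):
        self._mu, self._sigma = mu, sigma
        self._scale, self._maximum = scale, maximum

    def sample(self):
        result = self._scale * random.lognormvariate(self._mu, self._sigma)
        if self._maximum is not None and result > self._maximum:
            for _ignore in range(10):
                result = self._scale * random.lognormvariate(self._mu, self._sigma)
                if result <= self._maximum:
                    break
            else:
                raise ValueError("Unable to generate a sample within the required range")
        return result

print(BoundedLogNormal(mu=0.0, sigma=0.3, scale=2.0, maximum=5.0).sample())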
Example #12
Source File: _test_multiprocessing.py    From ironpython3 with Apache License 2.0
def test_heap(self):
        iterations = 5000
        maxblocks = 50
        blocks = []

        # create and destroy lots of blocks of different sizes
        for i in range(iterations):
            size = int(random.lognormvariate(0, 1) * 1000)
            b = multiprocessing.heap.BufferWrapper(size)
            blocks.append(b)
            if len(blocks) > maxblocks:
                i = random.randrange(maxblocks)
                del blocks[i]

        # get the heap object
        heap = multiprocessing.heap.BufferWrapper._heap

        # verify the state of the heap
        all = []
        occupied = 0
        heap._lock.acquire()
        self.addCleanup(heap._lock.release)
        for L in list(heap._len_to_seq.values()):
            for arena, start, stop in L:
                all.append((heap._arenas.index(arena), start, stop,
                            stop-start, 'free'))
        for arena, start, stop in heap._allocated_blocks:
            all.append((heap._arenas.index(arena), start, stop,
                        stop-start, 'occupied'))
            occupied += (stop-start)

        all.sort()

        for i in range(len(all)-1):
            (arena, start, stop) = all[i][:3]
            (narena, nstart, nstop) = all[i+1][:3]
            self.assertTrue((arena != narena and nstart == 0) or
                            (stop == nstart)) 
Example #13
Source File: _test_multiprocessing.py    From Fluid-Designer with GNU General Public License v3.0
def test_heap(self):
        iterations = 5000
        maxblocks = 50
        blocks = []

        # create and destroy lots of blocks of different sizes
        for i in range(iterations):
            size = int(random.lognormvariate(0, 1) * 1000)
            b = multiprocessing.heap.BufferWrapper(size)
            blocks.append(b)
            if len(blocks) > maxblocks:
                i = random.randrange(maxblocks)
                del blocks[i]

        # get the heap object
        heap = multiprocessing.heap.BufferWrapper._heap

        # verify the state of the heap
        all = []
        occupied = 0
        heap._lock.acquire()
        self.addCleanup(heap._lock.release)
        for L in list(heap._len_to_seq.values()):
            for arena, start, stop in L:
                all.append((heap._arenas.index(arena), start, stop,
                            stop-start, 'free'))
        for arena, start, stop in heap._allocated_blocks:
            all.append((heap._arenas.index(arena), start, stop,
                        stop-start, 'occupied'))
            occupied += (stop-start)

        all.sort()

        for i in range(len(all)-1):
            (arena, start, stop) = all[i][:3]
            (narena, nstart, nstop) = all[i+1][:3]
            self.assertTrue((arena != narena and nstart == 0) or
                            (stop == nstart)) 
Example #14
Source File: test_multiprocessing.py    From oss-ftp with MIT License
def test_heap(self):
        iterations = 5000
        maxblocks = 50
        blocks = []

        # create and destroy lots of blocks of different sizes
        for i in xrange(iterations):
            size = int(random.lognormvariate(0, 1) * 1000)
            b = multiprocessing.heap.BufferWrapper(size)
            blocks.append(b)
            if len(blocks) > maxblocks:
                i = random.randrange(maxblocks)
                del blocks[i]

        # get the heap object
        heap = multiprocessing.heap.BufferWrapper._heap

        # verify the state of the heap
        all = []
        occupied = 0
        heap._lock.acquire()
        self.addCleanup(heap._lock.release)
        for L in heap._len_to_seq.values():
            for arena, start, stop in L:
                all.append((heap._arenas.index(arena), start, stop,
                            stop-start, 'free'))
        for arena, start, stop in heap._allocated_blocks:
            all.append((heap._arenas.index(arena), start, stop,
                        stop-start, 'occupied'))
            occupied += (stop-start)

        all.sort()

        for i in range(len(all)-1):
            (arena, start, stop) = all[i][:3]
            (narena, nstart, nstop) = all[i+1][:3]
            self.assertTrue((arena != narena and nstart == 0) or
                            (stop == nstart)) 
Example #15
Source File: test_multiprocessing.py    From BinderFilter with MIT License
def test_heap(self):
        iterations = 5000
        maxblocks = 50
        blocks = []

        # create and destroy lots of blocks of different sizes
        for i in xrange(iterations):
            size = int(random.lognormvariate(0, 1) * 1000)
            b = multiprocessing.heap.BufferWrapper(size)
            blocks.append(b)
            if len(blocks) > maxblocks:
                i = random.randrange(maxblocks)
                del blocks[i]

        # get the heap object
        heap = multiprocessing.heap.BufferWrapper._heap

        # verify the state of the heap
        all = []
        occupied = 0
        heap._lock.acquire()
        self.addCleanup(heap._lock.release)
        for L in heap._len_to_seq.values():
            for arena, start, stop in L:
                all.append((heap._arenas.index(arena), start, stop,
                            stop-start, 'free'))
        for arena, start, stop in heap._allocated_blocks:
            all.append((heap._arenas.index(arena), start, stop,
                        stop-start, 'occupied'))
            occupied += (stop-start)

        all.sort()

        for i in range(len(all)-1):
            (arena, start, stop) = all[i][:3]
            (narena, nstart, nstop) = all[i+1][:3]
            self.assertTrue((arena != narena and nstart == 0) or
                            (stop == nstart)) 
Example #16
Source File: test_multiprocessing.py    From ironpython2 with Apache License 2.0
def test_heap(self):
        iterations = 5000
        maxblocks = 50
        blocks = []

        # create and destroy lots of blocks of different sizes
        for i in xrange(iterations):
            size = int(random.lognormvariate(0, 1) * 1000)
            b = multiprocessing.heap.BufferWrapper(size)
            blocks.append(b)
            if len(blocks) > maxblocks:
                i = random.randrange(maxblocks)
                del blocks[i]

        # get the heap object
        heap = multiprocessing.heap.BufferWrapper._heap

        # verify the state of the heap
        all = []
        occupied = 0
        heap._lock.acquire()
        self.addCleanup(heap._lock.release)
        for L in heap._len_to_seq.values():
            for arena, start, stop in L:
                all.append((heap._arenas.index(arena), start, stop,
                            stop-start, 'free'))
        for arena, start, stop in heap._allocated_blocks:
            all.append((heap._arenas.index(arena), start, stop,
                        stop-start, 'occupied'))
            occupied += (stop-start)

        all.sort()

        for i in range(len(all)-1):
            (arena, start, stop) = all[i][:3]
            (narena, nstart, nstop) = all[i+1][:3]
            self.assertTrue((arena != narena and nstart == 0) or
                            (stop == nstart)) 
Example #17
Source File: plotly_apps.py    From django-plotly-dash with MIT License
def callback_liveOut_pipe_in(named_count, state_uid, **kwargs):
    'Handle something changing the value of the input pipe or the associated state uid'

    cache_key = _get_cache_key(state_uid)
    state = cache.get(cache_key)

    # If nothing in cache, prepopulate
    if not state:
        state = {}

    # Guard against missing input on startup
    if not named_count:
        named_count = {}

    # extract incoming info from the message and update the internal state
    user = named_count.get('user', None)
    click_colour = named_count.get('click_colour', None)
    click_timestamp = named_count.get('click_timestamp', 0)

    if click_colour:
        colour_set = state.get(click_colour, None)

        if not colour_set:
            colour_set = [(None, 0, 100) for i in range(5)]

        _, last_ts, prev = colour_set[-1]

        # Loop over all existing timestamps and find the latest one
        if not click_timestamp or click_timestamp < 1:
            click_timestamp = 0

            for _, the_colour_set in state.items():
                _, lts, _ = the_colour_set[-1]
                if lts > click_timestamp:
                    click_timestamp = lts

            click_timestamp = click_timestamp + 1000

        if click_timestamp > last_ts:
            colour_set.append((user, click_timestamp, prev * random.lognormvariate(0.0, 0.1)),)
            colour_set = colour_set[-100:]

        state[click_colour] = colour_set
        cache.set(cache_key, state, 3600)

    return "(%s,%s)" % (cache_key, click_timestamp)