"""Random variable generators.

    integers
    --------
           uniform within range

    sequences
    ---------
           pick random element
           pick random sample
           generate random permutation

    distributions on the real line:
    ------------------------------
           uniform
           normal (Gaussian)
           lognormal
           negative exponential
           gamma
           beta

    distributions on the circle (angles 0 to 2pi)
    ---------------------------------------------
           circular uniform
           von Mises

Translated from anonymously contributed C/C++ source.

Multi-threading note: the random number generator used here is not thread-
safe; it is possible that two calls return the same random value.  However,
you can instantiate a different instance of Random() in each thread to get
generators that don't share state, then use .setstate() and .jumpahead() to
move the generators to disjoint segments of the full period.  For example,

    def create_generators(num, delta, firstseed=None):
        ""\"Return list of num distinct generators.
        Each generator has its own unique segment of delta elements from
        Random.random()'s full period.
        Seed the first generator with optional arg firstseed (default is
        None, to seed from current time).
        ""\"

        from random import Random
        g = Random(firstseed)
        result = [g]
        for i in range(num - 1):
            laststate = g.getstate()
            g = Random()
            g.setstate(laststate)
            g.jumpahead(delta)
            result.append(g)
        return result

    gens = create_generators(10, 1000000)

That creates 10 distinct generators, which can be passed out to 10 distinct
threads.  The generators don't share state so can be called safely in
parallel.  So long as no thread calls its g.random() more than a million
times (the second argument to create_generators), the sequences seen by
each thread will not overlap.

The period of the underlying Wichmann-Hill generator is 6,953,607,871,644,
and that limits how far this technique can be pushed.

Just for fun, note that since we know the period, .jumpahead() can also be
used to "move backward in time":

>>> g = Random(42)  # arbitrary
>>> g.random()
0.25420336316883324
>>> g.jumpahead(6953607871644L - 1) # move *back* one
>>> g.random()
0.25420336316883324

"""
# XXX The docstring sucks.

from math import log as _log, exp as _exp, pi as _pi, e as _e
from math import sqrt as _sqrt, acos as _acos, cos as _cos, sin as _sin
from math import floor as _floor

__all__ = ["Random","seed","random","uniform","randint","choice","sample",
           "randrange","shuffle","normalvariate","lognormvariate",
           "cunifvariate","expovariate","vonmisesvariate","gammavariate",
           "stdgamma","gauss","betavariate","paretovariate","weibullvariate",
           "getstate","setstate","jumpahead","whseed"]

def _verify(name, computed, expected):
    if abs(computed - expected) > 1e-7:
        raise ValueError(
            "computed value for %s deviates too much "
            "(computed %g, expected %g)" % (name, computed, expected))

NV_MAGICCONST = 4 * _exp(-0.5)/_sqrt(2.0)
_verify('NV_MAGICCONST', NV_MAGICCONST, 1.71552776992141)

TWOPI = 2.0*_pi
_verify('TWOPI', TWOPI, 6.28318530718)

LOG4 = _log(4.0)
_verify('LOG4', LOG4, 1.38629436111989)

SG_MAGICCONST = 1.0 + _log(4.5)
_verify('SG_MAGICCONST', SG_MAGICCONST, 2.50407739677627)

del _verify

# Translated by Guido van Rossum from C source provided by
# Adrian Baddeley.

class Random:
    """Random number generator base class used by bound module functions.

    Used to instantiate instances of Random to get generators that don't
    share state.  Especially useful for multi-threaded programs, creating
    a different instance of Random for each thread, and using the jumpahead()
    method to ensure that the generated sequences seen by each thread don't
    overlap.

    Class Random can also be subclassed if you want to use a different basic
    generator of your own devising: in that case, override the following
    methods: random(), seed(), getstate(), setstate() and jumpahead().

    """

    VERSION = 1     # used by getstate/setstate

    def __init__(self, x=None):
        """Initialize an instance.

        Optional argument x controls seeding, as for Random.seed().
        """

        self.seed(x)

## -------------------- core generator -------------------

    # Specific to Wichmann-Hill generator.  Subclasses wishing to use a
    # different core generator should override the seed(), random(),
    # getstate(), setstate() and jumpahead() methods.

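    # As a rough illustration only (not part of this module), a subclass
    # plugging in a different core generator might look like the commented
    # sketch below; the toy linear congruential generator is a placeholder
    # assumption, not a recommendation:
    #
    #   class MyRandom(Random):
    #       def seed(self, a=None):
    #           if a is None:
    #               import time
    #               a = long(time.time() * 256)
    #           self._state = hash(a) & 0xffffffffL
    #           self.gauss_next = None
    #       def random(self):
    #           # toy LCG; replace with a real generator
    #           self._state = (69069L * self._state + 1L) % 4294967296L
    #           return self._state / 4294967296.0
    #       def getstate(self):
    #           return self._state, self.gauss_next
    #       def setstate(self, state):
    #           self._state, self.gauss_next = state
    #       def jumpahead(self, n):
    #           for i in xrange(n):
    #               self.random()
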
    def seed(self, a=None):
        """Initialize internal state from hashable object.

        None or no argument seeds from current time.

        If a is not None and not an int or long, hash(a) is used instead.

        If a is an int or long, a is used directly.  Distinct values between
        0 and 27814431486575L inclusive are guaranteed to yield distinct
        internal states (this guarantee is specific to the default
        Wichmann-Hill generator).
        """

        if a is None:
            # Initialize from current time
            import time
            a = long(time.time() * 256)

        if type(a) not in (type(3), type(3L)):
            a = hash(a)

        a, x = divmod(a, 30268)
        a, y = divmod(a, 30306)
        a, z = divmod(a, 30322)
        self._seed = int(x)+1, int(y)+1, int(z)+1

        self.gauss_next = None

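    # For example (illustrative sketch only), two instances seeded with
    # distinct ints in that range start from distinct internal states:
    #
    #   r1 = Random(1)
    #   r2 = Random(2)
    #   assert r1.getstate() != r2.getstate()
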
    def random(self):
        """Get the next random number in the range [0.0, 1.0)."""

        # Wichmann-Hill random number generator.
        #
        # Wichmann, B. A. & Hill, I. D. (1982)
        # Algorithm AS 183:
        # An efficient and portable pseudo-random number generator
        # Applied Statistics 31 (1982) 188-190
        #
        # see also:
        #        Correction to Algorithm AS 183
        #        Applied Statistics 33 (1984) 123
        #
        #        McLeod, A. I. (1985)
        #        A remark on Algorithm AS 183
        #        Applied Statistics 34 (1985), 198-200

        # This part is thread-unsafe:
        # BEGIN CRITICAL SECTION
        x, y, z = self._seed
        x = (171 * x) % 30269
        y = (172 * y) % 30307
        z = (170 * z) % 30323
        self._seed = x, y, z
        # END CRITICAL SECTION

        # Note: on a platform using IEEE-754 double arithmetic, this can
        # never return 0.0 (asserted by Tim; proof too long for a comment).
        return (x/30269.0 + y/30307.0 + z/30323.0) % 1.0

    def getstate(self):
        """Return internal state; can be passed to setstate() later."""
        return self.VERSION, self._seed, self.gauss_next

    def setstate(self, state):
        """Restore internal state from object returned by getstate()."""
        version = state[0]
        if version == 1:
            version, self._seed, self.gauss_next = state
        else:
            raise ValueError("state with version %s passed to "
                             "Random.setstate() of version %s" %
                             (version, self.VERSION))

    def jumpahead(self, n):
        """Act as if n calls to random() were made, but quickly.

        n is an int, greater than or equal to 0.

        Example use:  If you have 2 threads and know that each will
        consume no more than a million random numbers, create two Random
        objects r1 and r2, then do
            r2.setstate(r1.getstate())
            r2.jumpahead(1000000)
        Then r1 and r2 will use guaranteed-disjoint segments of the full
        period.
        """

        if not n >= 0:
            raise ValueError("n must be >= 0")
        x, y, z = self._seed
        x = int(x * pow(171, n, 30269)) % 30269
        y = int(y * pow(172, n, 30307)) % 30307
        z = int(z * pow(170, n, 30323)) % 30323
        self._seed = x, y, z

    def __whseed(self, x=0, y=0, z=0):
        """Set the Wichmann-Hill seed from (x, y, z).

        These must be integers in the range [0, 256).
        """

        if not type(x) == type(y) == type(z) == int:
            raise TypeError('seeds must be integers')
        if not (0 <= x < 256 and 0 <= y < 256 and 0 <= z < 256):
            raise ValueError('seeds must be in range(0, 256)')
        if 0 == x == y == z:
            # Initialize from current time
            import time
            t = long(time.time() * 256)
            t = int((t&0xffffff) ^ (t>>24))
            t, x = divmod(t, 256)
            t, y = divmod(t, 256)
            t, z = divmod(t, 256)
        # Zero is a poor seed, so substitute 1
        self._seed = (x or 1, y or 1, z or 1)

        self.gauss_next = None

    def whseed(self, a=None):
        """Seed from hashable object's hash code.

        None or no argument seeds from current time.  It is not guaranteed
        that objects with distinct hash codes lead to distinct internal
        states.

        This is obsolete, provided for compatibility with the seed routine
        used prior to Python 2.1.  Use the .seed() method instead.
        """

        if a is None:
            self.__whseed()
            return
        a = hash(a)
        a, x = divmod(a, 256)
        a, y = divmod(a, 256)
        a, z = divmod(a, 256)
        x = (x + a) % 256 or 1
        y = (y + a) % 256 or 1
        z = (z + a) % 256 or 1
        self.__whseed(x, y, z)

## ---- Methods below this point do not need to be overridden when
## ---- subclassing for the purpose of using a different core generator.

## -------------------- pickle support -------------------

    def __getstate__(self): # for pickle
        return self.getstate()

    def __setstate__(self, state):  # for pickle
        self.setstate(state)

## -------------------- integer methods -------------------

    def randrange(self, start, stop=None, step=1, int=int, default=None):
        """Choose a random item from range(start, stop[, step]).

        This fixes the problem with randint() which includes the
        endpoint; in Python this is usually not what you want.
        Do not supply the 'int' and 'default' arguments.
        """

        # This code is a bit messy to make it fast for the
        # common case while still doing adequate error checking.
        istart = int(start)
        if istart != start:
            raise ValueError, "non-integer arg 1 for randrange()"
        if stop is default:
            if istart > 0:
                return int(self.random() * istart)
            raise ValueError, "empty range for randrange()"

        # stop argument supplied.
        istop = int(stop)
        if istop != stop:
            raise ValueError, "non-integer stop for randrange()"
        if step == 1 and istart < istop:
            try:
                return istart + int(self.random()*(istop - istart))
            except OverflowError:
                # This can happen if istop-istart > sys.maxint + 1, and
                # multiplying by random() doesn't reduce it to something
                # <= sys.maxint.  We know that the overall result fits
                # in an int, and can still do it correctly via math.floor().
                # But that adds another function call, so for speed we
                # avoided that whenever possible.
                return int(istart + _floor(self.random()*(istop - istart)))
        if step == 1:
            raise ValueError, "empty range for randrange()"

        # Non-unit step argument supplied.
        istep = int(step)
        if istep != step:
            raise ValueError, "non-integer step for randrange()"
        if istep > 0:
            n = (istop - istart + istep - 1) / istep
        elif istep < 0:
            n = (istop - istart + istep + 1) / istep
        else:
            raise ValueError, "zero step for randrange()"

        if n <= 0:
            raise ValueError, "empty range for randrange()"
        return istart + istep*int(self.random() * n)

    def randint(self, a, b):
        """Return random integer in range [a, b], including both end points.
        """

        return self.randrange(a, b+1)

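    # For example (illustrative sketch only):
    #
    #   r = Random()
    #   r.randrange(0, 10)       # one of 0, 1, ..., 9 (endpoint excluded)
    #   r.randint(0, 10)         # one of 0, 1, ..., 10 (endpoint included)
    #   r.randrange(0, 101, 10)  # one of 0, 10, 20, ..., 100
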
## -------------------- sequence methods -------------------

    def choice(self, seq):
        """Choose a random element from a non-empty sequence."""
        return seq[int(self.random() * len(seq))]

    def shuffle(self, x, random=None, int=int):
        """x, random=random.random -> shuffle list x in place; return None.

        Optional arg random is a 0-argument function returning a random
        float in [0.0, 1.0); by default, the standard random.random.

        Note that for even rather small len(x), the total number of
        permutations of x is larger than the period of most random number
        generators; this implies that "most" permutations of a long
        sequence can never be generated.
        """

        if random is None:
            random = self.random
        for i in xrange(len(x)-1, 0, -1):
            # pick an element in x[:i+1] with which to exchange x[i]
            j = int(random() * (i+1))
            x[i], x[j] = x[j], x[i]

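    # For example (illustrative sketch only), shuffling a deck in place:
    #
    #   r = Random()
    #   deck = range(52)
    #   r.shuffle(deck)           # deck is reordered in place; returns None
    #   top_card = r.choice(deck)
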
    def sample(self, population, k, random=None, int=int):
        """Chooses k unique random elements from a population sequence.

        Returns a new list containing elements from the population while
        leaving the original population unchanged.  The resulting list is
        in selection order so that all sub-slices will also be valid random
        samples.  This allows raffle winners (the sample) to be partitioned
        into grand prize and second place winners (the subslices).

        Members of the population need not be hashable or unique.  If the
        population contains repeats, then each occurrence is a possible
        selection in the sample.

        To choose a sample in a range of integers, use xrange as an argument.
        This is especially fast and space efficient for sampling from a
        large population:  sample(xrange(10000000), 60)

        Optional arg random is a 0-argument function returning a random
        float in [0.0, 1.0); by default, the standard random.random.
        """

        # Sampling without replacement entails tracking either potential
        # selections (the pool) or previous selections.

        # Pools are stored in lists which provide __getitem__ for selection
        # and provide a way to remove selections.  But each list.remove()
        # rebuilds the entire list, so it is better to rearrange the list,
        # placing non-selected elements at the head of the list.  Tracking
        # the selection pool is only space efficient with small populations.

        # Previous selections are stored in dictionaries which provide
        # __contains__ for detecting repeat selections.  Discarding repeats
        # is efficient unless most of the population has already been chosen.
        # So, tracking selections is fast only with small sample sizes.

        n = len(population)
        if not 0 <= k <= n:
            raise ValueError, "sample larger than population"
        if random is None:
            random = self.random
        result = [None] * k
        if n < 6 * k:       # if n len list takes less space than a k len dict
            pool = list(population)
            for i in xrange(k):         # invariant: non-selected at [0,n-i)
                j = int(random() * (n-i))
                result[i] = pool[j]
                pool[j] = pool[n-i-1]
        else:
            selected = {}
            for i in xrange(k):
                j = int(random() * n)
                while j in selected:
                    j = int(random() * n)
                result[i] = selected[j] = population[j]
        return result

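    # For example (illustrative sketch only), the raffle partition described
    # in the docstring:
    #
    #   r = Random()
    #   winners = r.sample(range(1000), 3)   # three distinct ticket numbers
    #   grand_prize = winners[0]             # any sub-slice is itself a
    #   runners_up = winners[1:]             # valid random sample
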
## -------------------- real-valued distributions -------------------

## -------------------- uniform distribution -------------------

    def uniform(self, a, b):
        """Get a random number in the range [a, b)."""
        return a + (b-a) * self.random()

## -------------------- normal distribution --------------------

    def normalvariate(self, mu, sigma):
        """Normal distribution.

        mu is the mean, and sigma is the standard deviation.

        """
        # mu = mean, sigma = standard deviation

        # Uses Kinderman and Monahan method. Reference: Kinderman,
        # A.J. and Monahan, J.F., "Computer generation of random
        # variables using the ratio of uniform deviates", ACM Trans
        # Math Software, 3, (1977), pp257-260.

        random = self.random
        while True:
            u1 = random()
            u2 = random()
            z = NV_MAGICCONST*(u1-0.5)/u2
            zz = z*z/4.0
            if zz <= -_log(u2):
                break
        return mu + z*sigma

## -------------------- lognormal distribution --------------------

    def lognormvariate(self, mu, sigma):
        """Log normal distribution.

        If you take the natural logarithm of this distribution, you'll get a
        normal distribution with mean mu and standard deviation sigma.
        mu can have any value, and sigma must be greater than zero.

        """
        return _exp(self.normalvariate(mu, sigma))

## -------------------- circular uniform --------------------

    def cunifvariate(self, mean, arc):
        """Circular uniform distribution.

        mean is the mean angle, and arc is the range of the distribution,
        centered around the mean angle.  Both values must be expressed in
        radians.  Returned values range between mean - arc/2 and
        mean + arc/2 and are normalized to between 0 and pi.

        Deprecated in version 2.3.  Use:
            (mean + arc * (random.random() - 0.5)) % math.pi

        """
        # mean: mean angle (in radians between 0 and pi)
        # arc:  range of distribution (in radians between 0 and pi)
        import warnings
        warnings.warn("The cunifvariate function is deprecated; use (mean "
                      "+ arc * (random.random() - 0.5)) % math.pi instead",
                      DeprecationWarning)

        return (mean + arc * (self.random() - 0.5)) % _pi

## -------------------- exponential distribution --------------------

    def expovariate(self, lambd):
        """Exponential distribution.

        lambd is 1.0 divided by the desired mean.  (The parameter would be
        called "lambda", but that is a reserved word in Python.)  Returned
        values range from 0 to positive infinity.

        """
        # lambd: rate lambd = 1/mean
        # ('lambda' is a Python reserved word)

        random = self.random
        u = random()
        while u <= 1e-7:
            u = random()
        return -_log(u)/lambd

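    # For example (illustrative sketch only), interarrival times with a
    # desired mean of 10.0 seconds use lambd = 1.0/10.0:
    #
    #   r = Random()
    #   wait = r.expovariate(1.0/10.0)   # averages 10.0 over many draws
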
## -------------------- von Mises distribution --------------------

    def vonmisesvariate(self, mu, kappa):
        """Circular data distribution.

        mu is the mean angle, expressed in radians between 0 and 2*pi, and
        kappa is the concentration parameter, which must be greater than or
        equal to zero.  If kappa is equal to zero, this distribution reduces
        to a uniform random angle over the range 0 to 2*pi.

        """
        # mu:    mean angle (in radians between 0 and 2*pi)
        # kappa: concentration parameter kappa (>= 0)
        # if kappa = 0 generate uniform random angle

        # Based upon an algorithm published in: Fisher, N.I.,
        # "Statistical Analysis of Circular Data", Cambridge
        # University Press, 1993.

        # Thanks to Magnus Kessler for a correction to the
        # implementation of step 4.

        random = self.random
        if kappa <= 1e-6:
            return TWOPI * random()

        a = 1.0 + _sqrt(1.0 + 4.0 * kappa * kappa)
        b = (a - _sqrt(2.0 * a))/(2.0 * kappa)
        r = (1.0 + b * b)/(2.0 * b)

        while True:
            u1 = random()

            z = _cos(_pi * u1)
            f = (1.0 + r * z)/(r + z)
            c = kappa * (r - f)

            u2 = random()

            if not (u2 >= c * (2.0 - c) and u2 > c * _exp(1.0 - c)):
                break

        u3 = random()
        if u3 > 0.5:
            theta = (mu % TWOPI) + _acos(f)
        else:
            theta = (mu % TWOPI) - _acos(f)

        return theta

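    # For example (illustrative sketch only): with kappa == 0.0 the result is
    # just a uniform angle, while a larger kappa concentrates draws near mu:
    #
    #   r = Random()
    #   anywhere = r.vonmisesvariate(0.0, 0.0)   # uniform on [0, 2*pi)
    #   near_pi = r.vonmisesvariate(_pi, 8.0)    # clustered around pi
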
## -------------------- gamma distribution --------------------

    def gammavariate(self, alpha, beta):
        """Gamma distribution.  Not the gamma function!

        Conditions on the parameters are alpha > 0 and beta > 0.

        """

        # alpha > 0, beta > 0, mean is alpha*beta, variance is alpha*beta**2

        # Warning: a few older sources define the gamma distribution in terms
        # of alpha > -1.0
        if alpha <= 0.0 or beta <= 0.0:
            raise ValueError, 'gammavariate: alpha and beta must be > 0.0'

        random = self.random
        if alpha > 1.0:

            # Uses R.C.H. Cheng, "The generation of Gamma
            # variables with non-integral shape parameters",
            # Applied Statistics, (1977), 26, No. 1, p71-74

            ainv = _sqrt(2.0 * alpha - 1.0)
            bbb = alpha - LOG4
            ccc = alpha + ainv

            while True:
                u1 = random()
                u2 = random()
                v = _log(u1/(1.0-u1))/ainv
                x = alpha*_exp(v)
                z = u1*u1*u2
                r = bbb+ccc*v-x
                if r + SG_MAGICCONST - 4.5*z >= 0.0 or r >= _log(z):
                    return x * beta

        elif alpha == 1.0:
            # expovariate(1)
            u = random()
            while u <= 1e-7:
                u = random()
            return -_log(u) * beta

        else:   # alpha is between 0 and 1 (exclusive)

            # Uses ALGORITHM GS of Statistical Computing - Kennedy & Gentle

            while True:
                u = random()
                b = (_e + alpha)/_e
                p = b*u
                if p <= 1.0:
                    x = pow(p, 1.0/alpha)
                else:
                    # p > 1
                    x = -_log((b-p)/alpha)
                u1 = random()
                if not (((p <= 1.0) and (u1 > _exp(-x))) or
                        ((p > 1) and (u1 > pow(x, alpha - 1.0)))):
                    break
            return x * beta

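    # For example (illustrative sketch only), since the mean is alpha*beta
    # and the variance is alpha*beta**2, gammavariate(9.0, 0.5) averages
    # about 4.5 over many draws:
    #
    #   r = Random()
    #   total = 0.0
    #   for i in xrange(10000):
    #       total = total + r.gammavariate(9.0, 0.5)
    #   approx_mean = total / 10000              # close to 4.5
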
    def stdgamma(self, alpha, ainv, bbb, ccc):
        # This method was (and shall remain) undocumented.
        # This method is deprecated
        # for the following reasons:
        # 1. Returns same as .gammavariate(alpha, 1.0)
        # 2. Requires caller to provide 3 extra arguments
        #    that are functions of alpha anyway
        # 3. Can't be used for alpha < 0.5

        # ainv = sqrt(2 * alpha - 1)
        # bbb = alpha - log(4)
        # ccc = alpha + ainv
        import warnings
        warnings.warn("The stdgamma function is deprecated; "
                      "use gammavariate() instead",
                      DeprecationWarning)
        return self.gammavariate(alpha, 1.0)

## -------------------- Gauss (faster alternative) --------------------

    def gauss(self, mu, sigma):
        """Gaussian distribution.

        mu is the mean, and sigma is the standard deviation.  This is
        slightly faster than the normalvariate() function.

        Not thread-safe without a lock around calls.

        """

        # When x and y are two variables from [0, 1), uniformly
        # distributed, then
        #
        #    cos(2*pi*x)*sqrt(-2*log(1-y))
        #    sin(2*pi*x)*sqrt(-2*log(1-y))
        #
        # are two *independent* variables with normal distribution
        # (mu = 0, sigma = 1).
        # (Lambert Meertens)
        # (corrected version; bug discovered by Mike Miller, fixed by LM)

        # Multithreading note: When two threads call this function
        # simultaneously, it is possible that they will receive the
        # same return value.  The window is very small though.  To
        # avoid this, you have to use a lock around all calls.  (I
        # didn't want to slow this down in the serial case by using a
        # lock here.)

        random = self.random
        z = self.gauss_next
        self.gauss_next = None
        if z is None:
            x2pi = random() * TWOPI
            g2rad = _sqrt(-2.0 * _log(1.0 - random()))
            z = _cos(x2pi) * g2rad
            self.gauss_next = _sin(x2pi) * g2rad

        return mu + z*sigma

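    # If several threads must share one generator, one option (illustrative
    # sketch only, not part of this module) is to serialize calls with a
    # lock, at some cost in speed:
    #
    #   import threading
    #   _gauss_lock = threading.Lock()
    #
    #   def locked_gauss(r, mu, sigma):
    #       _gauss_lock.acquire()
    #       try:
    #           return r.gauss(mu, sigma)
    #       finally:
    #           _gauss_lock.release()
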
## -------------------- beta --------------------
## See
## http://sourceforge.net/bugs/?func=detailbug&bug_id=130030&group_id=5470
## for Ivan Frohne's insightful analysis of why the original implementation:
##
##    def betavariate(self, alpha, beta):
##        # Discrete Event Simulation in C, pp 87-88.
##
##        y = self.expovariate(alpha)
##        z = self.expovariate(1.0/beta)
##        return z/(y+z)
##
## was dead wrong, and how it probably got that way.

    def betavariate(self, alpha, beta):
        """Beta distribution.

        Conditions on the parameters are alpha > 0 and beta > 0.
        Returned values range between 0 and 1.

        """

        # This version due to Janne Sinkkonen, and matches all the std
        # texts (e.g., Knuth Vol 2 Ed 3 pg 134 "the beta distribution").
        y = self.gammavariate(alpha, 1.)
        if y == 0:
            return 0.0
        else:
            return y / (y + self.gammavariate(beta, 1.))

## -------------------- Pareto --------------------

    def paretovariate(self, alpha):
        """Pareto distribution.  alpha is the shape parameter."""
        # Jain, pg. 495

        u = self.random()
        return 1.0 / pow(u, 1.0/alpha)

## -------------------- Weibull --------------------

    def weibullvariate(self, alpha, beta):
        """Weibull distribution.

        alpha is the scale parameter and beta is the shape parameter.

        """
        # Jain, pg. 499; bug fix courtesy Bill Arms

        u = self.random()
        return alpha * pow(-_log(u), 1.0/beta)

## -------------------- test program --------------------

def _test_generator(n, funccall):
    import time
    print n, 'times', funccall
    code = compile(funccall, funccall, 'eval')
    sum = 0.0
    sqsum = 0.0
    smallest = 1e10
    largest = -1e10
    t0 = time.time()
    for i in range(n):
        x = eval(code)
        sum = sum + x
        sqsum = sqsum + x*x
        smallest = min(x, smallest)
        largest = max(x, largest)
    t1 = time.time()
    print round(t1-t0, 3), 'sec,',
    avg = sum/n
    stddev = _sqrt(sqsum/n - avg*avg)
    print 'avg %g, stddev %g, min %g, max %g' % \
              (avg, stddev, smallest, largest)

def _test_sample(n):
    # For the entire allowable range of 0 <= k <= n, validate that
    # the sample is of the correct length and contains only unique items
    population = xrange(n)
    for k in xrange(n+1):
        s = sample(population, k)
        uniq = dict.fromkeys(s)
        assert len(uniq) == len(s) == k
        assert None not in uniq

def _sample_generator(n, k):
    # Return a fixed element from the sample.  Validates random ordering.
    return sample(xrange(n), k)[k//2]

def _test(N=2000):
    print 'TWOPI         =', TWOPI
    print 'LOG4          =', LOG4
    print 'NV_MAGICCONST =', NV_MAGICCONST
    print 'SG_MAGICCONST =', SG_MAGICCONST
    _test_generator(N, 'random()')
    _test_generator(N, 'normalvariate(0.0, 1.0)')
    _test_generator(N, 'lognormvariate(0.0, 1.0)')
    _test_generator(N, 'cunifvariate(0.0, 1.0)')
    _test_generator(N, 'expovariate(1.0)')
    _test_generator(N, 'vonmisesvariate(0.0, 1.0)')
    _test_generator(N, 'gammavariate(0.01, 1.0)')
    _test_generator(N, 'gammavariate(0.1, 1.0)')
    _test_generator(N, 'gammavariate(0.1, 2.0)')
    _test_generator(N, 'gammavariate(0.5, 1.0)')
    _test_generator(N, 'gammavariate(0.9, 1.0)')
    _test_generator(N, 'gammavariate(1.0, 1.0)')
    _test_generator(N, 'gammavariate(2.0, 1.0)')
    _test_generator(N, 'gammavariate(20.0, 1.0)')
    _test_generator(N, 'gammavariate(200.0, 1.0)')
    _test_generator(N, 'gauss(0.0, 1.0)')
    _test_generator(N, 'betavariate(3.0, 3.0)')
    _test_generator(N, 'paretovariate(1.0)')
    _test_generator(N, 'weibullvariate(1.0, 1.0)')
    _test_generator(N, '_sample_generator(50, 5)')   # expected s.d.: 14.4
    _test_generator(N, '_sample_generator(50, 45)')  # expected s.d.: 14.4
    _test_sample(500)

    # Test jumpahead.
    s = getstate()
    jumpahead(N)
    r1 = random()
    # now do it the slow way
    setstate(s)
    for i in range(N):
        random()
    r2 = random()
    if r1 != r2:
        raise ValueError("jumpahead test failed " + `(N, r1, r2)`)

# Create one instance, seeded from current time, and export its methods
# as module-level functions.  The functions are not threadsafe, and state
# is shared across all uses (both in the user's code and in the Python
# libraries), but that's fine for most programs and is easier for the
# casual user than making them instantiate their own Random() instance.

_inst = Random()
seed = _inst.seed
random = _inst.random
uniform = _inst.uniform
randint = _inst.randint
choice = _inst.choice
randrange = _inst.randrange
sample = _inst.sample
shuffle = _inst.shuffle
normalvariate = _inst.normalvariate
lognormvariate = _inst.lognormvariate
cunifvariate = _inst.cunifvariate
expovariate = _inst.expovariate
vonmisesvariate = _inst.vonmisesvariate
gammavariate = _inst.gammavariate
stdgamma = _inst.stdgamma
gauss = _inst.gauss
betavariate = _inst.betavariate
paretovariate = _inst.paretovariate
weibullvariate = _inst.weibullvariate
getstate = _inst.getstate
setstate = _inst.setstate
jumpahead = _inst.jumpahead
whseed = _inst.whseed

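# For example (illustrative sketch only), callers can either use the shared
# module-level generator or keep a private instance with its own state:
#
#   import random
#   random.seed(42)              # shared module-level generator
#   x = random.random()
#
#   r = random.Random(42)        # private generator, independent state
#   y = r.random()
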
if __name__ == '__main__':
    _test()