/*	$OpenBSD: arc4random_uniform.c,v 1.3 2019/01/20 02:59:07 bcook Exp $	*/

/*
 * Copyright (c) 2008, Damien Miller <djm@openbsd.org>
 *
 * Permission to use, copy, modify, and distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

19*353d02e9SEmmanuel Vadot #include <sys/types.h>
20*353d02e9SEmmanuel Vadot #include <sys/libkern.h>
21*353d02e9SEmmanuel Vadot 
/*
 * Calculate a uniformly distributed random number less than upper_bound
 * avoiding "modulo bias".
 *
 * Uniformity is achieved by generating new random numbers until the one
 * returned is outside the range [0, 2**32 % upper_bound).  This
 * guarantees the selected random number will be inside
 * [2**32 % upper_bound, 2**32) which maps back to [0, upper_bound)
 * after reduction modulo upper_bound.
 */
uint32_t
arc4random_uniform(uint32_t upper_bound)
{
	uint32_t threshold, value;

	/* With zero or one possible outcomes there is nothing to draw. */
	if (upper_bound < 2)
		return 0;

	/*
	 * Values below 2**32 % upper_bound must be rejected to keep the
	 * result uniform.  Since 2**32 does not fit in 32 bits, compute
	 * the threshold as (2**32 - upper_bound) % upper_bound, which is
	 * equal and representable: -upper_bound wraps to 2**32 - upper_bound
	 * in unsigned arithmetic.
	 */
	threshold = -upper_bound % upper_bound;

	/*
	 * Redraw while the sample falls in the biased low region.  Each
	 * draw lands in the accepted range [threshold, 2**32) with
	 * probability > 0.5 even in the worst case, so the expected
	 * number of iterations is small.
	 */
	do {
		value = arc4random();
	} while (value < threshold);

	return value % upper_bound;
}
57