xref: /freebsd/contrib/jemalloc/src/jemalloc.c (revision a4bd5210d5e680818a9319a76ebf71caef95bcd3)
1*a4bd5210SJason Evans #define	JEMALLOC_C_
2*a4bd5210SJason Evans #include "jemalloc/internal/jemalloc_internal.h"
3*a4bd5210SJason Evans 
4*a4bd5210SJason Evans /******************************************************************************/
5*a4bd5210SJason Evans /* Data. */
6*a4bd5210SJason Evans 
7*a4bd5210SJason Evans malloc_tsd_data(, arenas, arena_t *, NULL)
8*a4bd5210SJason Evans malloc_tsd_data(, thread_allocated, thread_allocated_t,
9*a4bd5210SJason Evans     THREAD_ALLOCATED_INITIALIZER)
10*a4bd5210SJason Evans 
11*a4bd5210SJason Evans const char	*__malloc_options_1_0;
12*a4bd5210SJason Evans __sym_compat(_malloc_options, __malloc_options_1_0, FBSD_1.0);
13*a4bd5210SJason Evans 
14*a4bd5210SJason Evans /* Runtime configuration options. */
15*a4bd5210SJason Evans const char	*je_malloc_conf JEMALLOC_ATTR(visibility("default"));
16*a4bd5210SJason Evans #ifdef JEMALLOC_DEBUG
17*a4bd5210SJason Evans bool	opt_abort = true;
18*a4bd5210SJason Evans #  ifdef JEMALLOC_FILL
19*a4bd5210SJason Evans bool	opt_junk = true;
20*a4bd5210SJason Evans #  else
21*a4bd5210SJason Evans bool	opt_junk = false;
22*a4bd5210SJason Evans #  endif
23*a4bd5210SJason Evans #else
24*a4bd5210SJason Evans bool	opt_abort = false;
25*a4bd5210SJason Evans bool	opt_junk = false;
26*a4bd5210SJason Evans #endif
27*a4bd5210SJason Evans size_t	opt_quarantine = ZU(0);
28*a4bd5210SJason Evans bool	opt_redzone = false;
29*a4bd5210SJason Evans bool	opt_utrace = false;
30*a4bd5210SJason Evans bool	opt_valgrind = false;
31*a4bd5210SJason Evans bool	opt_xmalloc = false;
32*a4bd5210SJason Evans bool	opt_zero = false;
33*a4bd5210SJason Evans size_t	opt_narenas = 0;
34*a4bd5210SJason Evans 
35*a4bd5210SJason Evans unsigned	ncpus;
36*a4bd5210SJason Evans 
37*a4bd5210SJason Evans malloc_mutex_t		arenas_lock;
38*a4bd5210SJason Evans arena_t			**arenas;
39*a4bd5210SJason Evans unsigned		narenas;
40*a4bd5210SJason Evans 
41*a4bd5210SJason Evans /* Set to true once the allocator has been initialized. */
42*a4bd5210SJason Evans static bool		malloc_initialized = false;
43*a4bd5210SJason Evans 
44*a4bd5210SJason Evans #ifdef JEMALLOC_THREADED_INIT
45*a4bd5210SJason Evans /* Used to let the initializing thread recursively allocate. */
46*a4bd5210SJason Evans #  define NO_INITIALIZER	((unsigned long)0)
47*a4bd5210SJason Evans #  define INITIALIZER		pthread_self()
48*a4bd5210SJason Evans #  define IS_INITIALIZER	(malloc_initializer == pthread_self())
49*a4bd5210SJason Evans static pthread_t		malloc_initializer = NO_INITIALIZER;
50*a4bd5210SJason Evans #else
51*a4bd5210SJason Evans #  define NO_INITIALIZER	false
52*a4bd5210SJason Evans #  define INITIALIZER		true
53*a4bd5210SJason Evans #  define IS_INITIALIZER	malloc_initializer
54*a4bd5210SJason Evans static bool			malloc_initializer = NO_INITIALIZER;
55*a4bd5210SJason Evans #endif
56*a4bd5210SJason Evans 
57*a4bd5210SJason Evans /* Used to avoid initialization races. */
58*a4bd5210SJason Evans static malloc_mutex_t	init_lock = MALLOC_MUTEX_INITIALIZER;
59*a4bd5210SJason Evans 
60*a4bd5210SJason Evans typedef struct {
61*a4bd5210SJason Evans 	void	*p;	/* Input pointer (as in realloc(p, s)). */
62*a4bd5210SJason Evans 	size_t	s;	/* Request size. */
63*a4bd5210SJason Evans 	void	*r;	/* Result pointer. */
64*a4bd5210SJason Evans } malloc_utrace_t;
65*a4bd5210SJason Evans 
66*a4bd5210SJason Evans #ifdef JEMALLOC_UTRACE
67*a4bd5210SJason Evans #  define UTRACE(a, b, c) do {						\
68*a4bd5210SJason Evans 	if (opt_utrace) {						\
69*a4bd5210SJason Evans 		malloc_utrace_t ut;					\
70*a4bd5210SJason Evans 		ut.p = (a);						\
71*a4bd5210SJason Evans 		ut.s = (b);						\
72*a4bd5210SJason Evans 		ut.r = (c);						\
73*a4bd5210SJason Evans 		utrace(&ut, sizeof(ut));				\
74*a4bd5210SJason Evans 	}								\
75*a4bd5210SJason Evans } while (0)
76*a4bd5210SJason Evans #else
77*a4bd5210SJason Evans #  define UTRACE(a, b, c)
78*a4bd5210SJason Evans #endif
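/*
 * Illustrative usage note (editorial example, not from the original source):
 * when built with JEMALLOC_UTRACE and run with the "utrace" option enabled,
 * e.g.
 *
 *	MALLOC_CONF=utrace:true ./prog
 *
 * every allocator entry point records a malloc_utrace_t via utrace(2); on
 * FreeBSD such records can typically be captured with ktrace(1) and decoded
 * with kdump(1).  The environment variable name assumes an unprefixed build.
 */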
79*a4bd5210SJason Evans 
80*a4bd5210SJason Evans /******************************************************************************/
81*a4bd5210SJason Evans /* Function prototypes for non-inline static functions. */
82*a4bd5210SJason Evans 
83*a4bd5210SJason Evans static void	stats_print_atexit(void);
84*a4bd5210SJason Evans static unsigned	malloc_ncpus(void);
85*a4bd5210SJason Evans static bool	malloc_conf_next(char const **opts_p, char const **k_p,
86*a4bd5210SJason Evans     size_t *klen_p, char const **v_p, size_t *vlen_p);
87*a4bd5210SJason Evans static void	malloc_conf_error(const char *msg, const char *k, size_t klen,
88*a4bd5210SJason Evans     const char *v, size_t vlen);
89*a4bd5210SJason Evans static void	malloc_conf_init(void);
90*a4bd5210SJason Evans static bool	malloc_init_hard(void);
91*a4bd5210SJason Evans static int	imemalign(void **memptr, size_t alignment, size_t size,
92*a4bd5210SJason Evans     size_t min_alignment);
93*a4bd5210SJason Evans 
94*a4bd5210SJason Evans /******************************************************************************/
95*a4bd5210SJason Evans /*
96*a4bd5210SJason Evans  * Begin miscellaneous support functions.
97*a4bd5210SJason Evans  */
98*a4bd5210SJason Evans 
99*a4bd5210SJason Evans /* Create a new arena and insert it into the arenas array at index ind. */
100*a4bd5210SJason Evans arena_t *
101*a4bd5210SJason Evans arenas_extend(unsigned ind)
102*a4bd5210SJason Evans {
103*a4bd5210SJason Evans 	arena_t *ret;
104*a4bd5210SJason Evans 
105*a4bd5210SJason Evans 	ret = (arena_t *)base_alloc(sizeof(arena_t));
106*a4bd5210SJason Evans 	if (ret != NULL && arena_new(ret, ind) == false) {
107*a4bd5210SJason Evans 		arenas[ind] = ret;
108*a4bd5210SJason Evans 		return (ret);
109*a4bd5210SJason Evans 	}
110*a4bd5210SJason Evans 	/* Only reached if there is an OOM error. */
111*a4bd5210SJason Evans 
112*a4bd5210SJason Evans 	/*
113*a4bd5210SJason Evans 	 * OOM here is quite inconvenient to propagate, since dealing with it
114*a4bd5210SJason Evans 	 * would require a check for failure in the fast path.  Instead, punt
115*a4bd5210SJason Evans 	 * by using arenas[0].  In practice, this is an extremely unlikely
116*a4bd5210SJason Evans 	 * failure.
117*a4bd5210SJason Evans 	 */
118*a4bd5210SJason Evans 	malloc_write("<jemalloc>: Error initializing arena\n");
119*a4bd5210SJason Evans 	if (opt_abort)
120*a4bd5210SJason Evans 		abort();
121*a4bd5210SJason Evans 
122*a4bd5210SJason Evans 	return (arenas[0]);
123*a4bd5210SJason Evans }
124*a4bd5210SJason Evans 
125*a4bd5210SJason Evans /* Slow path, called only by choose_arena(). */
126*a4bd5210SJason Evans arena_t *
127*a4bd5210SJason Evans choose_arena_hard(void)
128*a4bd5210SJason Evans {
129*a4bd5210SJason Evans 	arena_t *ret;
130*a4bd5210SJason Evans 
131*a4bd5210SJason Evans 	if (narenas > 1) {
132*a4bd5210SJason Evans 		unsigned i, choose, first_null;
133*a4bd5210SJason Evans 
134*a4bd5210SJason Evans 		choose = 0;
135*a4bd5210SJason Evans 		first_null = narenas;
136*a4bd5210SJason Evans 		malloc_mutex_lock(&arenas_lock);
137*a4bd5210SJason Evans 		assert(arenas[0] != NULL);
138*a4bd5210SJason Evans 		for (i = 1; i < narenas; i++) {
139*a4bd5210SJason Evans 			if (arenas[i] != NULL) {
140*a4bd5210SJason Evans 				/*
141*a4bd5210SJason Evans 				 * Choose the first arena that has the lowest
142*a4bd5210SJason Evans 				 * number of threads assigned to it.
143*a4bd5210SJason Evans 				 */
144*a4bd5210SJason Evans 				if (arenas[i]->nthreads <
145*a4bd5210SJason Evans 				    arenas[choose]->nthreads)
146*a4bd5210SJason Evans 					choose = i;
147*a4bd5210SJason Evans 			} else if (first_null == narenas) {
148*a4bd5210SJason Evans 				/*
149*a4bd5210SJason Evans 				 * Record the index of the first uninitialized
150*a4bd5210SJason Evans 				 * arena, in case all extant arenas are in use.
151*a4bd5210SJason Evans 				 *
152*a4bd5210SJason Evans 				 * NB: It is possible for there to be
153*a4bd5210SJason Evans 				 * discontinuities in terms of initialized
154*a4bd5210SJason Evans 				 * versus uninitialized arenas, due to the
155*a4bd5210SJason Evans 				 * "thread.arena" mallctl.
156*a4bd5210SJason Evans 				 */
157*a4bd5210SJason Evans 				first_null = i;
158*a4bd5210SJason Evans 			}
159*a4bd5210SJason Evans 		}
160*a4bd5210SJason Evans 
161*a4bd5210SJason Evans 		if (arenas[choose]->nthreads == 0 || first_null == narenas) {
162*a4bd5210SJason Evans 			/*
163*a4bd5210SJason Evans 			 * Use an unloaded arena, or the least loaded arena if
164*a4bd5210SJason Evans 			 * all arenas are already initialized.
165*a4bd5210SJason Evans 			 */
166*a4bd5210SJason Evans 			ret = arenas[choose];
167*a4bd5210SJason Evans 		} else {
168*a4bd5210SJason Evans 			/* Initialize a new arena. */
169*a4bd5210SJason Evans 			ret = arenas_extend(first_null);
170*a4bd5210SJason Evans 		}
171*a4bd5210SJason Evans 		ret->nthreads++;
172*a4bd5210SJason Evans 		malloc_mutex_unlock(&arenas_lock);
173*a4bd5210SJason Evans 	} else {
174*a4bd5210SJason Evans 		ret = arenas[0];
175*a4bd5210SJason Evans 		malloc_mutex_lock(&arenas_lock);
176*a4bd5210SJason Evans 		ret->nthreads++;
177*a4bd5210SJason Evans 		malloc_mutex_unlock(&arenas_lock);
178*a4bd5210SJason Evans 	}
179*a4bd5210SJason Evans 
180*a4bd5210SJason Evans 	arenas_tsd_set(&ret);
181*a4bd5210SJason Evans 
182*a4bd5210SJason Evans 	return (ret);
183*a4bd5210SJason Evans }
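/*
 * Illustrative walk-through of the selection policy above (editorial
 * example): with narenas == 4 and arenas == {A0: 2 threads, A1: 1 thread,
 * NULL, A3: 0 threads}, the scan ends with choose == 3 and first_null == 2;
 * since arenas[3] has no threads assigned, it is reused.  If A3 instead had
 * one thread, choose would remain 1 and, because an uninitialized slot
 * exists, arenas_extend(2) would create a new arena for the calling thread.
 */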
184*a4bd5210SJason Evans 
185*a4bd5210SJason Evans static void
186*a4bd5210SJason Evans stats_print_atexit(void)
187*a4bd5210SJason Evans {
188*a4bd5210SJason Evans 
189*a4bd5210SJason Evans 	if (config_tcache && config_stats) {
190*a4bd5210SJason Evans 		unsigned i;
191*a4bd5210SJason Evans 
192*a4bd5210SJason Evans 		/*
193*a4bd5210SJason Evans 		 * Merge stats from extant threads.  This is racy, since
194*a4bd5210SJason Evans 		 * individual threads do not lock when recording tcache stats
195*a4bd5210SJason Evans 		 * events.  As a consequence, the final stats may be slightly
196*a4bd5210SJason Evans 		 * out of date by the time they are reported, if other threads
197*a4bd5210SJason Evans 		 * continue to allocate.
198*a4bd5210SJason Evans 		 */
199*a4bd5210SJason Evans 		for (i = 0; i < narenas; i++) {
200*a4bd5210SJason Evans 			arena_t *arena = arenas[i];
201*a4bd5210SJason Evans 			if (arena != NULL) {
202*a4bd5210SJason Evans 				tcache_t *tcache;
203*a4bd5210SJason Evans 
204*a4bd5210SJason Evans 				/*
205*a4bd5210SJason Evans 				 * tcache_stats_merge() locks bins, so if any
206*a4bd5210SJason Evans 				 * code is introduced that acquires both arena
207*a4bd5210SJason Evans 				 * and bin locks in the opposite order,
208*a4bd5210SJason Evans 				 * deadlocks may result.
209*a4bd5210SJason Evans 				 */
210*a4bd5210SJason Evans 				malloc_mutex_lock(&arena->lock);
211*a4bd5210SJason Evans 				ql_foreach(tcache, &arena->tcache_ql, link) {
212*a4bd5210SJason Evans 					tcache_stats_merge(tcache, arena);
213*a4bd5210SJason Evans 				}
214*a4bd5210SJason Evans 				malloc_mutex_unlock(&arena->lock);
215*a4bd5210SJason Evans 			}
216*a4bd5210SJason Evans 		}
217*a4bd5210SJason Evans 	}
218*a4bd5210SJason Evans 	je_malloc_stats_print(NULL, NULL, NULL);
219*a4bd5210SJason Evans }
220*a4bd5210SJason Evans 
221*a4bd5210SJason Evans /*
222*a4bd5210SJason Evans  * End miscellaneous support functions.
223*a4bd5210SJason Evans  */
224*a4bd5210SJason Evans /******************************************************************************/
225*a4bd5210SJason Evans /*
226*a4bd5210SJason Evans  * Begin initialization functions.
227*a4bd5210SJason Evans  */
228*a4bd5210SJason Evans 
229*a4bd5210SJason Evans static unsigned
230*a4bd5210SJason Evans malloc_ncpus(void)
231*a4bd5210SJason Evans {
232*a4bd5210SJason Evans 	unsigned ret;
233*a4bd5210SJason Evans 	long result;
234*a4bd5210SJason Evans 
235*a4bd5210SJason Evans 	result = sysconf(_SC_NPROCESSORS_ONLN);
236*a4bd5210SJason Evans 	if (result == -1) {
237*a4bd5210SJason Evans 		/* Error. */
238*a4bd5210SJason Evans 		ret = 1;
239*a4bd5210SJason Evans 	} else
240*a4bd5210SJason Evans 		ret = (unsigned)result;
241*a4bd5210SJason Evans 
242*a4bd5210SJason Evans 	return (ret);
243*a4bd5210SJason Evans }
244*a4bd5210SJason Evans 
245*a4bd5210SJason Evans void
246*a4bd5210SJason Evans arenas_cleanup(void *arg)
247*a4bd5210SJason Evans {
248*a4bd5210SJason Evans 	arena_t *arena = *(arena_t **)arg;
249*a4bd5210SJason Evans 
250*a4bd5210SJason Evans 	malloc_mutex_lock(&arenas_lock);
251*a4bd5210SJason Evans 	arena->nthreads--;
252*a4bd5210SJason Evans 	malloc_mutex_unlock(&arenas_lock);
253*a4bd5210SJason Evans }
254*a4bd5210SJason Evans 
255*a4bd5210SJason Evans static inline bool
256*a4bd5210SJason Evans malloc_init(void)
257*a4bd5210SJason Evans {
258*a4bd5210SJason Evans 
259*a4bd5210SJason Evans 	if (malloc_initialized == false)
260*a4bd5210SJason Evans 		return (malloc_init_hard());
261*a4bd5210SJason Evans 
262*a4bd5210SJason Evans 	return (false);
263*a4bd5210SJason Evans }
264*a4bd5210SJason Evans 
265*a4bd5210SJason Evans static bool
266*a4bd5210SJason Evans malloc_conf_next(char const **opts_p, char const **k_p, size_t *klen_p,
267*a4bd5210SJason Evans     char const **v_p, size_t *vlen_p)
268*a4bd5210SJason Evans {
269*a4bd5210SJason Evans 	bool accept;
270*a4bd5210SJason Evans 	const char *opts = *opts_p;
271*a4bd5210SJason Evans 
272*a4bd5210SJason Evans 	*k_p = opts;
273*a4bd5210SJason Evans 
274*a4bd5210SJason Evans 	for (accept = false; accept == false;) {
275*a4bd5210SJason Evans 		switch (*opts) {
276*a4bd5210SJason Evans 		case 'A': case 'B': case 'C': case 'D': case 'E': case 'F':
277*a4bd5210SJason Evans 		case 'G': case 'H': case 'I': case 'J': case 'K': case 'L':
278*a4bd5210SJason Evans 		case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R':
279*a4bd5210SJason Evans 		case 'S': case 'T': case 'U': case 'V': case 'W': case 'X':
280*a4bd5210SJason Evans 		case 'Y': case 'Z':
281*a4bd5210SJason Evans 		case 'a': case 'b': case 'c': case 'd': case 'e': case 'f':
282*a4bd5210SJason Evans 		case 'g': case 'h': case 'i': case 'j': case 'k': case 'l':
283*a4bd5210SJason Evans 		case 'm': case 'n': case 'o': case 'p': case 'q': case 'r':
284*a4bd5210SJason Evans 		case 's': case 't': case 'u': case 'v': case 'w': case 'x':
285*a4bd5210SJason Evans 		case 'y': case 'z':
286*a4bd5210SJason Evans 		case '0': case '1': case '2': case '3': case '4': case '5':
287*a4bd5210SJason Evans 		case '6': case '7': case '8': case '9':
288*a4bd5210SJason Evans 		case '_':
289*a4bd5210SJason Evans 			opts++;
290*a4bd5210SJason Evans 			break;
291*a4bd5210SJason Evans 		case ':':
292*a4bd5210SJason Evans 			opts++;
293*a4bd5210SJason Evans 			*klen_p = (uintptr_t)opts - 1 - (uintptr_t)*k_p;
294*a4bd5210SJason Evans 			*v_p = opts;
295*a4bd5210SJason Evans 			accept = true;
296*a4bd5210SJason Evans 			break;
297*a4bd5210SJason Evans 		case '\0':
298*a4bd5210SJason Evans 			if (opts != *opts_p) {
299*a4bd5210SJason Evans 				malloc_write("<jemalloc>: Conf string ends "
300*a4bd5210SJason Evans 				    "with key\n");
301*a4bd5210SJason Evans 			}
302*a4bd5210SJason Evans 			return (true);
303*a4bd5210SJason Evans 		default:
304*a4bd5210SJason Evans 			malloc_write("<jemalloc>: Malformed conf string\n");
305*a4bd5210SJason Evans 			return (true);
306*a4bd5210SJason Evans 		}
307*a4bd5210SJason Evans 	}
308*a4bd5210SJason Evans 
309*a4bd5210SJason Evans 	for (accept = false; accept == false;) {
310*a4bd5210SJason Evans 		switch (*opts) {
311*a4bd5210SJason Evans 		case ',':
312*a4bd5210SJason Evans 			opts++;
313*a4bd5210SJason Evans 			/*
314*a4bd5210SJason Evans 			 * Look ahead one character here, because the next time
315*a4bd5210SJason Evans 			 * this function is called, it will assume that end of
316*a4bd5210SJason Evans 			 * input has been cleanly reached if no input remains,
317*a4bd5210SJason Evans 			 * but we have optimistically already consumed the
318*a4bd5210SJason Evans 			 * comma if one exists.
319*a4bd5210SJason Evans 			 */
320*a4bd5210SJason Evans 			if (*opts == '\0') {
321*a4bd5210SJason Evans 				malloc_write("<jemalloc>: Conf string ends "
322*a4bd5210SJason Evans 				    "with comma\n");
323*a4bd5210SJason Evans 			}
324*a4bd5210SJason Evans 			*vlen_p = (uintptr_t)opts - 1 - (uintptr_t)*v_p;
325*a4bd5210SJason Evans 			accept = true;
326*a4bd5210SJason Evans 			break;
327*a4bd5210SJason Evans 		case '\0':
328*a4bd5210SJason Evans 			*vlen_p = (uintptr_t)opts - (uintptr_t)*v_p;
329*a4bd5210SJason Evans 			accept = true;
330*a4bd5210SJason Evans 			break;
331*a4bd5210SJason Evans 		default:
332*a4bd5210SJason Evans 			opts++;
333*a4bd5210SJason Evans 			break;
334*a4bd5210SJason Evans 		}
335*a4bd5210SJason Evans 	}
336*a4bd5210SJason Evans 
337*a4bd5210SJason Evans 	*opts_p = opts;
338*a4bd5210SJason Evans 	return (false);
339*a4bd5210SJason Evans }
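/*
 * Illustrative example (editorial): given the conf string
 *
 *	"abort:true,lg_chunk:22,narenas:4"
 *
 * successive calls to malloc_conf_next() yield the key/value pairs
 * ("abort", "true"), ("lg_chunk", "22"), and ("narenas", "4"), returning
 * false each time; once the terminating '\0' is reached with no input
 * consumed, the function returns true without emitting a warning.
 */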
340*a4bd5210SJason Evans 
341*a4bd5210SJason Evans static void
342*a4bd5210SJason Evans malloc_conf_error(const char *msg, const char *k, size_t klen, const char *v,
343*a4bd5210SJason Evans     size_t vlen)
344*a4bd5210SJason Evans {
345*a4bd5210SJason Evans 
346*a4bd5210SJason Evans 	malloc_printf("<jemalloc>: %s: %.*s:%.*s\n", msg, (int)klen, k,
347*a4bd5210SJason Evans 	    (int)vlen, v);
348*a4bd5210SJason Evans }
349*a4bd5210SJason Evans 
350*a4bd5210SJason Evans static void
351*a4bd5210SJason Evans malloc_conf_init(void)
352*a4bd5210SJason Evans {
353*a4bd5210SJason Evans 	unsigned i;
354*a4bd5210SJason Evans 	char buf[PATH_MAX + 1];
355*a4bd5210SJason Evans 	const char *opts, *k, *v;
356*a4bd5210SJason Evans 	size_t klen, vlen;
357*a4bd5210SJason Evans 
358*a4bd5210SJason Evans 	for (i = 0; i < 3; i++) {
359*a4bd5210SJason Evans 		/* Get runtime configuration. */
360*a4bd5210SJason Evans 		switch (i) {
361*a4bd5210SJason Evans 		case 0:
362*a4bd5210SJason Evans 			if (je_malloc_conf != NULL) {
363*a4bd5210SJason Evans 				/*
364*a4bd5210SJason Evans 				 * Use options that were compiled into the
365*a4bd5210SJason Evans 				 * program.
366*a4bd5210SJason Evans 				 */
367*a4bd5210SJason Evans 				opts = je_malloc_conf;
368*a4bd5210SJason Evans 			} else {
369*a4bd5210SJason Evans 				/* No configuration specified. */
370*a4bd5210SJason Evans 				buf[0] = '\0';
371*a4bd5210SJason Evans 				opts = buf;
372*a4bd5210SJason Evans 			}
373*a4bd5210SJason Evans 			break;
374*a4bd5210SJason Evans 		case 1: {
375*a4bd5210SJason Evans 			int linklen;
376*a4bd5210SJason Evans 			const char *linkname =
377*a4bd5210SJason Evans #ifdef JEMALLOC_PREFIX
378*a4bd5210SJason Evans 			    "/etc/"JEMALLOC_PREFIX"malloc.conf"
379*a4bd5210SJason Evans #else
380*a4bd5210SJason Evans 			    "/etc/malloc.conf"
381*a4bd5210SJason Evans #endif
382*a4bd5210SJason Evans 			    ;
383*a4bd5210SJason Evans 
384*a4bd5210SJason Evans 			if ((linklen = readlink(linkname, buf,
385*a4bd5210SJason Evans 			    sizeof(buf) - 1)) != -1) {
386*a4bd5210SJason Evans 				/*
387*a4bd5210SJason Evans 				 * Use the name that the "/etc/malloc.conf"
388*a4bd5210SJason Evans 				 * symbolic link points to.
389*a4bd5210SJason Evans 				 */
390*a4bd5210SJason Evans 				buf[linklen] = '\0';
391*a4bd5210SJason Evans 				opts = buf;
392*a4bd5210SJason Evans 			} else {
393*a4bd5210SJason Evans 				/* No configuration specified. */
394*a4bd5210SJason Evans 				buf[0] = '\0';
395*a4bd5210SJason Evans 				opts = buf;
396*a4bd5210SJason Evans 			}
397*a4bd5210SJason Evans 			break;
398*a4bd5210SJason Evans 		} case 2: {
399*a4bd5210SJason Evans 			const char *envname =
400*a4bd5210SJason Evans #ifdef JEMALLOC_PREFIX
401*a4bd5210SJason Evans 			    JEMALLOC_CPREFIX"MALLOC_CONF"
402*a4bd5210SJason Evans #else
403*a4bd5210SJason Evans 			    "MALLOC_CONF"
404*a4bd5210SJason Evans #endif
405*a4bd5210SJason Evans 			    ;
406*a4bd5210SJason Evans 
407*a4bd5210SJason Evans 			if (issetugid() == 0 && (opts = getenv(envname)) !=
408*a4bd5210SJason Evans 			    NULL) {
409*a4bd5210SJason Evans 				/*
410*a4bd5210SJason Evans 				 * Do nothing; opts is already initialized to
411*a4bd5210SJason Evans 				 * the value of the MALLOC_CONF environment
412*a4bd5210SJason Evans 				 * variable.
413*a4bd5210SJason Evans 				 */
414*a4bd5210SJason Evans 			} else {
415*a4bd5210SJason Evans 				/* No configuration specified. */
416*a4bd5210SJason Evans 				buf[0] = '\0';
417*a4bd5210SJason Evans 				opts = buf;
418*a4bd5210SJason Evans 			}
419*a4bd5210SJason Evans 			break;
420*a4bd5210SJason Evans 		} default:
421*a4bd5210SJason Evans 			/* NOTREACHED */
422*a4bd5210SJason Evans 			assert(false);
423*a4bd5210SJason Evans 			buf[0] = '\0';
424*a4bd5210SJason Evans 			opts = buf;
425*a4bd5210SJason Evans 		}
426*a4bd5210SJason Evans 
427*a4bd5210SJason Evans 		while (*opts != '\0' && malloc_conf_next(&opts, &k, &klen, &v,
428*a4bd5210SJason Evans 		    &vlen) == false) {
429*a4bd5210SJason Evans #define	CONF_HANDLE_BOOL_HIT(o, n, hit)					\
430*a4bd5210SJason Evans 			if (sizeof(#n)-1 == klen && strncmp(#n, k,	\
431*a4bd5210SJason Evans 			    klen) == 0) {				\
432*a4bd5210SJason Evans 				if (strncmp("true", v, vlen) == 0 &&	\
433*a4bd5210SJason Evans 				    vlen == sizeof("true")-1)		\
434*a4bd5210SJason Evans 					o = true;			\
435*a4bd5210SJason Evans 				else if (strncmp("false", v, vlen) ==	\
436*a4bd5210SJason Evans 				    0 && vlen == sizeof("false")-1)	\
437*a4bd5210SJason Evans 					o = false;			\
438*a4bd5210SJason Evans 				else {					\
439*a4bd5210SJason Evans 					malloc_conf_error(		\
440*a4bd5210SJason Evans 					    "Invalid conf value",	\
441*a4bd5210SJason Evans 					    k, klen, v, vlen);		\
442*a4bd5210SJason Evans 				}					\
443*a4bd5210SJason Evans 				hit = true;				\
444*a4bd5210SJason Evans 			} else						\
445*a4bd5210SJason Evans 				hit = false;
446*a4bd5210SJason Evans #define	CONF_HANDLE_BOOL(o, n) {					\
447*a4bd5210SJason Evans 			bool hit;					\
448*a4bd5210SJason Evans 			CONF_HANDLE_BOOL_HIT(o, n, hit);		\
449*a4bd5210SJason Evans 			if (hit)					\
450*a4bd5210SJason Evans 				continue;				\
451*a4bd5210SJason Evans }
452*a4bd5210SJason Evans #define	CONF_HANDLE_SIZE_T(o, n, min, max)				\
453*a4bd5210SJason Evans 			if (sizeof(#n)-1 == klen && strncmp(#n, k,	\
454*a4bd5210SJason Evans 			    klen) == 0) {				\
455*a4bd5210SJason Evans 				uintmax_t um;				\
456*a4bd5210SJason Evans 				char *end;				\
457*a4bd5210SJason Evans 									\
458*a4bd5210SJason Evans 				errno = 0;				\
459*a4bd5210SJason Evans 				um = malloc_strtoumax(v, &end, 0);	\
460*a4bd5210SJason Evans 				if (errno != 0 || (uintptr_t)end -	\
461*a4bd5210SJason Evans 				    (uintptr_t)v != vlen) {		\
462*a4bd5210SJason Evans 					malloc_conf_error(		\
463*a4bd5210SJason Evans 					    "Invalid conf value",	\
464*a4bd5210SJason Evans 					    k, klen, v, vlen);		\
465*a4bd5210SJason Evans 				} else if (um < min || um > max) {	\
466*a4bd5210SJason Evans 					malloc_conf_error(		\
467*a4bd5210SJason Evans 					    "Out-of-range conf value",	\
468*a4bd5210SJason Evans 					    k, klen, v, vlen);		\
469*a4bd5210SJason Evans 				} else					\
470*a4bd5210SJason Evans 					o = um;				\
471*a4bd5210SJason Evans 				continue;				\
472*a4bd5210SJason Evans 			}
473*a4bd5210SJason Evans #define	CONF_HANDLE_SSIZE_T(o, n, min, max)				\
474*a4bd5210SJason Evans 			if (sizeof(#n)-1 == klen && strncmp(#n, k,	\
475*a4bd5210SJason Evans 			    klen) == 0) {				\
476*a4bd5210SJason Evans 				long l;					\
477*a4bd5210SJason Evans 				char *end;				\
478*a4bd5210SJason Evans 									\
479*a4bd5210SJason Evans 				errno = 0;				\
480*a4bd5210SJason Evans 				l = strtol(v, &end, 0);			\
481*a4bd5210SJason Evans 				if (errno != 0 || (uintptr_t)end -	\
482*a4bd5210SJason Evans 				    (uintptr_t)v != vlen) {		\
483*a4bd5210SJason Evans 					malloc_conf_error(		\
484*a4bd5210SJason Evans 					    "Invalid conf value",	\
485*a4bd5210SJason Evans 					    k, klen, v, vlen);		\
486*a4bd5210SJason Evans 				} else if (l < (ssize_t)min || l >	\
487*a4bd5210SJason Evans 				    (ssize_t)max) {			\
488*a4bd5210SJason Evans 					malloc_conf_error(		\
489*a4bd5210SJason Evans 					    "Out-of-range conf value",	\
490*a4bd5210SJason Evans 					    k, klen, v, vlen);		\
491*a4bd5210SJason Evans 				} else					\
492*a4bd5210SJason Evans 					o = l;				\
493*a4bd5210SJason Evans 				continue;				\
494*a4bd5210SJason Evans 			}
495*a4bd5210SJason Evans #define	CONF_HANDLE_CHAR_P(o, n, d)					\
496*a4bd5210SJason Evans 			if (sizeof(#n)-1 == klen && strncmp(#n, k,	\
497*a4bd5210SJason Evans 			    klen) == 0) {				\
498*a4bd5210SJason Evans 				size_t cpylen = (vlen <=		\
499*a4bd5210SJason Evans 				    sizeof(o)-1) ? vlen :		\
500*a4bd5210SJason Evans 				    sizeof(o)-1;			\
501*a4bd5210SJason Evans 				strncpy(o, v, cpylen);			\
502*a4bd5210SJason Evans 				o[cpylen] = '\0';			\
503*a4bd5210SJason Evans 				continue;				\
504*a4bd5210SJason Evans 			}
505*a4bd5210SJason Evans 
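			/*
			 * Editorial note: each CONF_HANDLE_* invocation below
			 * expands to a comparison of the current key against
			 * the stringified option name; on a match the value is
			 * parsed into the corresponding opt_* variable and the
			 * enclosing while loop continues with the next
			 * key/value pair.  For example, CONF_HANDLE_BOOL(
			 * opt_abort, abort) matches the key "abort" and
			 * accepts only the values "true" and "false".
			 */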
506*a4bd5210SJason Evans 			CONF_HANDLE_BOOL(opt_abort, abort)
507*a4bd5210SJason Evans 			/*
508*a4bd5210SJason Evans 			 * Chunks always require at least one header page, plus
509*a4bd5210SJason Evans 			 * one data page in the absence of redzones, or three
510*a4bd5210SJason Evans 			 * pages in the presence of redzones.  In order to
511*a4bd5210SJason Evans 			 * simplify options processing, fix the limit based on
512*a4bd5210SJason Evans 			 * config_fill.
513*a4bd5210SJason Evans 			 */
514*a4bd5210SJason Evans 			CONF_HANDLE_SIZE_T(opt_lg_chunk, lg_chunk, LG_PAGE +
515*a4bd5210SJason Evans 			    (config_fill ? 2 : 1), (sizeof(size_t) << 3) - 1)
516*a4bd5210SJason Evans 			CONF_HANDLE_SIZE_T(opt_narenas, narenas, 1, SIZE_T_MAX)
517*a4bd5210SJason Evans 			CONF_HANDLE_SSIZE_T(opt_lg_dirty_mult, lg_dirty_mult,
518*a4bd5210SJason Evans 			    -1, (sizeof(size_t) << 3) - 1)
519*a4bd5210SJason Evans 			CONF_HANDLE_BOOL(opt_stats_print, stats_print)
520*a4bd5210SJason Evans 			if (config_fill) {
521*a4bd5210SJason Evans 				CONF_HANDLE_BOOL(opt_junk, junk)
522*a4bd5210SJason Evans 				CONF_HANDLE_SIZE_T(opt_quarantine, quarantine,
523*a4bd5210SJason Evans 				    0, SIZE_T_MAX)
524*a4bd5210SJason Evans 				CONF_HANDLE_BOOL(opt_redzone, redzone)
525*a4bd5210SJason Evans 				CONF_HANDLE_BOOL(opt_zero, zero)
526*a4bd5210SJason Evans 			}
527*a4bd5210SJason Evans 			if (config_utrace) {
528*a4bd5210SJason Evans 				CONF_HANDLE_BOOL(opt_utrace, utrace)
529*a4bd5210SJason Evans 			}
530*a4bd5210SJason Evans 			if (config_valgrind) {
531*a4bd5210SJason Evans 				bool hit;
532*a4bd5210SJason Evans 				CONF_HANDLE_BOOL_HIT(opt_valgrind,
533*a4bd5210SJason Evans 				    valgrind, hit)
534*a4bd5210SJason Evans 				if (config_fill && opt_valgrind && hit) {
535*a4bd5210SJason Evans 					opt_junk = false;
536*a4bd5210SJason Evans 					opt_zero = false;
537*a4bd5210SJason Evans 					if (opt_quarantine == 0) {
538*a4bd5210SJason Evans 						opt_quarantine =
539*a4bd5210SJason Evans 						    JEMALLOC_VALGRIND_QUARANTINE_DEFAULT;
540*a4bd5210SJason Evans 					}
541*a4bd5210SJason Evans 					opt_redzone = true;
542*a4bd5210SJason Evans 				}
543*a4bd5210SJason Evans 				if (hit)
544*a4bd5210SJason Evans 					continue;
545*a4bd5210SJason Evans 			}
546*a4bd5210SJason Evans 			if (config_xmalloc) {
547*a4bd5210SJason Evans 				CONF_HANDLE_BOOL(opt_xmalloc, xmalloc)
548*a4bd5210SJason Evans 			}
549*a4bd5210SJason Evans 			if (config_tcache) {
550*a4bd5210SJason Evans 				CONF_HANDLE_BOOL(opt_tcache, tcache)
551*a4bd5210SJason Evans 				CONF_HANDLE_SSIZE_T(opt_lg_tcache_max,
552*a4bd5210SJason Evans 				    lg_tcache_max, -1,
553*a4bd5210SJason Evans 				    (sizeof(size_t) << 3) - 1)
554*a4bd5210SJason Evans 			}
555*a4bd5210SJason Evans 			if (config_prof) {
556*a4bd5210SJason Evans 				CONF_HANDLE_BOOL(opt_prof, prof)
557*a4bd5210SJason Evans 				CONF_HANDLE_CHAR_P(opt_prof_prefix, prof_prefix,
558*a4bd5210SJason Evans 				    "jeprof")
559*a4bd5210SJason Evans 				CONF_HANDLE_BOOL(opt_prof_active, prof_active)
560*a4bd5210SJason Evans 				CONF_HANDLE_SSIZE_T(opt_lg_prof_sample,
561*a4bd5210SJason Evans 				    lg_prof_sample, 0,
562*a4bd5210SJason Evans 				    (sizeof(uint64_t) << 3) - 1)
563*a4bd5210SJason Evans 				CONF_HANDLE_BOOL(opt_prof_accum, prof_accum)
564*a4bd5210SJason Evans 				CONF_HANDLE_SSIZE_T(opt_lg_prof_interval,
565*a4bd5210SJason Evans 				    lg_prof_interval, -1,
566*a4bd5210SJason Evans 				    (sizeof(uint64_t) << 3) - 1)
567*a4bd5210SJason Evans 				CONF_HANDLE_BOOL(opt_prof_gdump, prof_gdump)
568*a4bd5210SJason Evans 				CONF_HANDLE_BOOL(opt_prof_leak, prof_leak)
569*a4bd5210SJason Evans 			}
570*a4bd5210SJason Evans 			malloc_conf_error("Invalid conf pair", k, klen, v,
571*a4bd5210SJason Evans 			    vlen);
572*a4bd5210SJason Evans #undef CONF_HANDLE_BOOL
573*a4bd5210SJason Evans #undef CONF_HANDLE_SIZE_T
574*a4bd5210SJason Evans #undef CONF_HANDLE_SSIZE_T
575*a4bd5210SJason Evans #undef CONF_HANDLE_CHAR_P
576*a4bd5210SJason Evans 		}
577*a4bd5210SJason Evans 	}
578*a4bd5210SJason Evans }
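/*
 * Illustrative usage of the three configuration sources probed above
 * (editorial example; names assume an unprefixed build):
 *
 *	compile time:	const char *malloc_conf = "narenas:2";
 *	symbolic link:	ln -s 'lg_chunk:24' /etc/malloc.conf
 *	environment:	MALLOC_CONF='stats_print:true' ./prog
 *
 * The strings are parsed in the order shown, so settings from later sources
 * override earlier ones.
 */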
579*a4bd5210SJason Evans 
580*a4bd5210SJason Evans static bool
581*a4bd5210SJason Evans malloc_init_hard(void)
582*a4bd5210SJason Evans {
583*a4bd5210SJason Evans 	arena_t *init_arenas[1];
584*a4bd5210SJason Evans 
585*a4bd5210SJason Evans 	malloc_mutex_lock(&init_lock);
586*a4bd5210SJason Evans 	if (malloc_initialized || IS_INITIALIZER) {
587*a4bd5210SJason Evans 		/*
588*a4bd5210SJason Evans 		 * Another thread initialized the allocator before this one
589*a4bd5210SJason Evans 		 * acquired init_lock, or this thread is the initializing
590*a4bd5210SJason Evans 		 * thread, and it is recursively allocating.
591*a4bd5210SJason Evans 		 */
592*a4bd5210SJason Evans 		malloc_mutex_unlock(&init_lock);
593*a4bd5210SJason Evans 		return (false);
594*a4bd5210SJason Evans 	}
595*a4bd5210SJason Evans #ifdef JEMALLOC_THREADED_INIT
596*a4bd5210SJason Evans 	if (malloc_initializer != NO_INITIALIZER && IS_INITIALIZER == false) {
597*a4bd5210SJason Evans 		/* Busy-wait until the initializing thread completes. */
598*a4bd5210SJason Evans 		do {
599*a4bd5210SJason Evans 			malloc_mutex_unlock(&init_lock);
600*a4bd5210SJason Evans 			CPU_SPINWAIT;
601*a4bd5210SJason Evans 			malloc_mutex_lock(&init_lock);
602*a4bd5210SJason Evans 		} while (malloc_initialized == false);
603*a4bd5210SJason Evans 		malloc_mutex_unlock(&init_lock);
604*a4bd5210SJason Evans 		return (false);
605*a4bd5210SJason Evans 	}
606*a4bd5210SJason Evans #endif
607*a4bd5210SJason Evans 	malloc_initializer = INITIALIZER;
608*a4bd5210SJason Evans 
609*a4bd5210SJason Evans 	malloc_tsd_boot();
610*a4bd5210SJason Evans 	if (config_prof)
611*a4bd5210SJason Evans 		prof_boot0();
612*a4bd5210SJason Evans 
613*a4bd5210SJason Evans 	malloc_conf_init();
614*a4bd5210SJason Evans 
615*a4bd5210SJason Evans #if (!defined(JEMALLOC_MUTEX_INIT_CB) && !defined(JEMALLOC_ZONE))
616*a4bd5210SJason Evans 	/* Register fork handlers. */
617*a4bd5210SJason Evans 	if (pthread_atfork(jemalloc_prefork, jemalloc_postfork_parent,
618*a4bd5210SJason Evans 	    jemalloc_postfork_child) != 0) {
619*a4bd5210SJason Evans 		malloc_write("<jemalloc>: Error in pthread_atfork()\n");
620*a4bd5210SJason Evans 		if (opt_abort)
621*a4bd5210SJason Evans 			abort();
622*a4bd5210SJason Evans 	}
623*a4bd5210SJason Evans #endif
624*a4bd5210SJason Evans 
625*a4bd5210SJason Evans 	if (opt_stats_print) {
626*a4bd5210SJason Evans 		/* Print statistics at exit. */
627*a4bd5210SJason Evans 		if (atexit(stats_print_atexit) != 0) {
628*a4bd5210SJason Evans 			malloc_write("<jemalloc>: Error in atexit()\n");
629*a4bd5210SJason Evans 			if (opt_abort)
630*a4bd5210SJason Evans 				abort();
631*a4bd5210SJason Evans 		}
632*a4bd5210SJason Evans 	}
633*a4bd5210SJason Evans 
634*a4bd5210SJason Evans 	if (base_boot()) {
635*a4bd5210SJason Evans 		malloc_mutex_unlock(&init_lock);
636*a4bd5210SJason Evans 		return (true);
637*a4bd5210SJason Evans 	}
638*a4bd5210SJason Evans 
639*a4bd5210SJason Evans 	if (chunk_boot0()) {
640*a4bd5210SJason Evans 		malloc_mutex_unlock(&init_lock);
641*a4bd5210SJason Evans 		return (true);
642*a4bd5210SJason Evans 	}
643*a4bd5210SJason Evans 
644*a4bd5210SJason Evans 	if (ctl_boot()) {
645*a4bd5210SJason Evans 		malloc_mutex_unlock(&init_lock);
646*a4bd5210SJason Evans 		return (true);
647*a4bd5210SJason Evans 	}
648*a4bd5210SJason Evans 
649*a4bd5210SJason Evans 	if (config_prof)
650*a4bd5210SJason Evans 		prof_boot1();
651*a4bd5210SJason Evans 
652*a4bd5210SJason Evans 	arena_boot();
653*a4bd5210SJason Evans 
654*a4bd5210SJason Evans 	if (config_tcache && tcache_boot0()) {
655*a4bd5210SJason Evans 		malloc_mutex_unlock(&init_lock);
656*a4bd5210SJason Evans 		return (true);
657*a4bd5210SJason Evans 	}
658*a4bd5210SJason Evans 
659*a4bd5210SJason Evans 	if (huge_boot()) {
660*a4bd5210SJason Evans 		malloc_mutex_unlock(&init_lock);
661*a4bd5210SJason Evans 		return (true);
662*a4bd5210SJason Evans 	}
663*a4bd5210SJason Evans 
664*a4bd5210SJason Evans 	if (malloc_mutex_init(&arenas_lock)) {
		malloc_mutex_unlock(&init_lock);
665*a4bd5210SJason Evans 		return (true);
	}
666*a4bd5210SJason Evans 
667*a4bd5210SJason Evans 	/*
668*a4bd5210SJason Evans 	 * Create enough scaffolding to allow recursive allocation in
669*a4bd5210SJason Evans 	 * malloc_ncpus().
670*a4bd5210SJason Evans 	 */
671*a4bd5210SJason Evans 	narenas = 1;
672*a4bd5210SJason Evans 	arenas = init_arenas;
673*a4bd5210SJason Evans 	memset(arenas, 0, sizeof(arena_t *) * narenas);
674*a4bd5210SJason Evans 
675*a4bd5210SJason Evans 	/*
676*a4bd5210SJason Evans 	 * Initialize one arena here.  The rest are lazily created in
677*a4bd5210SJason Evans 	 * choose_arena_hard().
678*a4bd5210SJason Evans 	 */
679*a4bd5210SJason Evans 	arenas_extend(0);
680*a4bd5210SJason Evans 	if (arenas[0] == NULL) {
681*a4bd5210SJason Evans 		malloc_mutex_unlock(&init_lock);
682*a4bd5210SJason Evans 		return (true);
683*a4bd5210SJason Evans 	}
684*a4bd5210SJason Evans 
685*a4bd5210SJason Evans 	/* Initialize allocation counters before any allocations can occur. */
686*a4bd5210SJason Evans 	if (config_stats && thread_allocated_tsd_boot()) {
687*a4bd5210SJason Evans 		malloc_mutex_unlock(&init_lock);
688*a4bd5210SJason Evans 		return (true);
689*a4bd5210SJason Evans 	}
690*a4bd5210SJason Evans 
691*a4bd5210SJason Evans 	if (arenas_tsd_boot()) {
692*a4bd5210SJason Evans 		malloc_mutex_unlock(&init_lock);
693*a4bd5210SJason Evans 		return (true);
694*a4bd5210SJason Evans 	}
695*a4bd5210SJason Evans 
696*a4bd5210SJason Evans 	if (config_tcache && tcache_boot1()) {
697*a4bd5210SJason Evans 		malloc_mutex_unlock(&init_lock);
698*a4bd5210SJason Evans 		return (true);
699*a4bd5210SJason Evans 	}
700*a4bd5210SJason Evans 
701*a4bd5210SJason Evans 	if (config_fill && quarantine_boot()) {
702*a4bd5210SJason Evans 		malloc_mutex_unlock(&init_lock);
703*a4bd5210SJason Evans 		return (true);
704*a4bd5210SJason Evans 	}
705*a4bd5210SJason Evans 
706*a4bd5210SJason Evans 	if (config_prof && prof_boot2()) {
707*a4bd5210SJason Evans 		malloc_mutex_unlock(&init_lock);
708*a4bd5210SJason Evans 		return (true);
709*a4bd5210SJason Evans 	}
710*a4bd5210SJason Evans 
711*a4bd5210SJason Evans 	/* Get number of CPUs. */
712*a4bd5210SJason Evans 	malloc_mutex_unlock(&init_lock);
713*a4bd5210SJason Evans 	ncpus = malloc_ncpus();
714*a4bd5210SJason Evans 	malloc_mutex_lock(&init_lock);
715*a4bd5210SJason Evans 
716*a4bd5210SJason Evans 	if (chunk_boot1()) {
717*a4bd5210SJason Evans 		malloc_mutex_unlock(&init_lock);
718*a4bd5210SJason Evans 		return (true);
719*a4bd5210SJason Evans 	}
720*a4bd5210SJason Evans 
721*a4bd5210SJason Evans 	if (mutex_boot()) {
722*a4bd5210SJason Evans 		malloc_mutex_unlock(&init_lock);
723*a4bd5210SJason Evans 		return (true);
724*a4bd5210SJason Evans 	}
725*a4bd5210SJason Evans 
726*a4bd5210SJason Evans 	if (opt_narenas == 0) {
727*a4bd5210SJason Evans 		/*
728*a4bd5210SJason Evans 		 * For SMP systems, create four arenas per CPU by default, in
729*a4bd5210SJason Evans 		 * order to reduce lock contention.
730*a4bd5210SJason Evans 		 */
731*a4bd5210SJason Evans 		if (ncpus > 1)
732*a4bd5210SJason Evans 			opt_narenas = ncpus << 2;
733*a4bd5210SJason Evans 		else
734*a4bd5210SJason Evans 			opt_narenas = 1;
735*a4bd5210SJason Evans 	}
736*a4bd5210SJason Evans 	narenas = opt_narenas;
737*a4bd5210SJason Evans 	/*
738*a4bd5210SJason Evans 	 * Make sure that the arenas array can be allocated.  In practice, this
739*a4bd5210SJason Evans 	 * limit is enough to allow the allocator to function, but the ctl
740*a4bd5210SJason Evans 	 * machinery will fail to allocate memory at far lower limits.
741*a4bd5210SJason Evans 	 */
742*a4bd5210SJason Evans 	if (narenas > chunksize / sizeof(arena_t *)) {
743*a4bd5210SJason Evans 		narenas = chunksize / sizeof(arena_t *);
744*a4bd5210SJason Evans 		malloc_printf("<jemalloc>: Reducing narenas to limit (%u)\n",
745*a4bd5210SJason Evans 		    narenas);
746*a4bd5210SJason Evans 	}
747*a4bd5210SJason Evans 
748*a4bd5210SJason Evans 	/* Allocate and initialize arenas. */
749*a4bd5210SJason Evans 	arenas = (arena_t **)base_alloc(sizeof(arena_t *) * narenas);
750*a4bd5210SJason Evans 	if (arenas == NULL) {
751*a4bd5210SJason Evans 		malloc_mutex_unlock(&init_lock);
752*a4bd5210SJason Evans 		return (true);
753*a4bd5210SJason Evans 	}
754*a4bd5210SJason Evans 	/*
755*a4bd5210SJason Evans 	 * Zero the array.  In practice, this should always be pre-zeroed,
756*a4bd5210SJason Evans 	 * since it was just mmap()ed, but let's be sure.
757*a4bd5210SJason Evans 	 */
758*a4bd5210SJason Evans 	memset(arenas, 0, sizeof(arena_t *) * narenas);
759*a4bd5210SJason Evans 	/* Copy the pointer to the one arena that was already initialized. */
760*a4bd5210SJason Evans 	arenas[0] = init_arenas[0];
761*a4bd5210SJason Evans 
762*a4bd5210SJason Evans 	malloc_initialized = true;
763*a4bd5210SJason Evans 	malloc_mutex_unlock(&init_lock);
764*a4bd5210SJason Evans 	return (false);
765*a4bd5210SJason Evans }
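/*
 * Editorial summary of the bootstrap sequence above: TSD and option parsing
 * run first so that later subsystems see their configuration; the base,
 * chunk, ctl, prof, arena, tcache, huge, and quarantine subsystems are then
 * booted in dependency order; a single statically scaffolded arena is
 * installed before the TSD keys are created so that recursive allocation
 * (e.g. from within malloc_ncpus()) works; finally the real arenas array is
 * sized from opt_narenas, capped at chunksize / sizeof(arena_t *), and the
 * allocator is marked initialized.
 */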
766*a4bd5210SJason Evans 
767*a4bd5210SJason Evans /*
768*a4bd5210SJason Evans  * End initialization functions.
769*a4bd5210SJason Evans  */
770*a4bd5210SJason Evans /******************************************************************************/
771*a4bd5210SJason Evans /*
772*a4bd5210SJason Evans  * Begin malloc(3)-compatible functions.
773*a4bd5210SJason Evans  */
774*a4bd5210SJason Evans 
775*a4bd5210SJason Evans JEMALLOC_ATTR(malloc)
776*a4bd5210SJason Evans JEMALLOC_ATTR(visibility("default"))
777*a4bd5210SJason Evans void *
778*a4bd5210SJason Evans je_malloc(size_t size)
779*a4bd5210SJason Evans {
780*a4bd5210SJason Evans 	void *ret;
781*a4bd5210SJason Evans 	size_t usize;
782*a4bd5210SJason Evans 	prof_thr_cnt_t *cnt JEMALLOC_CC_SILENCE_INIT(NULL);
783*a4bd5210SJason Evans 
784*a4bd5210SJason Evans 	if (malloc_init()) {
785*a4bd5210SJason Evans 		ret = NULL;
786*a4bd5210SJason Evans 		goto label_oom;
787*a4bd5210SJason Evans 	}
788*a4bd5210SJason Evans 
789*a4bd5210SJason Evans 	if (size == 0)
790*a4bd5210SJason Evans 		size = 1;
791*a4bd5210SJason Evans 
792*a4bd5210SJason Evans 	if (config_prof && opt_prof) {
793*a4bd5210SJason Evans 		usize = s2u(size);
794*a4bd5210SJason Evans 		PROF_ALLOC_PREP(1, usize, cnt);
795*a4bd5210SJason Evans 		if (cnt == NULL) {
796*a4bd5210SJason Evans 			ret = NULL;
797*a4bd5210SJason Evans 			goto label_oom;
798*a4bd5210SJason Evans 		}
799*a4bd5210SJason Evans 		if (prof_promote && (uintptr_t)cnt != (uintptr_t)1U && usize <=
800*a4bd5210SJason Evans 		    SMALL_MAXCLASS) {
801*a4bd5210SJason Evans 			ret = imalloc(SMALL_MAXCLASS+1);
802*a4bd5210SJason Evans 			if (ret != NULL)
803*a4bd5210SJason Evans 				arena_prof_promoted(ret, usize);
804*a4bd5210SJason Evans 		} else
805*a4bd5210SJason Evans 			ret = imalloc(size);
806*a4bd5210SJason Evans 	} else {
807*a4bd5210SJason Evans 		if (config_stats || (config_valgrind && opt_valgrind))
808*a4bd5210SJason Evans 			usize = s2u(size);
809*a4bd5210SJason Evans 		ret = imalloc(size);
810*a4bd5210SJason Evans 	}
811*a4bd5210SJason Evans 
812*a4bd5210SJason Evans label_oom:
813*a4bd5210SJason Evans 	if (ret == NULL) {
814*a4bd5210SJason Evans 		if (config_xmalloc && opt_xmalloc) {
815*a4bd5210SJason Evans 			malloc_write("<jemalloc>: Error in malloc(): "
816*a4bd5210SJason Evans 			    "out of memory\n");
817*a4bd5210SJason Evans 			abort();
818*a4bd5210SJason Evans 		}
819*a4bd5210SJason Evans 		errno = ENOMEM;
820*a4bd5210SJason Evans 	}
821*a4bd5210SJason Evans 	if (config_prof && opt_prof && ret != NULL)
822*a4bd5210SJason Evans 		prof_malloc(ret, usize, cnt);
823*a4bd5210SJason Evans 	if (config_stats && ret != NULL) {
824*a4bd5210SJason Evans 		assert(usize == isalloc(ret, config_prof));
825*a4bd5210SJason Evans 		thread_allocated_tsd_get()->allocated += usize;
826*a4bd5210SJason Evans 	}
827*a4bd5210SJason Evans 	UTRACE(0, size, ret);
828*a4bd5210SJason Evans 	JEMALLOC_VALGRIND_MALLOC(ret != NULL, ret, usize, false);
829*a4bd5210SJason Evans 	return (ret);
830*a4bd5210SJason Evans }
831*a4bd5210SJason Evans 
832*a4bd5210SJason Evans JEMALLOC_ATTR(nonnull(1))
833*a4bd5210SJason Evans #ifdef JEMALLOC_PROF
834*a4bd5210SJason Evans /*
835*a4bd5210SJason Evans  * Avoid any uncertainty as to how many backtrace frames to ignore in
836*a4bd5210SJason Evans  * PROF_ALLOC_PREP().
837*a4bd5210SJason Evans  */
838*a4bd5210SJason Evans JEMALLOC_ATTR(noinline)
839*a4bd5210SJason Evans #endif
840*a4bd5210SJason Evans static int
841*a4bd5210SJason Evans imemalign(void **memptr, size_t alignment, size_t size,
842*a4bd5210SJason Evans     size_t min_alignment)
843*a4bd5210SJason Evans {
844*a4bd5210SJason Evans 	int ret;
845*a4bd5210SJason Evans 	size_t usize;
846*a4bd5210SJason Evans 	void *result;
847*a4bd5210SJason Evans 	prof_thr_cnt_t *cnt JEMALLOC_CC_SILENCE_INIT(NULL);
848*a4bd5210SJason Evans 
849*a4bd5210SJason Evans 	assert(min_alignment != 0);
850*a4bd5210SJason Evans 
851*a4bd5210SJason Evans 	if (malloc_init())
852*a4bd5210SJason Evans 		result = NULL;
853*a4bd5210SJason Evans 	else {
854*a4bd5210SJason Evans 		if (size == 0)
855*a4bd5210SJason Evans 			size = 1;
856*a4bd5210SJason Evans 
857*a4bd5210SJason Evans 		/* Make sure that alignment is a large enough power of 2. */
858*a4bd5210SJason Evans 		if (((alignment - 1) & alignment) != 0
859*a4bd5210SJason Evans 		    || (alignment < min_alignment)) {
860*a4bd5210SJason Evans 			if (config_xmalloc && opt_xmalloc) {
861*a4bd5210SJason Evans 				malloc_write("<jemalloc>: Error allocating "
862*a4bd5210SJason Evans 				    "aligned memory: invalid alignment\n");
863*a4bd5210SJason Evans 				abort();
864*a4bd5210SJason Evans 			}
865*a4bd5210SJason Evans 			result = NULL;
866*a4bd5210SJason Evans 			ret = EINVAL;
867*a4bd5210SJason Evans 			goto label_return;
868*a4bd5210SJason Evans 		}
869*a4bd5210SJason Evans 
870*a4bd5210SJason Evans 		usize = sa2u(size, alignment);
871*a4bd5210SJason Evans 		if (usize == 0) {
872*a4bd5210SJason Evans 			result = NULL;
873*a4bd5210SJason Evans 			ret = ENOMEM;
874*a4bd5210SJason Evans 			goto label_return;
875*a4bd5210SJason Evans 		}
876*a4bd5210SJason Evans 
877*a4bd5210SJason Evans 		if (config_prof && opt_prof) {
878*a4bd5210SJason Evans 			PROF_ALLOC_PREP(2, usize, cnt);
879*a4bd5210SJason Evans 			if (cnt == NULL) {
880*a4bd5210SJason Evans 				result = NULL;
881*a4bd5210SJason Evans 				ret = EINVAL;
882*a4bd5210SJason Evans 			} else {
883*a4bd5210SJason Evans 				if (prof_promote && (uintptr_t)cnt !=
884*a4bd5210SJason Evans 				    (uintptr_t)1U && usize <= SMALL_MAXCLASS) {
885*a4bd5210SJason Evans 					assert(sa2u(SMALL_MAXCLASS+1,
886*a4bd5210SJason Evans 					    alignment) != 0);
887*a4bd5210SJason Evans 					result = ipalloc(sa2u(SMALL_MAXCLASS+1,
888*a4bd5210SJason Evans 					    alignment), alignment, false);
889*a4bd5210SJason Evans 					if (result != NULL) {
890*a4bd5210SJason Evans 						arena_prof_promoted(result,
891*a4bd5210SJason Evans 						    usize);
892*a4bd5210SJason Evans 					}
893*a4bd5210SJason Evans 				} else {
894*a4bd5210SJason Evans 					result = ipalloc(usize, alignment,
895*a4bd5210SJason Evans 					    false);
896*a4bd5210SJason Evans 				}
897*a4bd5210SJason Evans 			}
898*a4bd5210SJason Evans 		} else
899*a4bd5210SJason Evans 			result = ipalloc(usize, alignment, false);
900*a4bd5210SJason Evans 	}
901*a4bd5210SJason Evans 
902*a4bd5210SJason Evans 	if (result == NULL) {
903*a4bd5210SJason Evans 		if (config_xmalloc && opt_xmalloc) {
904*a4bd5210SJason Evans 			malloc_write("<jemalloc>: Error allocating aligned "
905*a4bd5210SJason Evans 			    "memory: out of memory\n");
906*a4bd5210SJason Evans 			abort();
907*a4bd5210SJason Evans 		}
908*a4bd5210SJason Evans 		ret = ENOMEM;
909*a4bd5210SJason Evans 		goto label_return;
910*a4bd5210SJason Evans 	}
911*a4bd5210SJason Evans 
912*a4bd5210SJason Evans 	*memptr = result;
913*a4bd5210SJason Evans 	ret = 0;
914*a4bd5210SJason Evans 
915*a4bd5210SJason Evans label_return:
916*a4bd5210SJason Evans 	if (config_stats && result != NULL) {
917*a4bd5210SJason Evans 		assert(usize == isalloc(result, config_prof));
918*a4bd5210SJason Evans 		thread_allocated_tsd_get()->allocated += usize;
919*a4bd5210SJason Evans 	}
920*a4bd5210SJason Evans 	if (config_prof && opt_prof && result != NULL)
921*a4bd5210SJason Evans 		prof_malloc(result, usize, cnt);
922*a4bd5210SJason Evans 	UTRACE(0, size, result);
923*a4bd5210SJason Evans 	return (ret);
924*a4bd5210SJason Evans }
925*a4bd5210SJason Evans 
926*a4bd5210SJason Evans JEMALLOC_ATTR(nonnull(1))
927*a4bd5210SJason Evans JEMALLOC_ATTR(visibility("default"))
928*a4bd5210SJason Evans int
929*a4bd5210SJason Evans je_posix_memalign(void **memptr, size_t alignment, size_t size)
930*a4bd5210SJason Evans {
931*a4bd5210SJason Evans 	int ret = imemalign(memptr, alignment, size, sizeof(void *));
932*a4bd5210SJason Evans 	JEMALLOC_VALGRIND_MALLOC(ret == 0, *memptr, isalloc(*memptr,
933*a4bd5210SJason Evans 	    config_prof), false);
934*a4bd5210SJason Evans 	return (ret);
935*a4bd5210SJason Evans }
936*a4bd5210SJason Evans 
937*a4bd5210SJason Evans JEMALLOC_ATTR(malloc)
938*a4bd5210SJason Evans JEMALLOC_ATTR(visibility("default"))
939*a4bd5210SJason Evans void *
940*a4bd5210SJason Evans je_aligned_alloc(size_t alignment, size_t size)
941*a4bd5210SJason Evans {
942*a4bd5210SJason Evans 	void *ret;
943*a4bd5210SJason Evans 	int err;
944*a4bd5210SJason Evans 
945*a4bd5210SJason Evans 	if ((err = imemalign(&ret, alignment, size, 1)) != 0) {
946*a4bd5210SJason Evans 		ret = NULL;
947*a4bd5210SJason Evans 		errno = err;
948*a4bd5210SJason Evans 	}
949*a4bd5210SJason Evans 	JEMALLOC_VALGRIND_MALLOC(err == 0, ret, isalloc(ret, config_prof),
950*a4bd5210SJason Evans 	    false);
951*a4bd5210SJason Evans 	return (ret);
952*a4bd5210SJason Evans }
953*a4bd5210SJason Evans 
954*a4bd5210SJason Evans JEMALLOC_ATTR(malloc)
955*a4bd5210SJason Evans JEMALLOC_ATTR(visibility("default"))
956*a4bd5210SJason Evans void *
957*a4bd5210SJason Evans je_calloc(size_t num, size_t size)
958*a4bd5210SJason Evans {
959*a4bd5210SJason Evans 	void *ret;
960*a4bd5210SJason Evans 	size_t num_size;
961*a4bd5210SJason Evans 	size_t usize;
962*a4bd5210SJason Evans 	prof_thr_cnt_t *cnt JEMALLOC_CC_SILENCE_INIT(NULL);
963*a4bd5210SJason Evans 
964*a4bd5210SJason Evans 	if (malloc_init()) {
965*a4bd5210SJason Evans 		num_size = 0;
966*a4bd5210SJason Evans 		ret = NULL;
967*a4bd5210SJason Evans 		goto label_return;
968*a4bd5210SJason Evans 	}
969*a4bd5210SJason Evans 
970*a4bd5210SJason Evans 	num_size = num * size;
971*a4bd5210SJason Evans 	if (num_size == 0) {
972*a4bd5210SJason Evans 		if (num == 0 || size == 0)
973*a4bd5210SJason Evans 			num_size = 1;
974*a4bd5210SJason Evans 		else {
975*a4bd5210SJason Evans 			ret = NULL;
976*a4bd5210SJason Evans 			goto label_return;
977*a4bd5210SJason Evans 		}
978*a4bd5210SJason Evans 	/*
979*a4bd5210SJason Evans 	 * Try to avoid division here.  We know that it isn't possible to
980*a4bd5210SJason Evans 	 * overflow during multiplication if neither operand uses any of the
981*a4bd5210SJason Evans 	 * most significant half of the bits in a size_t.
982*a4bd5210SJason Evans 	 */
983*a4bd5210SJason Evans 	} else if (((num | size) & (SIZE_T_MAX << (sizeof(size_t) << 2)))
984*a4bd5210SJason Evans 	    && (num_size / size != num)) {
985*a4bd5210SJason Evans 		/* size_t overflow. */
986*a4bd5210SJason Evans 		ret = NULL;
987*a4bd5210SJason Evans 		goto label_return;
988*a4bd5210SJason Evans 	}
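	/*
	 * Editorial worked example: with a 64-bit size_t the mask above is
	 * SIZE_T_MAX << 32, so if both num and size fit in 32 bits their
	 * product cannot overflow and the division test is skipped; e.g.
	 * num == 0x10000 and size == 0x10000 multiply safely, whereas
	 * num == 0x100000000 forces the num_size / size != num check.
	 */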
989*a4bd5210SJason Evans 
990*a4bd5210SJason Evans 	if (config_prof && opt_prof) {
991*a4bd5210SJason Evans 		usize = s2u(num_size);
992*a4bd5210SJason Evans 		PROF_ALLOC_PREP(1, usize, cnt);
993*a4bd5210SJason Evans 		if (cnt == NULL) {
994*a4bd5210SJason Evans 			ret = NULL;
995*a4bd5210SJason Evans 			goto label_return;
996*a4bd5210SJason Evans 		}
997*a4bd5210SJason Evans 		if (prof_promote && (uintptr_t)cnt != (uintptr_t)1U && usize
998*a4bd5210SJason Evans 		    <= SMALL_MAXCLASS) {
999*a4bd5210SJason Evans 			ret = icalloc(SMALL_MAXCLASS+1);
1000*a4bd5210SJason Evans 			if (ret != NULL)
1001*a4bd5210SJason Evans 				arena_prof_promoted(ret, usize);
1002*a4bd5210SJason Evans 		} else
1003*a4bd5210SJason Evans 			ret = icalloc(num_size);
1004*a4bd5210SJason Evans 	} else {
1005*a4bd5210SJason Evans 		if (config_stats || (config_valgrind && opt_valgrind))
1006*a4bd5210SJason Evans 			usize = s2u(num_size);
1007*a4bd5210SJason Evans 		ret = icalloc(num_size);
1008*a4bd5210SJason Evans 	}
1009*a4bd5210SJason Evans 
1010*a4bd5210SJason Evans label_return:
1011*a4bd5210SJason Evans 	if (ret == NULL) {
1012*a4bd5210SJason Evans 		if (config_xmalloc && opt_xmalloc) {
1013*a4bd5210SJason Evans 			malloc_write("<jemalloc>: Error in calloc(): out of "
1014*a4bd5210SJason Evans 			    "memory\n");
1015*a4bd5210SJason Evans 			abort();
1016*a4bd5210SJason Evans 		}
1017*a4bd5210SJason Evans 		errno = ENOMEM;
1018*a4bd5210SJason Evans 	}
1019*a4bd5210SJason Evans 
1020*a4bd5210SJason Evans 	if (config_prof && opt_prof && ret != NULL)
1021*a4bd5210SJason Evans 		prof_malloc(ret, usize, cnt);
1022*a4bd5210SJason Evans 	if (config_stats && ret != NULL) {
1023*a4bd5210SJason Evans 		assert(usize == isalloc(ret, config_prof));
1024*a4bd5210SJason Evans 		thread_allocated_tsd_get()->allocated += usize;
1025*a4bd5210SJason Evans 	}
1026*a4bd5210SJason Evans 	UTRACE(0, num_size, ret);
1027*a4bd5210SJason Evans 	JEMALLOC_VALGRIND_MALLOC(ret != NULL, ret, usize, true);
1028*a4bd5210SJason Evans 	return (ret);
1029*a4bd5210SJason Evans }
1030*a4bd5210SJason Evans 
1031*a4bd5210SJason Evans JEMALLOC_ATTR(visibility("default"))
1032*a4bd5210SJason Evans void *
1033*a4bd5210SJason Evans je_realloc(void *ptr, size_t size)
1034*a4bd5210SJason Evans {
1035*a4bd5210SJason Evans 	void *ret;
1036*a4bd5210SJason Evans 	size_t usize;
1037*a4bd5210SJason Evans 	size_t old_size = 0;
1038*a4bd5210SJason Evans 	size_t old_rzsize JEMALLOC_CC_SILENCE_INIT(0);
1039*a4bd5210SJason Evans 	prof_thr_cnt_t *cnt JEMALLOC_CC_SILENCE_INIT(NULL);
1040*a4bd5210SJason Evans 	prof_ctx_t *old_ctx JEMALLOC_CC_SILENCE_INIT(NULL);
1041*a4bd5210SJason Evans 
1042*a4bd5210SJason Evans 	if (size == 0) {
1043*a4bd5210SJason Evans 		if (ptr != NULL) {
1044*a4bd5210SJason Evans 			/* realloc(ptr, 0) is equivalent to free(ptr). */
1045*a4bd5210SJason Evans 			if (config_prof) {
1046*a4bd5210SJason Evans 				old_size = isalloc(ptr, true);
1047*a4bd5210SJason Evans 				if (config_valgrind && opt_valgrind)
1048*a4bd5210SJason Evans 					old_rzsize = p2rz(ptr);
1049*a4bd5210SJason Evans 			} else if (config_stats) {
1050*a4bd5210SJason Evans 				old_size = isalloc(ptr, false);
1051*a4bd5210SJason Evans 				if (config_valgrind && opt_valgrind)
1052*a4bd5210SJason Evans 					old_rzsize = u2rz(old_size);
1053*a4bd5210SJason Evans 			} else if (config_valgrind && opt_valgrind) {
1054*a4bd5210SJason Evans 				old_size = isalloc(ptr, false);
1055*a4bd5210SJason Evans 				old_rzsize = u2rz(old_size);
1056*a4bd5210SJason Evans 			}
1057*a4bd5210SJason Evans 			if (config_prof && opt_prof) {
1058*a4bd5210SJason Evans 				old_ctx = prof_ctx_get(ptr);
1059*a4bd5210SJason Evans 				cnt = NULL;
1060*a4bd5210SJason Evans 			}
1061*a4bd5210SJason Evans 			iqalloc(ptr);
1062*a4bd5210SJason Evans 			ret = NULL;
1063*a4bd5210SJason Evans 			goto label_return;
1064*a4bd5210SJason Evans 		} else
1065*a4bd5210SJason Evans 			size = 1;
1066*a4bd5210SJason Evans 	}
1067*a4bd5210SJason Evans 
1068*a4bd5210SJason Evans 	if (ptr != NULL) {
1069*a4bd5210SJason Evans 		assert(malloc_initialized || IS_INITIALIZER);
1070*a4bd5210SJason Evans 
1071*a4bd5210SJason Evans 		if (config_prof) {
1072*a4bd5210SJason Evans 			old_size = isalloc(ptr, true);
1073*a4bd5210SJason Evans 			if (config_valgrind && opt_valgrind)
1074*a4bd5210SJason Evans 				old_rzsize = p2rz(ptr);
1075*a4bd5210SJason Evans 		} else if (config_stats) {
1076*a4bd5210SJason Evans 			old_size = isalloc(ptr, false);
1077*a4bd5210SJason Evans 			if (config_valgrind && opt_valgrind)
1078*a4bd5210SJason Evans 				old_rzsize = u2rz(old_size);
1079*a4bd5210SJason Evans 		} else if (config_valgrind && opt_valgrind) {
1080*a4bd5210SJason Evans 			old_size = isalloc(ptr, false);
1081*a4bd5210SJason Evans 			old_rzsize = u2rz(old_size);
1082*a4bd5210SJason Evans 		}
1083*a4bd5210SJason Evans 		if (config_prof && opt_prof) {
1084*a4bd5210SJason Evans 			usize = s2u(size);
1085*a4bd5210SJason Evans 			old_ctx = prof_ctx_get(ptr);
1086*a4bd5210SJason Evans 			PROF_ALLOC_PREP(1, usize, cnt);
1087*a4bd5210SJason Evans 			if (cnt == NULL) {
1088*a4bd5210SJason Evans 				old_ctx = NULL;
1089*a4bd5210SJason Evans 				ret = NULL;
1090*a4bd5210SJason Evans 				goto label_oom;
1091*a4bd5210SJason Evans 			}
1092*a4bd5210SJason Evans 			if (prof_promote && (uintptr_t)cnt != (uintptr_t)1U &&
1093*a4bd5210SJason Evans 			    usize <= SMALL_MAXCLASS) {
1094*a4bd5210SJason Evans 				ret = iralloc(ptr, SMALL_MAXCLASS+1, 0, 0,
1095*a4bd5210SJason Evans 				    false, false);
1096*a4bd5210SJason Evans 				if (ret != NULL)
1097*a4bd5210SJason Evans 					arena_prof_promoted(ret, usize);
1098*a4bd5210SJason Evans 				else
1099*a4bd5210SJason Evans 					old_ctx = NULL;
1100*a4bd5210SJason Evans 			} else {
1101*a4bd5210SJason Evans 				ret = iralloc(ptr, size, 0, 0, false, false);
1102*a4bd5210SJason Evans 				if (ret == NULL)
1103*a4bd5210SJason Evans 					old_ctx = NULL;
1104*a4bd5210SJason Evans 			}
1105*a4bd5210SJason Evans 		} else {
1106*a4bd5210SJason Evans 			if (config_stats || (config_valgrind && opt_valgrind))
1107*a4bd5210SJason Evans 				usize = s2u(size);
1108*a4bd5210SJason Evans 			ret = iralloc(ptr, size, 0, 0, false, false);
1109*a4bd5210SJason Evans 		}
1110*a4bd5210SJason Evans 
1111*a4bd5210SJason Evans label_oom:
1112*a4bd5210SJason Evans 		if (ret == NULL) {
1113*a4bd5210SJason Evans 			if (config_xmalloc && opt_xmalloc) {
1114*a4bd5210SJason Evans 				malloc_write("<jemalloc>: Error in realloc(): "
1115*a4bd5210SJason Evans 				    "out of memory\n");
1116*a4bd5210SJason Evans 				abort();
1117*a4bd5210SJason Evans 			}
1118*a4bd5210SJason Evans 			errno = ENOMEM;
1119*a4bd5210SJason Evans 		}
1120*a4bd5210SJason Evans 	} else {
1121*a4bd5210SJason Evans 		/* realloc(NULL, size) is equivalent to malloc(size). */
1122*a4bd5210SJason Evans 		if (config_prof && opt_prof)
1123*a4bd5210SJason Evans 			old_ctx = NULL;
1124*a4bd5210SJason Evans 		if (malloc_init()) {
1125*a4bd5210SJason Evans 			if (config_prof && opt_prof)
1126*a4bd5210SJason Evans 				cnt = NULL;
1127*a4bd5210SJason Evans 			ret = NULL;
1128*a4bd5210SJason Evans 		} else {
1129*a4bd5210SJason Evans 			if (config_prof && opt_prof) {
1130*a4bd5210SJason Evans 				usize = s2u(size);
1131*a4bd5210SJason Evans 				PROF_ALLOC_PREP(1, usize, cnt);
1132*a4bd5210SJason Evans 				if (cnt == NULL)
1133*a4bd5210SJason Evans 					ret = NULL;
1134*a4bd5210SJason Evans 				else {
1135*a4bd5210SJason Evans 					if (prof_promote && (uintptr_t)cnt !=
1136*a4bd5210SJason Evans 					    (uintptr_t)1U && usize <=
1137*a4bd5210SJason Evans 					    SMALL_MAXCLASS) {
1138*a4bd5210SJason Evans 						ret = imalloc(SMALL_MAXCLASS+1);
1139*a4bd5210SJason Evans 						if (ret != NULL) {
1140*a4bd5210SJason Evans 							arena_prof_promoted(ret,
1141*a4bd5210SJason Evans 							    usize);
1142*a4bd5210SJason Evans 						}
1143*a4bd5210SJason Evans 					} else
1144*a4bd5210SJason Evans 						ret = imalloc(size);
1145*a4bd5210SJason Evans 				}
1146*a4bd5210SJason Evans 			} else {
1147*a4bd5210SJason Evans 				if (config_stats || (config_valgrind &&
1148*a4bd5210SJason Evans 				    opt_valgrind))
1149*a4bd5210SJason Evans 					usize = s2u(size);
1150*a4bd5210SJason Evans 				ret = imalloc(size);
1151*a4bd5210SJason Evans 			}
1152*a4bd5210SJason Evans 		}
1153*a4bd5210SJason Evans 
1154*a4bd5210SJason Evans 		if (ret == NULL) {
1155*a4bd5210SJason Evans 			if (config_xmalloc && opt_xmalloc) {
1156*a4bd5210SJason Evans 				malloc_write("<jemalloc>: Error in realloc(): "
1157*a4bd5210SJason Evans 				    "out of memory\n");
1158*a4bd5210SJason Evans 				abort();
1159*a4bd5210SJason Evans 			}
1160*a4bd5210SJason Evans 			errno = ENOMEM;
1161*a4bd5210SJason Evans 		}
1162*a4bd5210SJason Evans 	}
1163*a4bd5210SJason Evans 
1164*a4bd5210SJason Evans label_return:
1165*a4bd5210SJason Evans 	if (config_prof && opt_prof)
1166*a4bd5210SJason Evans 		prof_realloc(ret, usize, cnt, old_size, old_ctx);
1167*a4bd5210SJason Evans 	if (config_stats && ret != NULL) {
1168*a4bd5210SJason Evans 		thread_allocated_t *ta;
1169*a4bd5210SJason Evans 		assert(usize == isalloc(ret, config_prof));
1170*a4bd5210SJason Evans 		ta = thread_allocated_tsd_get();
1171*a4bd5210SJason Evans 		ta->allocated += usize;
1172*a4bd5210SJason Evans 		ta->deallocated += old_size;
1173*a4bd5210SJason Evans 	}
1174*a4bd5210SJason Evans 	UTRACE(ptr, size, ret);
1175*a4bd5210SJason Evans 	JEMALLOC_VALGRIND_REALLOC(ret, usize, ptr, old_size, old_rzsize, false);
1176*a4bd5210SJason Evans 	return (ret);
1177*a4bd5210SJason Evans }
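
/*
 * Illustrative sketch (annotation added for this listing, compiled out; not
 * part of the upstream source): je_realloc() above implements the usual
 * realloc(3) contract, including realloc(NULL, size) behaving as
 * malloc(size) and errno being set to ENOMEM when the request fails.
 */
#if 0
#include <errno.h>
#include <stdlib.h>
#include <string.h>

static int
realloc_usage_sketch(void)
{
	char *p, *q;

	p = realloc(NULL, 16);		/* Equivalent to malloc(16). */
	if (p == NULL)
		return (errno);		/* errno == ENOMEM on failure. */
	strcpy(p, "hello");
	q = realloc(p, 4096);		/* May move; contents are preserved. */
	if (q == NULL) {
		free(p);		/* Original block is still valid. */
		return (errno);
	}
	free(q);
	return (0);
}
#endif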
1178*a4bd5210SJason Evans 
1179*a4bd5210SJason Evans JEMALLOC_ATTR(visibility("default"))
1180*a4bd5210SJason Evans void
1181*a4bd5210SJason Evans je_free(void *ptr)
1182*a4bd5210SJason Evans {
1183*a4bd5210SJason Evans 
1184*a4bd5210SJason Evans 	UTRACE(ptr, 0, 0);
1185*a4bd5210SJason Evans 	if (ptr != NULL) {
1186*a4bd5210SJason Evans 		size_t usize;
1187*a4bd5210SJason Evans 		size_t rzsize JEMALLOC_CC_SILENCE_INIT(0);
1188*a4bd5210SJason Evans 
1189*a4bd5210SJason Evans 		assert(malloc_initialized || IS_INITIALIZER);
1190*a4bd5210SJason Evans 
1191*a4bd5210SJason Evans 		if (config_prof && opt_prof) {
1192*a4bd5210SJason Evans 			usize = isalloc(ptr, config_prof);
1193*a4bd5210SJason Evans 			prof_free(ptr, usize);
1194*a4bd5210SJason Evans 		} else if (config_stats || config_valgrind)
1195*a4bd5210SJason Evans 			usize = isalloc(ptr, config_prof);
1196*a4bd5210SJason Evans 		if (config_stats)
1197*a4bd5210SJason Evans 			thread_allocated_tsd_get()->deallocated += usize;
1198*a4bd5210SJason Evans 		if (config_valgrind && opt_valgrind)
1199*a4bd5210SJason Evans 			rzsize = p2rz(ptr);
1200*a4bd5210SJason Evans 		iqalloc(ptr);
1201*a4bd5210SJason Evans 		JEMALLOC_VALGRIND_FREE(ptr, rzsize);
1202*a4bd5210SJason Evans 	}
1203*a4bd5210SJason Evans }
1204*a4bd5210SJason Evans 
1205*a4bd5210SJason Evans /*
1206*a4bd5210SJason Evans  * End malloc(3)-compatible functions.
1207*a4bd5210SJason Evans  */
1208*a4bd5210SJason Evans /******************************************************************************/
1209*a4bd5210SJason Evans /*
1210*a4bd5210SJason Evans  * Begin non-standard override functions.
1211*a4bd5210SJason Evans  */
1212*a4bd5210SJason Evans 
1213*a4bd5210SJason Evans #ifdef JEMALLOC_OVERRIDE_MEMALIGN
1214*a4bd5210SJason Evans JEMALLOC_ATTR(malloc)
1215*a4bd5210SJason Evans JEMALLOC_ATTR(visibility("default"))
1216*a4bd5210SJason Evans void *
1217*a4bd5210SJason Evans je_memalign(size_t alignment, size_t size)
1218*a4bd5210SJason Evans {
1219*a4bd5210SJason Evans 	void *ret JEMALLOC_CC_SILENCE_INIT(NULL);
1220*a4bd5210SJason Evans 	imemalign(&ret, alignment, size, 1);
1221*a4bd5210SJason Evans 	JEMALLOC_VALGRIND_MALLOC(ret != NULL, ret, size, false);
1222*a4bd5210SJason Evans 	return (ret);
1223*a4bd5210SJason Evans }
1224*a4bd5210SJason Evans #endif
1225*a4bd5210SJason Evans 
1226*a4bd5210SJason Evans #ifdef JEMALLOC_OVERRIDE_VALLOC
1227*a4bd5210SJason Evans JEMALLOC_ATTR(malloc)
1228*a4bd5210SJason Evans JEMALLOC_ATTR(visibility("default"))
1229*a4bd5210SJason Evans void *
1230*a4bd5210SJason Evans je_valloc(size_t size)
1231*a4bd5210SJason Evans {
1232*a4bd5210SJason Evans 	void *ret JEMALLOC_CC_SILENCE_INIT(NULL);
1233*a4bd5210SJason Evans 	imemalign(&ret, PAGE, size, 1);
1234*a4bd5210SJason Evans 	JEMALLOC_VALGRIND_MALLOC(ret != NULL, ret, size, false);
1235*a4bd5210SJason Evans 	return (ret);
1236*a4bd5210SJason Evans }
1237*a4bd5210SJason Evans #endif
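
/*
 * Illustrative sketch (annotation added for this listing, compiled out): with
 * these overrides enabled, legacy memalign(3)/valloc(3) callers are served by
 * imemalign(), so the result honors the requested (or page) alignment.
 */
#if 0
#include <assert.h>
#include <malloc.h>
#include <stdint.h>
#include <stdlib.h>

static void
memalign_usage_sketch(void)
{
	void *p = memalign(64, 100);	/* 64-byte-aligned, >= 100 bytes. */

	assert(p != NULL && ((uintptr_t)p & 63) == 0);
	free(p);
}
#endif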
1238*a4bd5210SJason Evans 
1239*a4bd5210SJason Evans /*
1240*a4bd5210SJason Evans  * is_malloc(je_malloc) is a two-level macro test that detects whether
1241*a4bd5210SJason Evans  * jemalloc_defs.h contains "#define je_malloc malloc".
1242*a4bd5210SJason Evans  */
1243*a4bd5210SJason Evans #define	malloc_is_malloc 1
1244*a4bd5210SJason Evans #define	is_malloc_(a) malloc_is_ ## a
1245*a4bd5210SJason Evans #define	is_malloc(a) is_malloc_(a)
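
/*
 * Expansion sketch (annotation added for this listing): because the argument
 * of is_malloc() is expanded before is_malloc_() pastes tokens, the test
 * below reduces as follows when jemalloc_defs.h contains
 * "#define je_malloc malloc":
 *
 *	is_malloc(je_malloc) -> is_malloc_(malloc) -> malloc_is_malloc -> 1
 *
 * Without that define, the paste yields malloc_is_je_malloc, which is not
 * defined and therefore evaluates to 0 in the #if expression.
 */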
1246*a4bd5210SJason Evans 
1247*a4bd5210SJason Evans #if ((is_malloc(je_malloc) == 1) && defined(__GLIBC__) && !defined(__UCLIBC__))
1248*a4bd5210SJason Evans /*
1249*a4bd5210SJason Evans  * glibc's RTLD_DEEPBIND flag for dlopen(3) can cause a dlopen()ed library to
1250*a4bd5210SJason Evans  * bind to libc's malloc(3)-compatible functions instead of jemalloc's, mixing
1251*a4bd5210SJason Evans  * allocators (https://bugzilla.mozilla.org/show_bug.cgi?id=493541).
1252*a4bd5210SJason Evans  *
1253*a4bd5210SJason Evans  * These definitions interpose hooks in glibc.  The functions are actually
1254*a4bd5210SJason Evans  * passed an extra argument for the caller return address, which will be
1255*a4bd5210SJason Evans  * ignored.
1256*a4bd5210SJason Evans  */
1257*a4bd5210SJason Evans JEMALLOC_ATTR(visibility("default"))
1258*a4bd5210SJason Evans void (* const __free_hook)(void *ptr) = je_free;
1259*a4bd5210SJason Evans 
1260*a4bd5210SJason Evans JEMALLOC_ATTR(visibility("default"))
1261*a4bd5210SJason Evans void *(* const __malloc_hook)(size_t size) = je_malloc;
1262*a4bd5210SJason Evans 
1263*a4bd5210SJason Evans JEMALLOC_ATTR(visibility("default"))
1264*a4bd5210SJason Evans void *(* const __realloc_hook)(void *ptr, size_t size) = je_realloc;
1265*a4bd5210SJason Evans 
1266*a4bd5210SJason Evans JEMALLOC_ATTR(visibility("default"))
1267*a4bd5210SJason Evans void *(* const __memalign_hook)(size_t alignment, size_t size) = je_memalign;
1268*a4bd5210SJason Evans #endif
1269*a4bd5210SJason Evans 
1270*a4bd5210SJason Evans /*
1271*a4bd5210SJason Evans  * End non-standard override functions.
1272*a4bd5210SJason Evans  */
1273*a4bd5210SJason Evans /******************************************************************************/
1274*a4bd5210SJason Evans /*
1275*a4bd5210SJason Evans  * Begin non-standard functions.
1276*a4bd5210SJason Evans  */
1277*a4bd5210SJason Evans 
1278*a4bd5210SJason Evans JEMALLOC_ATTR(visibility("default"))
1279*a4bd5210SJason Evans size_t
1280*a4bd5210SJason Evans je_malloc_usable_size(const void *ptr)
1281*a4bd5210SJason Evans {
1282*a4bd5210SJason Evans 	size_t ret;
1283*a4bd5210SJason Evans 
1284*a4bd5210SJason Evans 	assert(malloc_initialized || IS_INITIALIZER);
1285*a4bd5210SJason Evans 
1286*a4bd5210SJason Evans 	if (config_ivsalloc)
1287*a4bd5210SJason Evans 		ret = ivsalloc(ptr, config_prof);
1288*a4bd5210SJason Evans 	else
1289*a4bd5210SJason Evans 		ret = (ptr != NULL) ? isalloc(ptr, config_prof) : 0;
1290*a4bd5210SJason Evans 
1291*a4bd5210SJason Evans 	return (ret);
1292*a4bd5210SJason Evans }
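
/*
 * Illustrative sketch (annotation added for this listing, compiled out): the
 * value reported by malloc_usable_size(3) is the size of the backing size
 * class, so it is at least as large as the original request.  Assumes the
 * declaration exported via <malloc_np.h> on FreeBSD.
 */
#if 0
#include <assert.h>
#include <stdlib.h>
#include <malloc_np.h>	/* malloc_usable_size() on FreeBSD. */

static void
usable_size_sketch(void)
{
	void *p = malloc(42);

	if (p != NULL) {
		assert(malloc_usable_size(p) >= 42);
		free(p);
	}
}
#endif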
1293*a4bd5210SJason Evans 
1294*a4bd5210SJason Evans JEMALLOC_ATTR(visibility("default"))
1295*a4bd5210SJason Evans void
1296*a4bd5210SJason Evans je_malloc_stats_print(void (*write_cb)(void *, const char *), void *cbopaque,
1297*a4bd5210SJason Evans     const char *opts)
1298*a4bd5210SJason Evans {
1299*a4bd5210SJason Evans 
1300*a4bd5210SJason Evans 	stats_print(write_cb, cbopaque, opts);
1301*a4bd5210SJason Evans }
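
/*
 * Illustrative sketch (annotation added for this listing, compiled out):
 * passing a NULL write callback makes stats_print() fall back to the default
 * malloc_message() output; a custom callback can capture the report instead.
 * The opts string is passed through to stats_print() unmodified.  The sink
 * below is hypothetical and truncates to a fixed buffer.
 */
#if 0
#include <string.h>
#include <malloc_np.h>	/* malloc_stats_print() on FreeBSD. */

static void
append_cb(void *opaque, const char *s)
{
	char *buf = (char *)opaque;

	/* Append to the caller-provided 4 KiB buffer, truncating as needed. */
	strncat(buf, s, 4096 - strlen(buf) - 1);
}

static void
stats_print_sketch(void)
{
	char buf[4096] = "";

	malloc_stats_print(NULL, NULL, NULL);		/* Default output. */
	malloc_stats_print(append_cb, buf, NULL);	/* Captured output. */
}
#endif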
1302*a4bd5210SJason Evans 
1303*a4bd5210SJason Evans JEMALLOC_ATTR(visibility("default"))
1304*a4bd5210SJason Evans int
1305*a4bd5210SJason Evans je_mallctl(const char *name, void *oldp, size_t *oldlenp, void *newp,
1306*a4bd5210SJason Evans     size_t newlen)
1307*a4bd5210SJason Evans {
1308*a4bd5210SJason Evans 
1309*a4bd5210SJason Evans 	if (malloc_init())
1310*a4bd5210SJason Evans 		return (EAGAIN);
1311*a4bd5210SJason Evans 
1312*a4bd5210SJason Evans 	return (ctl_byname(name, oldp, oldlenp, newp, newlen));
1313*a4bd5210SJason Evans }
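
/*
 * Illustrative sketch (annotation added for this listing, compiled out): a
 * typical mallctl() read of allocation statistics.  Statistics are
 * snapshotted, so the "epoch" control is written first to refresh them
 * (assuming a build with statistics enabled).
 */
#if 0
#include <stdint.h>
#include <stdio.h>
#include <malloc_np.h>	/* mallctl() on FreeBSD. */

static void
mallctl_sketch(void)
{
	uint64_t epoch = 1;
	size_t allocated, sz;

	sz = sizeof(epoch);
	(void)mallctl("epoch", &epoch, &sz, &epoch, sz);	/* Refresh. */

	sz = sizeof(allocated);
	if (mallctl("stats.allocated", &allocated, &sz, NULL, 0) == 0)
		printf("allocated: %zu\n", allocated);
}
#endif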
1314*a4bd5210SJason Evans 
1315*a4bd5210SJason Evans JEMALLOC_ATTR(visibility("default"))
1316*a4bd5210SJason Evans int
1317*a4bd5210SJason Evans je_mallctlnametomib(const char *name, size_t *mibp, size_t *miblenp)
1318*a4bd5210SJason Evans {
1319*a4bd5210SJason Evans 
1320*a4bd5210SJason Evans 	if (malloc_init())
1321*a4bd5210SJason Evans 		return (EAGAIN);
1322*a4bd5210SJason Evans 
1323*a4bd5210SJason Evans 	return (ctl_nametomib(name, mibp, miblenp));
1324*a4bd5210SJason Evans }
1325*a4bd5210SJason Evans 
1326*a4bd5210SJason Evans JEMALLOC_ATTR(visibility("default"))
1327*a4bd5210SJason Evans int
1328*a4bd5210SJason Evans je_mallctlbymib(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
1329*a4bd5210SJason Evans   void *newp, size_t newlen)
1330*a4bd5210SJason Evans {
1331*a4bd5210SJason Evans 
1332*a4bd5210SJason Evans 	if (malloc_init())
1333*a4bd5210SJason Evans 		return (EAGAIN);
1334*a4bd5210SJason Evans 
1335*a4bd5210SJason Evans 	return (ctl_bymib(mib, miblen, oldp, oldlenp, newp, newlen));
1336*a4bd5210SJason Evans }
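
/*
 * Illustrative sketch (annotation added for this listing, compiled out): the
 * name -> MIB translation is intended for repeated lookups; translate once
 * with mallctlnametomib(), then patch the variable component and query via
 * mallctlbymib().  This mirrors the example in the jemalloc documentation
 * (assuming the "arenas.nbins" and "arenas.bin.<i>.size" controls of this
 * version).
 */
#if 0
#include <stdio.h>
#include <malloc_np.h>	/* mallctl*() on FreeBSD. */

static void
mallctlbymib_sketch(void)
{
	size_t mib[4], miblen = 4, sz;
	unsigned nbins, i;

	sz = sizeof(nbins);
	if (mallctl("arenas.nbins", &nbins, &sz, NULL, 0) != 0)
		return;
	if (mallctlnametomib("arenas.bin.0.size", mib, &miblen) != 0)
		return;
	for (i = 0; i < nbins; i++) {
		size_t bin_size;

		mib[2] = i;		/* Select bin i. */
		sz = sizeof(bin_size);
		if (mallctlbymib(mib, miblen, &bin_size, &sz, NULL, 0) == 0)
			printf("bin %u size: %zu\n", i, bin_size);
	}
}
#endif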
1337*a4bd5210SJason Evans 
1338*a4bd5210SJason Evans /*
1339*a4bd5210SJason Evans  * End non-standard functions.
1340*a4bd5210SJason Evans  */
1341*a4bd5210SJason Evans /******************************************************************************/
1342*a4bd5210SJason Evans /*
1343*a4bd5210SJason Evans  * Begin experimental functions.
1344*a4bd5210SJason Evans  */
1345*a4bd5210SJason Evans #ifdef JEMALLOC_EXPERIMENTAL
1346*a4bd5210SJason Evans 
1347*a4bd5210SJason Evans JEMALLOC_INLINE void *
1348*a4bd5210SJason Evans iallocm(size_t usize, size_t alignment, bool zero)
1349*a4bd5210SJason Evans {
1350*a4bd5210SJason Evans 
1351*a4bd5210SJason Evans 	assert(usize == ((alignment == 0) ? s2u(usize) : sa2u(usize,
1352*a4bd5210SJason Evans 	    alignment)));
1353*a4bd5210SJason Evans 
1354*a4bd5210SJason Evans 	if (alignment != 0)
1355*a4bd5210SJason Evans 		return (ipalloc(usize, alignment, zero));
1356*a4bd5210SJason Evans 	else if (zero)
1357*a4bd5210SJason Evans 		return (icalloc(usize));
1358*a4bd5210SJason Evans 	else
1359*a4bd5210SJason Evans 		return (imalloc(usize));
1360*a4bd5210SJason Evans }
1361*a4bd5210SJason Evans 
1362*a4bd5210SJason Evans JEMALLOC_ATTR(nonnull(1))
1363*a4bd5210SJason Evans JEMALLOC_ATTR(visibility("default"))
1364*a4bd5210SJason Evans int
1365*a4bd5210SJason Evans je_allocm(void **ptr, size_t *rsize, size_t size, int flags)
1366*a4bd5210SJason Evans {
1367*a4bd5210SJason Evans 	void *p;
1368*a4bd5210SJason Evans 	size_t usize;
1369*a4bd5210SJason Evans 	size_t alignment = (ZU(1) << (flags & ALLOCM_LG_ALIGN_MASK)
1370*a4bd5210SJason Evans 	    & (SIZE_T_MAX-1));
1371*a4bd5210SJason Evans 	bool zero = flags & ALLOCM_ZERO;
1372*a4bd5210SJason Evans 	prof_thr_cnt_t *cnt;
1373*a4bd5210SJason Evans 
1374*a4bd5210SJason Evans 	assert(ptr != NULL);
1375*a4bd5210SJason Evans 	assert(size != 0);
1376*a4bd5210SJason Evans 
1377*a4bd5210SJason Evans 	if (malloc_init())
1378*a4bd5210SJason Evans 		goto label_oom;
1379*a4bd5210SJason Evans 
1380*a4bd5210SJason Evans 	usize = (alignment == 0) ? s2u(size) : sa2u(size, alignment);
1381*a4bd5210SJason Evans 	if (usize == 0)
1382*a4bd5210SJason Evans 		goto label_oom;
1383*a4bd5210SJason Evans 
1384*a4bd5210SJason Evans 	if (config_prof && opt_prof) {
1385*a4bd5210SJason Evans 		PROF_ALLOC_PREP(1, usize, cnt);
1386*a4bd5210SJason Evans 		if (cnt == NULL)
1387*a4bd5210SJason Evans 			goto label_oom;
1388*a4bd5210SJason Evans 		if (prof_promote && (uintptr_t)cnt != (uintptr_t)1U && usize <=
1389*a4bd5210SJason Evans 		    SMALL_MAXCLASS) {
1390*a4bd5210SJason Evans 			size_t usize_promoted = (alignment == 0) ?
1391*a4bd5210SJason Evans 			    s2u(SMALL_MAXCLASS+1) : sa2u(SMALL_MAXCLASS+1,
1392*a4bd5210SJason Evans 			    alignment);
1393*a4bd5210SJason Evans 			assert(usize_promoted != 0);
1394*a4bd5210SJason Evans 			p = iallocm(usize_promoted, alignment, zero);
1395*a4bd5210SJason Evans 			if (p == NULL)
1396*a4bd5210SJason Evans 				goto label_oom;
1397*a4bd5210SJason Evans 			arena_prof_promoted(p, usize);
1398*a4bd5210SJason Evans 		} else {
1399*a4bd5210SJason Evans 			p = iallocm(usize, alignment, zero);
1400*a4bd5210SJason Evans 			if (p == NULL)
1401*a4bd5210SJason Evans 				goto label_oom;
1402*a4bd5210SJason Evans 		}
1403*a4bd5210SJason Evans 		prof_malloc(p, usize, cnt);
1404*a4bd5210SJason Evans 	} else {
1405*a4bd5210SJason Evans 		p = iallocm(usize, alignment, zero);
1406*a4bd5210SJason Evans 		if (p == NULL)
1407*a4bd5210SJason Evans 			goto label_oom;
1408*a4bd5210SJason Evans 	}
1409*a4bd5210SJason Evans 	if (rsize != NULL)
1410*a4bd5210SJason Evans 		*rsize = usize;
1411*a4bd5210SJason Evans 
1412*a4bd5210SJason Evans 	*ptr = p;
1413*a4bd5210SJason Evans 	if (config_stats) {
1414*a4bd5210SJason Evans 		assert(usize == isalloc(p, config_prof));
1415*a4bd5210SJason Evans 		thread_allocated_tsd_get()->allocated += usize;
1416*a4bd5210SJason Evans 	}
1417*a4bd5210SJason Evans 	UTRACE(0, size, p);
1418*a4bd5210SJason Evans 	JEMALLOC_VALGRIND_MALLOC(true, p, usize, zero);
1419*a4bd5210SJason Evans 	return (ALLOCM_SUCCESS);
1420*a4bd5210SJason Evans label_oom:
1421*a4bd5210SJason Evans 	if (config_xmalloc && opt_xmalloc) {
1422*a4bd5210SJason Evans 		malloc_write("<jemalloc>: Error in allocm(): "
1423*a4bd5210SJason Evans 		    "out of memory\n");
1424*a4bd5210SJason Evans 		abort();
1425*a4bd5210SJason Evans 	}
1426*a4bd5210SJason Evans 	*ptr = NULL;
1427*a4bd5210SJason Evans 	UTRACE(0, size, 0);
1428*a4bd5210SJason Evans 	return (ALLOCM_ERR_OOM);
1429*a4bd5210SJason Evans }
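
/*
 * Illustrative sketch (annotation added for this listing, compiled out): an
 * allocm() request for zeroed, 64-byte-aligned memory via the experimental
 * API (exported when built with --enable-experimental).  The usable size is
 * reported through *rsize and can be re-queried later with sallocm().
 */
#if 0
#include <string.h>
#include <malloc_np.h>	/* allocm()/sallocm()/dallocm() on FreeBSD. */

static int
allocm_sketch(void)
{
	void *p;
	size_t rsize, again;

	if (allocm(&p, &rsize, 4096, ALLOCM_LG_ALIGN(6) | ALLOCM_ZERO) !=
	    ALLOCM_SUCCESS)
		return (-1);
	memset(p, 0xa5, rsize);		/* The whole usable size is writable. */
	(void)sallocm(p, &again, 0);	/* again == rsize. */
	(void)dallocm(p, 0);
	return (0);
}
#endif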
1430*a4bd5210SJason Evans 
1431*a4bd5210SJason Evans JEMALLOC_ATTR(nonnull(1))
1432*a4bd5210SJason Evans JEMALLOC_ATTR(visibility("default"))
1433*a4bd5210SJason Evans int
1434*a4bd5210SJason Evans je_rallocm(void **ptr, size_t *rsize, size_t size, size_t extra, int flags)
1435*a4bd5210SJason Evans {
1436*a4bd5210SJason Evans 	void *p, *q;
1437*a4bd5210SJason Evans 	size_t usize;
1438*a4bd5210SJason Evans 	size_t old_size;
1439*a4bd5210SJason Evans 	size_t old_rzsize JEMALLOC_CC_SILENCE_INIT(0);
1440*a4bd5210SJason Evans 	size_t alignment = (ZU(1) << (flags & ALLOCM_LG_ALIGN_MASK)
1441*a4bd5210SJason Evans 	    & (SIZE_T_MAX-1));
1442*a4bd5210SJason Evans 	bool zero = flags & ALLOCM_ZERO;
1443*a4bd5210SJason Evans 	bool no_move = flags & ALLOCM_NO_MOVE;
1444*a4bd5210SJason Evans 	prof_thr_cnt_t *cnt;
1445*a4bd5210SJason Evans 
1446*a4bd5210SJason Evans 	assert(ptr != NULL);
1447*a4bd5210SJason Evans 	assert(*ptr != NULL);
1448*a4bd5210SJason Evans 	assert(size != 0);
1449*a4bd5210SJason Evans 	assert(SIZE_T_MAX - size >= extra);
1450*a4bd5210SJason Evans 	assert(malloc_initialized || IS_INITIALIZER);
1451*a4bd5210SJason Evans 
1452*a4bd5210SJason Evans 	p = *ptr;
1453*a4bd5210SJason Evans 	if (config_prof && opt_prof) {
1454*a4bd5210SJason Evans 		/*
1455*a4bd5210SJason Evans 		 * usize isn't knowable before iralloc() returns when extra is
1456*a4bd5210SJason Evans 		 * non-zero.  Therefore, compute its maximum possible value and
1457*a4bd5210SJason Evans 		 * use that in PROF_ALLOC_PREP() to decide whether to capture a
1458*a4bd5210SJason Evans 		 * backtrace.  prof_realloc() will use the actual usize to
1459*a4bd5210SJason Evans 		 * decide whether to sample.
1460*a4bd5210SJason Evans 		 */
1461*a4bd5210SJason Evans 		size_t max_usize = (alignment == 0) ? s2u(size+extra) :
1462*a4bd5210SJason Evans 		    sa2u(size+extra, alignment);
1463*a4bd5210SJason Evans 		prof_ctx_t *old_ctx = prof_ctx_get(p);
1464*a4bd5210SJason Evans 		old_size = isalloc(p, true);
1465*a4bd5210SJason Evans 		if (config_valgrind && opt_valgrind)
1466*a4bd5210SJason Evans 			old_rzsize = p2rz(p);
1467*a4bd5210SJason Evans 		PROF_ALLOC_PREP(1, max_usize, cnt);
1468*a4bd5210SJason Evans 		if (cnt == NULL)
1469*a4bd5210SJason Evans 			goto label_oom;
1470*a4bd5210SJason Evans 		/*
1471*a4bd5210SJason Evans 		 * Use minimum usize to determine whether promotion may happen.
1472*a4bd5210SJason Evans 		 */
1473*a4bd5210SJason Evans 		if (prof_promote && (uintptr_t)cnt != (uintptr_t)1U
1474*a4bd5210SJason Evans 		    && ((alignment == 0) ? s2u(size) : sa2u(size, alignment))
1475*a4bd5210SJason Evans 		    <= SMALL_MAXCLASS) {
1476*a4bd5210SJason Evans 			q = iralloc(p, SMALL_MAXCLASS+1, (SMALL_MAXCLASS+1 >=
1477*a4bd5210SJason Evans 			    size+extra) ? 0 : size+extra - (SMALL_MAXCLASS+1),
1478*a4bd5210SJason Evans 			    alignment, zero, no_move);
1479*a4bd5210SJason Evans 			if (q == NULL)
1480*a4bd5210SJason Evans 				goto label_err;
1481*a4bd5210SJason Evans 			if (max_usize < PAGE) {
1482*a4bd5210SJason Evans 				usize = max_usize;
1483*a4bd5210SJason Evans 				arena_prof_promoted(q, usize);
1484*a4bd5210SJason Evans 			} else
1485*a4bd5210SJason Evans 				usize = isalloc(q, config_prof);
1486*a4bd5210SJason Evans 		} else {
1487*a4bd5210SJason Evans 			q = iralloc(p, size, extra, alignment, zero, no_move);
1488*a4bd5210SJason Evans 			if (q == NULL)
1489*a4bd5210SJason Evans 				goto label_err;
1490*a4bd5210SJason Evans 			usize = isalloc(q, config_prof);
1491*a4bd5210SJason Evans 		}
1492*a4bd5210SJason Evans 		prof_realloc(q, usize, cnt, old_size, old_ctx);
1493*a4bd5210SJason Evans 		if (rsize != NULL)
1494*a4bd5210SJason Evans 			*rsize = usize;
1495*a4bd5210SJason Evans 	} else {
1496*a4bd5210SJason Evans 		if (config_stats) {
1497*a4bd5210SJason Evans 			old_size = isalloc(p, false);
1498*a4bd5210SJason Evans 			if (config_valgrind && opt_valgrind)
1499*a4bd5210SJason Evans 				old_rzsize = u2rz(old_size);
1500*a4bd5210SJason Evans 		} else if (config_valgrind && opt_valgrind) {
1501*a4bd5210SJason Evans 			old_size = isalloc(p, false);
1502*a4bd5210SJason Evans 			old_rzsize = u2rz(old_size);
1503*a4bd5210SJason Evans 		}
1504*a4bd5210SJason Evans 		q = iralloc(p, size, extra, alignment, zero, no_move);
1505*a4bd5210SJason Evans 		if (q == NULL)
1506*a4bd5210SJason Evans 			goto label_err;
1507*a4bd5210SJason Evans 		if (config_stats)
1508*a4bd5210SJason Evans 			usize = isalloc(q, config_prof);
1509*a4bd5210SJason Evans 		if (rsize != NULL) {
1510*a4bd5210SJason Evans 			if (config_stats == false)
1511*a4bd5210SJason Evans 				usize = isalloc(q, config_prof);
1512*a4bd5210SJason Evans 			*rsize = usize;
1513*a4bd5210SJason Evans 		}
1514*a4bd5210SJason Evans 	}
1515*a4bd5210SJason Evans 
1516*a4bd5210SJason Evans 	*ptr = q;
1517*a4bd5210SJason Evans 	if (config_stats) {
1518*a4bd5210SJason Evans 		thread_allocated_t *ta;
1519*a4bd5210SJason Evans 		ta = thread_allocated_tsd_get();
1520*a4bd5210SJason Evans 		ta->allocated += usize;
1521*a4bd5210SJason Evans 		ta->deallocated += old_size;
1522*a4bd5210SJason Evans 	}
1523*a4bd5210SJason Evans 	UTRACE(p, size, q);
1524*a4bd5210SJason Evans 	JEMALLOC_VALGRIND_REALLOC(q, usize, p, old_size, old_rzsize, zero);
1525*a4bd5210SJason Evans 	return (ALLOCM_SUCCESS);
1526*a4bd5210SJason Evans label_err:
1527*a4bd5210SJason Evans 	if (no_move) {
1528*a4bd5210SJason Evans 		UTRACE(p, size, q);
1529*a4bd5210SJason Evans 		return (ALLOCM_ERR_NOT_MOVED);
1530*a4bd5210SJason Evans 	}
1531*a4bd5210SJason Evans label_oom:
1532*a4bd5210SJason Evans 	if (config_xmalloc && opt_xmalloc) {
1533*a4bd5210SJason Evans 		malloc_write("<jemalloc>: Error in rallocm(): "
1534*a4bd5210SJason Evans 		    "out of memory\n");
1535*a4bd5210SJason Evans 		abort();
1536*a4bd5210SJason Evans 	}
1537*a4bd5210SJason Evans 	UTRACE(p, size, 0);
1538*a4bd5210SJason Evans 	return (ALLOCM_ERR_OOM);
1539*a4bd5210SJason Evans }
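
/*
 * Illustrative sketch (annotation added for this listing, compiled out):
 * ALLOCM_NO_MOVE asks rallocm() to resize only in place; on
 * ALLOCM_ERR_NOT_MOVED the caller can retry with a moving reallocation.
 */
#if 0
#include <malloc_np.h>	/* rallocm() on FreeBSD. */

static int
rallocm_sketch(void **pp, size_t newsize)
{
	size_t rsize;
	int ret;

	ret = rallocm(pp, &rsize, newsize, 0, ALLOCM_NO_MOVE);
	if (ret == ALLOCM_ERR_NOT_MOVED) {
		/* In-place growth failed; allow the object to move. */
		ret = rallocm(pp, &rsize, newsize, 0, 0);
	}
	return (ret);
}
#endif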
1540*a4bd5210SJason Evans 
1541*a4bd5210SJason Evans JEMALLOC_ATTR(nonnull(1))
1542*a4bd5210SJason Evans JEMALLOC_ATTR(visibility("default"))
1543*a4bd5210SJason Evans int
1544*a4bd5210SJason Evans je_sallocm(const void *ptr, size_t *rsize, int flags)
1545*a4bd5210SJason Evans {
1546*a4bd5210SJason Evans 	size_t sz;
1547*a4bd5210SJason Evans 
1548*a4bd5210SJason Evans 	assert(malloc_initialized || IS_INITIALIZER);
1549*a4bd5210SJason Evans 
1550*a4bd5210SJason Evans 	if (config_ivsalloc)
1551*a4bd5210SJason Evans 		sz = ivsalloc(ptr, config_prof);
1552*a4bd5210SJason Evans 	else {
1553*a4bd5210SJason Evans 		assert(ptr != NULL);
1554*a4bd5210SJason Evans 		sz = isalloc(ptr, config_prof);
1555*a4bd5210SJason Evans 	}
1556*a4bd5210SJason Evans 	assert(rsize != NULL);
1557*a4bd5210SJason Evans 	*rsize = sz;
1558*a4bd5210SJason Evans 
1559*a4bd5210SJason Evans 	return (ALLOCM_SUCCESS);
1560*a4bd5210SJason Evans }
1561*a4bd5210SJason Evans 
1562*a4bd5210SJason Evans JEMALLOC_ATTR(nonnull(1))
1563*a4bd5210SJason Evans JEMALLOC_ATTR(visibility("default"))
1564*a4bd5210SJason Evans int
1565*a4bd5210SJason Evans je_dallocm(void *ptr, int flags)
1566*a4bd5210SJason Evans {
1567*a4bd5210SJason Evans 	size_t usize;
1568*a4bd5210SJason Evans 	size_t rzsize JEMALLOC_CC_SILENCE_INIT(0);
1569*a4bd5210SJason Evans 
1570*a4bd5210SJason Evans 	assert(ptr != NULL);
1571*a4bd5210SJason Evans 	assert(malloc_initialized || IS_INITIALIZER);
1572*a4bd5210SJason Evans 
1573*a4bd5210SJason Evans 	UTRACE(ptr, 0, 0);
1574*a4bd5210SJason Evans 	if (config_stats || config_valgrind)
1575*a4bd5210SJason Evans 		usize = isalloc(ptr, config_prof);
1576*a4bd5210SJason Evans 	if (config_prof && opt_prof) {
1577*a4bd5210SJason Evans 		if (config_stats == false && config_valgrind == false)
1578*a4bd5210SJason Evans 			usize = isalloc(ptr, config_prof);
1579*a4bd5210SJason Evans 		prof_free(ptr, usize);
1580*a4bd5210SJason Evans 	}
1581*a4bd5210SJason Evans 	if (config_stats)
1582*a4bd5210SJason Evans 		thread_allocated_tsd_get()->deallocated += usize;
1583*a4bd5210SJason Evans 	if (config_valgrind && opt_valgrind)
1584*a4bd5210SJason Evans 		rzsize = p2rz(ptr);
1585*a4bd5210SJason Evans 	iqalloc(ptr);
1586*a4bd5210SJason Evans 	JEMALLOC_VALGRIND_FREE(ptr, rzsize);
1587*a4bd5210SJason Evans 
1588*a4bd5210SJason Evans 	return (ALLOCM_SUCCESS);
1589*a4bd5210SJason Evans }
1590*a4bd5210SJason Evans 
1591*a4bd5210SJason Evans JEMALLOC_ATTR(visibility("default"))
1592*a4bd5210SJason Evans int
1593*a4bd5210SJason Evans je_nallocm(size_t *rsize, size_t size, int flags)
1594*a4bd5210SJason Evans {
1595*a4bd5210SJason Evans 	size_t usize;
1596*a4bd5210SJason Evans 	size_t alignment = (ZU(1) << (flags & ALLOCM_LG_ALIGN_MASK)
1597*a4bd5210SJason Evans 	    & (SIZE_T_MAX-1));
1598*a4bd5210SJason Evans 
1599*a4bd5210SJason Evans 	assert(size != 0);
1600*a4bd5210SJason Evans 
1601*a4bd5210SJason Evans 	if (malloc_init())
1602*a4bd5210SJason Evans 		return (ALLOCM_ERR_OOM);
1603*a4bd5210SJason Evans 
1604*a4bd5210SJason Evans 	usize = (alignment == 0) ? s2u(size) : sa2u(size, alignment);
1605*a4bd5210SJason Evans 	if (usize == 0)
1606*a4bd5210SJason Evans 		return (ALLOCM_ERR_OOM);
1607*a4bd5210SJason Evans 
1608*a4bd5210SJason Evans 	if (rsize != NULL)
1609*a4bd5210SJason Evans 		*rsize = usize;
1610*a4bd5210SJason Evans 	return (ALLOCM_SUCCESS);
1611*a4bd5210SJason Evans }
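
/*
 * Illustrative sketch (annotation added for this listing, compiled out):
 * nallocm() reports the usable size an equivalent allocm() call would
 * return, without performing an allocation, which is handy for sizing
 * containers up front.
 */
#if 0
#include <stdio.h>
#include <malloc_np.h>	/* nallocm() on FreeBSD. */

static void
nallocm_sketch(void)
{
	size_t usize;

	if (nallocm(&usize, 500, 0) == ALLOCM_SUCCESS)
		printf("a 500-byte request uses %zu bytes\n", usize);
}
#endif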
1612*a4bd5210SJason Evans 
1613*a4bd5210SJason Evans #endif
1614*a4bd5210SJason Evans /*
1615*a4bd5210SJason Evans  * End experimental functions.
1616*a4bd5210SJason Evans  */
1617*a4bd5210SJason Evans /******************************************************************************/
1618*a4bd5210SJason Evans /*
1619*a4bd5210SJason Evans  * The following functions are used by threading libraries to protect malloc
1620*a4bd5210SJason Evans  * state across fork().
1621*a4bd5210SJason Evans  */
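
/*
 * Registration sketch (annotation added for this listing, compiled out): the
 * handlers below are wired up around fork(), either by the threading library
 * calling _malloc_prefork()/_malloc_postfork() directly or, on configurations
 * without JEMALLOC_MUTEX_INIT_CB, via pthread_atfork(3) roughly as follows.
 */
#if 0
#include <pthread.h>

static void
register_fork_handlers_sketch(void)
{
	(void)pthread_atfork(jemalloc_prefork, jemalloc_postfork_parent,
	    jemalloc_postfork_child);
}
#endif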
1622*a4bd5210SJason Evans 
1623*a4bd5210SJason Evans #ifndef JEMALLOC_MUTEX_INIT_CB
1624*a4bd5210SJason Evans void
1625*a4bd5210SJason Evans jemalloc_prefork(void)
1626*a4bd5210SJason Evans #else
1627*a4bd5210SJason Evans void
1628*a4bd5210SJason Evans _malloc_prefork(void)
1629*a4bd5210SJason Evans #endif
1630*a4bd5210SJason Evans {
1631*a4bd5210SJason Evans 	unsigned i;
1632*a4bd5210SJason Evans 
1633*a4bd5210SJason Evans 	/* Acquire all mutexes in a safe order. */
1634*a4bd5210SJason Evans 	malloc_mutex_prefork(&arenas_lock);
1635*a4bd5210SJason Evans 	for (i = 0; i < narenas; i++) {
1636*a4bd5210SJason Evans 		if (arenas[i] != NULL)
1637*a4bd5210SJason Evans 			arena_prefork(arenas[i]);
1638*a4bd5210SJason Evans 	}
1639*a4bd5210SJason Evans 	base_prefork();
1640*a4bd5210SJason Evans 	huge_prefork();
1641*a4bd5210SJason Evans 	chunk_dss_prefork();
1642*a4bd5210SJason Evans }
1643*a4bd5210SJason Evans 
1644*a4bd5210SJason Evans #ifndef JEMALLOC_MUTEX_INIT_CB
1645*a4bd5210SJason Evans void
1646*a4bd5210SJason Evans jemalloc_postfork_parent(void)
1647*a4bd5210SJason Evans #else
1648*a4bd5210SJason Evans void
1649*a4bd5210SJason Evans _malloc_postfork(void)
1650*a4bd5210SJason Evans #endif
1651*a4bd5210SJason Evans {
1652*a4bd5210SJason Evans 	unsigned i;
1653*a4bd5210SJason Evans 
1654*a4bd5210SJason Evans 	/* Release all mutexes, now that fork() has completed. */
1655*a4bd5210SJason Evans 	chunk_dss_postfork_parent();
1656*a4bd5210SJason Evans 	huge_postfork_parent();
1657*a4bd5210SJason Evans 	base_postfork_parent();
1658*a4bd5210SJason Evans 	for (i = 0; i < narenas; i++) {
1659*a4bd5210SJason Evans 		if (arenas[i] != NULL)
1660*a4bd5210SJason Evans 			arena_postfork_parent(arenas[i]);
1661*a4bd5210SJason Evans 	}
1662*a4bd5210SJason Evans 	malloc_mutex_postfork_parent(&arenas_lock);
1663*a4bd5210SJason Evans }
1664*a4bd5210SJason Evans 
1665*a4bd5210SJason Evans void
1666*a4bd5210SJason Evans jemalloc_postfork_child(void)
1667*a4bd5210SJason Evans {
1668*a4bd5210SJason Evans 	unsigned i;
1669*a4bd5210SJason Evans 
1670*a4bd5210SJason Evans 	/* Release all mutexes, now that fork() has completed. */
1671*a4bd5210SJason Evans 	chunk_dss_postfork_child();
1672*a4bd5210SJason Evans 	huge_postfork_child();
1673*a4bd5210SJason Evans 	base_postfork_child();
1674*a4bd5210SJason Evans 	for (i = 0; i < narenas; i++) {
1675*a4bd5210SJason Evans 		if (arenas[i] != NULL)
1676*a4bd5210SJason Evans 			arena_postfork_child(arenas[i]);
1677*a4bd5210SJason Evans 	}
1678*a4bd5210SJason Evans 	malloc_mutex_postfork_child(&arenas_lock);
1679*a4bd5210SJason Evans }
1680*a4bd5210SJason Evans 
1681*a4bd5210SJason Evans /******************************************************************************/
1682*a4bd5210SJason Evans /*
1683*a4bd5210SJason Evans  * The following functions are used for TLS allocation/deallocation in static
1684*a4bd5210SJason Evans  * binaries on FreeBSD.  The primary difference between these and i[mcd]alloc()
1685*a4bd5210SJason Evans  * is that these avoid accessing TLS variables.
1686*a4bd5210SJason Evans  */
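
/*
 * Usage sketch (annotation added for this listing, compiled out): a consumer
 * such as early TLS bootstrap code would use these like malloc(3)/free(3),
 * with all allocations pinned to arenas[0] and no thread cache involved.
 */
#if 0
#include <stddef.h>

static void *
tls_block_alloc_sketch(size_t size)
{

	return (a0calloc(1, size));	/* Zeroed, arena 0, no tcache. */
}

static void
tls_block_free_sketch(void *ptr)
{

	a0free(ptr);
}
#endif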
1687*a4bd5210SJason Evans 
1688*a4bd5210SJason Evans static void *
1689*a4bd5210SJason Evans a0alloc(size_t size, bool zero)
1690*a4bd5210SJason Evans {
1691*a4bd5210SJason Evans 
1692*a4bd5210SJason Evans 	if (malloc_init())
1693*a4bd5210SJason Evans 		return (NULL);
1694*a4bd5210SJason Evans 
1695*a4bd5210SJason Evans 	if (size == 0)
1696*a4bd5210SJason Evans 		size = 1;
1697*a4bd5210SJason Evans 
1698*a4bd5210SJason Evans 	if (size <= arena_maxclass)
1699*a4bd5210SJason Evans 		return (arena_malloc(arenas[0], size, zero, false));
1700*a4bd5210SJason Evans 	else
1701*a4bd5210SJason Evans 		return (huge_malloc(size, zero));
1702*a4bd5210SJason Evans }
1703*a4bd5210SJason Evans 
1704*a4bd5210SJason Evans void *
1705*a4bd5210SJason Evans a0malloc(size_t size)
1706*a4bd5210SJason Evans {
1707*a4bd5210SJason Evans 
1708*a4bd5210SJason Evans 	return (a0alloc(size, false));
1709*a4bd5210SJason Evans }
1710*a4bd5210SJason Evans 
1711*a4bd5210SJason Evans void *
1712*a4bd5210SJason Evans a0calloc(size_t num, size_t size)
1713*a4bd5210SJason Evans {
1714*a4bd5210SJason Evans 
1715*a4bd5210SJason Evans 	return (a0alloc(num * size, true));
1716*a4bd5210SJason Evans }
1717*a4bd5210SJason Evans 
1718*a4bd5210SJason Evans void
1719*a4bd5210SJason Evans a0free(void *ptr)
1720*a4bd5210SJason Evans {
1721*a4bd5210SJason Evans 	arena_chunk_t *chunk;
1722*a4bd5210SJason Evans 
1723*a4bd5210SJason Evans 	if (ptr == NULL)
1724*a4bd5210SJason Evans 		return;
1725*a4bd5210SJason Evans 
1726*a4bd5210SJason Evans 	chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
1727*a4bd5210SJason Evans 	if (chunk != ptr)
1728*a4bd5210SJason Evans 		arena_dalloc(chunk->arena, chunk, ptr, false);
1729*a4bd5210SJason Evans 	else
1730*a4bd5210SJason Evans 		huge_dalloc(ptr, true);
1731*a4bd5210SJason Evans }
1732*a4bd5210SJason Evans 
1733*a4bd5210SJason Evans /******************************************************************************/