/*
 * xref: /freebsd/contrib/jemalloc/include/jemalloc/internal/extent_inlines.h
 * (revision c5ad81420c495d1d5de04209b0ec4fcb435c322c)
 */
#ifndef JEMALLOC_INTERNAL_EXTENT_INLINES_H
#define JEMALLOC_INTERNAL_EXTENT_INLINES_H

#include "jemalloc/internal/mutex.h"
#include "jemalloc/internal/mutex_pool.h"
#include "jemalloc/internal/pages.h"
#include "jemalloc/internal/prng.h"
#include "jemalloc/internal/ql.h"
#include "jemalloc/internal/sc.h"
#include "jemalloc/internal/sz.h"
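
/*
 * Extent locks come from a global mutex pool (extent_mutex_pool) keyed on the
 * extent's address rather than from per-extent mutexes.  The *2 variants lock
 * two extents at once; mutex_pool_lock2() acquires the underlying pool
 * mutexes in a consistent order (or just once when both extents hash to the
 * same mutex), so callers need no deadlock-avoidance logic of their own.
 */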
static inline void
extent_lock(tsdn_t *tsdn, extent_t *extent) {
	assert(extent != NULL);
	mutex_pool_lock(tsdn, &extent_mutex_pool, (uintptr_t)extent);
}

static inline void
extent_unlock(tsdn_t *tsdn, extent_t *extent) {
	assert(extent != NULL);
	mutex_pool_unlock(tsdn, &extent_mutex_pool, (uintptr_t)extent);
}

static inline void
extent_lock2(tsdn_t *tsdn, extent_t *extent1, extent_t *extent2) {
	assert(extent1 != NULL && extent2 != NULL);
	mutex_pool_lock2(tsdn, &extent_mutex_pool, (uintptr_t)extent1,
	    (uintptr_t)extent2);
}

static inline void
extent_unlock2(tsdn_t *tsdn, extent_t *extent1, extent_t *extent2) {
	assert(extent1 != NULL && extent2 != NULL);
	mutex_pool_unlock2(tsdn, &extent_mutex_pool, (uintptr_t)extent1,
	    (uintptr_t)extent2);
}
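
/*
 * Most per-extent metadata (arena index, size class, state, flag bits, etc.)
 * is packed into the 64-bit e_bits field.  The getters below extract the
 * individual fields with the EXTENT_BITS_*_MASK / EXTENT_BITS_*_SHIFT macros
 * defined alongside the extent_t layout.
 */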
static inline unsigned
extent_arena_ind_get(const extent_t *extent) {
	unsigned arena_ind = (unsigned)((extent->e_bits &
	    EXTENT_BITS_ARENA_MASK) >> EXTENT_BITS_ARENA_SHIFT);
	assert(arena_ind < MALLOCX_ARENA_LIMIT);

	return arena_ind;
}

static inline arena_t *
extent_arena_get(const extent_t *extent) {
	unsigned arena_ind = extent_arena_ind_get(extent);

	return (arena_t *)atomic_load_p(&arenas[arena_ind], ATOMIC_ACQUIRE);
}

static inline szind_t
extent_szind_get_maybe_invalid(const extent_t *extent) {
	szind_t szind = (szind_t)((extent->e_bits & EXTENT_BITS_SZIND_MASK) >>
	    EXTENT_BITS_SZIND_SHIFT);
	assert(szind <= SC_NSIZES);
	return szind;
}

static inline szind_t
extent_szind_get(const extent_t *extent) {
	szind_t szind = extent_szind_get_maybe_invalid(extent);
	assert(szind < SC_NSIZES); /* Never call when "invalid". */
	return szind;
}

static inline size_t
extent_usize_get(const extent_t *extent) {
	return sz_index2size(extent_szind_get(extent));
}

static inline unsigned
extent_binshard_get(const extent_t *extent) {
	unsigned binshard = (unsigned)((extent->e_bits &
	    EXTENT_BITS_BINSHARD_MASK) >> EXTENT_BITS_BINSHARD_SHIFT);
	assert(binshard < bin_infos[extent_szind_get(extent)].n_shards);
	return binshard;
}

static inline size_t
extent_sn_get(const extent_t *extent) {
	return (size_t)((extent->e_bits & EXTENT_BITS_SN_MASK) >>
	    EXTENT_BITS_SN_SHIFT);
}

static inline extent_state_t
extent_state_get(const extent_t *extent) {
	return (extent_state_t)((extent->e_bits & EXTENT_BITS_STATE_MASK) >>
	    EXTENT_BITS_STATE_SHIFT);
}

static inline bool
extent_zeroed_get(const extent_t *extent) {
	return (bool)((extent->e_bits & EXTENT_BITS_ZEROED_MASK) >>
	    EXTENT_BITS_ZEROED_SHIFT);
}

static inline bool
extent_committed_get(const extent_t *extent) {
	return (bool)((extent->e_bits & EXTENT_BITS_COMMITTED_MASK) >>
	    EXTENT_BITS_COMMITTED_SHIFT);
}

static inline bool
extent_dumpable_get(const extent_t *extent) {
	return (bool)((extent->e_bits & EXTENT_BITS_DUMPABLE_MASK) >>
	    EXTENT_BITS_DUMPABLE_SHIFT);
}

static inline bool
extent_slab_get(const extent_t *extent) {
	return (bool)((extent->e_bits & EXTENT_BITS_SLAB_MASK) >>
	    EXTENT_BITS_SLAB_SHIFT);
}

static inline unsigned
extent_nfree_get(const extent_t *extent) {
	assert(extent_slab_get(extent));
	return (unsigned)((extent->e_bits & EXTENT_BITS_NFREE_MASK) >>
	    EXTENT_BITS_NFREE_SHIFT);
}

static inline void *
extent_base_get(const extent_t *extent) {
	assert(extent->e_addr == PAGE_ADDR2BASE(extent->e_addr) ||
	    !extent_slab_get(extent));
	return PAGE_ADDR2BASE(extent->e_addr);
}

static inline void *
extent_addr_get(const extent_t *extent) {
	assert(extent->e_addr == PAGE_ADDR2BASE(extent->e_addr) ||
	    !extent_slab_get(extent));
	return extent->e_addr;
}

static inline size_t
extent_size_get(const extent_t *extent) {
	return (extent->e_size_esn & EXTENT_SIZE_MASK);
}

static inline size_t
extent_esn_get(const extent_t *extent) {
	return (extent->e_size_esn & EXTENT_ESN_MASK);
}

static inline size_t
extent_bsize_get(const extent_t *extent) {
	return extent->e_bsize;
}

static inline void *
extent_before_get(const extent_t *extent) {
	return (void *)((uintptr_t)extent_base_get(extent) - PAGE);
}

static inline void *
extent_last_get(const extent_t *extent) {
	return (void *)((uintptr_t)extent_base_get(extent) +
	    extent_size_get(extent) - PAGE);
}

static inline void *
extent_past_get(const extent_t *extent) {
	return (void *)((uintptr_t)extent_base_get(extent) +
	    extent_size_get(extent));
}

static inline arena_slab_data_t *
extent_slab_data_get(extent_t *extent) {
	assert(extent_slab_get(extent));
	return &extent->e_slab_data;
}

static inline const arena_slab_data_t *
extent_slab_data_get_const(const extent_t *extent) {
	assert(extent_slab_get(extent));
	return &extent->e_slab_data;
}

static inline prof_tctx_t *
extent_prof_tctx_get(const extent_t *extent) {
	return (prof_tctx_t *)atomic_load_p(&extent->e_prof_tctx,
	    ATOMIC_ACQUIRE);
}

static inline nstime_t
extent_prof_alloc_time_get(const extent_t *extent) {
	return extent->e_alloc_time;
}
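
/*
 * The setters below are plain read-modify-write updates of e_bits and are not
 * atomic; callers rely on higher-level synchronization (extent ownership or
 * the extent lock) for serialization.
 */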
static inline void
extent_arena_set(extent_t *extent, arena_t *arena) {
	unsigned arena_ind = (arena != NULL) ? arena_ind_get(arena) : ((1U <<
	    MALLOCX_ARENA_BITS) - 1);
	extent->e_bits = (extent->e_bits & ~EXTENT_BITS_ARENA_MASK) |
	    ((uint64_t)arena_ind << EXTENT_BITS_ARENA_SHIFT);
}

static inline void
extent_binshard_set(extent_t *extent, unsigned binshard) {
	/* The assertion assumes szind is set already. */
	assert(binshard < bin_infos[extent_szind_get(extent)].n_shards);
	extent->e_bits = (extent->e_bits & ~EXTENT_BITS_BINSHARD_MASK) |
	    ((uint64_t)binshard << EXTENT_BITS_BINSHARD_SHIFT);
}

static inline void
extent_addr_set(extent_t *extent, void *addr) {
	extent->e_addr = addr;
}
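
/*
 * For allocations with alignment smaller than a page,
 * extent_addr_randomize() nudges e_addr forward within the first page by a
 * random multiple of the cacheline-rounded alignment, spreading cache index
 * usage across allocations.  It draws from the thread's PRNG state when a
 * tsd is available and otherwise falls back to the owning arena's
 * offset_state.
 */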
static inline void
extent_addr_randomize(tsdn_t *tsdn, extent_t *extent, size_t alignment) {
	assert(extent_base_get(extent) == extent_addr_get(extent));

	if (alignment < PAGE) {
		unsigned lg_range = LG_PAGE -
		    lg_floor(CACHELINE_CEILING(alignment));
		size_t r;
		if (!tsdn_null(tsdn)) {
			tsd_t *tsd = tsdn_tsd(tsdn);
			r = (size_t)prng_lg_range_u64(
			    tsd_offset_statep_get(tsd), lg_range);
		} else {
			r = prng_lg_range_zu(
			    &extent_arena_get(extent)->offset_state,
			    lg_range, true);
		}
		uintptr_t random_offset = ((uintptr_t)r) << (LG_PAGE -
		    lg_range);
		extent->e_addr = (void *)((uintptr_t)extent->e_addr +
		    random_offset);
		assert(ALIGNMENT_ADDR2BASE(extent->e_addr, alignment) ==
		    extent->e_addr);
	}
}
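
/*
 * e_size_esn packs the page-multiple extent size together with a small
 * extent serial number (esn) in the low bits; the two setters below each
 * update one component without disturbing the other.
 */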
static inline void
extent_size_set(extent_t *extent, size_t size) {
	assert((size & ~EXTENT_SIZE_MASK) == 0);
	extent->e_size_esn = size | (extent->e_size_esn & ~EXTENT_SIZE_MASK);
}

static inline void
extent_esn_set(extent_t *extent, size_t esn) {
	extent->e_size_esn = (extent->e_size_esn & ~EXTENT_ESN_MASK) | (esn &
	    EXTENT_ESN_MASK);
}

static inline void
extent_bsize_set(extent_t *extent, size_t bsize) {
	extent->e_bsize = bsize;
}

static inline void
extent_szind_set(extent_t *extent, szind_t szind) {
	assert(szind <= SC_NSIZES); /* SC_NSIZES means "invalid". */
	extent->e_bits = (extent->e_bits & ~EXTENT_BITS_SZIND_MASK) |
	    ((uint64_t)szind << EXTENT_BITS_SZIND_SHIFT);
}

static inline void
extent_nfree_set(extent_t *extent, unsigned nfree) {
	assert(extent_slab_get(extent));
	extent->e_bits = (extent->e_bits & ~EXTENT_BITS_NFREE_MASK) |
	    ((uint64_t)nfree << EXTENT_BITS_NFREE_SHIFT);
}

static inline void
extent_nfree_binshard_set(extent_t *extent, unsigned nfree, unsigned binshard) {
	/* The assertion assumes szind is set already. */
	assert(binshard < bin_infos[extent_szind_get(extent)].n_shards);
	extent->e_bits = (extent->e_bits &
	    (~EXTENT_BITS_NFREE_MASK & ~EXTENT_BITS_BINSHARD_MASK)) |
	    ((uint64_t)binshard << EXTENT_BITS_BINSHARD_SHIFT) |
	    ((uint64_t)nfree << EXTENT_BITS_NFREE_SHIFT);
}
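
/*
 * The free-region count of a slab extent is adjusted in place by adding or
 * subtracting directly in its shifted bit position; this is correct as long
 * as the count never under- or overflows the nfree field.
 */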
static inline void
extent_nfree_inc(extent_t *extent) {
	assert(extent_slab_get(extent));
	extent->e_bits += ((uint64_t)1U << EXTENT_BITS_NFREE_SHIFT);
}

static inline void
extent_nfree_dec(extent_t *extent) {
	assert(extent_slab_get(extent));
	extent->e_bits -= ((uint64_t)1U << EXTENT_BITS_NFREE_SHIFT);
}

static inline void
extent_nfree_sub(extent_t *extent, uint64_t n) {
	assert(extent_slab_get(extent));
	extent->e_bits -= (n << EXTENT_BITS_NFREE_SHIFT);
}

static inline void
extent_sn_set(extent_t *extent, size_t sn) {
	extent->e_bits = (extent->e_bits & ~EXTENT_BITS_SN_MASK) |
	    ((uint64_t)sn << EXTENT_BITS_SN_SHIFT);
}

static inline void
extent_state_set(extent_t *extent, extent_state_t state) {
	extent->e_bits = (extent->e_bits & ~EXTENT_BITS_STATE_MASK) |
	    ((uint64_t)state << EXTENT_BITS_STATE_SHIFT);
}

static inline void
extent_zeroed_set(extent_t *extent, bool zeroed) {
	extent->e_bits = (extent->e_bits & ~EXTENT_BITS_ZEROED_MASK) |
	    ((uint64_t)zeroed << EXTENT_BITS_ZEROED_SHIFT);
}

static inline void
extent_committed_set(extent_t *extent, bool committed) {
	extent->e_bits = (extent->e_bits & ~EXTENT_BITS_COMMITTED_MASK) |
	    ((uint64_t)committed << EXTENT_BITS_COMMITTED_SHIFT);
}

static inline void
extent_dumpable_set(extent_t *extent, bool dumpable) {
	extent->e_bits = (extent->e_bits & ~EXTENT_BITS_DUMPABLE_MASK) |
	    ((uint64_t)dumpable << EXTENT_BITS_DUMPABLE_SHIFT);
}

static inline void
extent_slab_set(extent_t *extent, bool slab) {
	extent->e_bits = (extent->e_bits & ~EXTENT_BITS_SLAB_MASK) |
	    ((uint64_t)slab << EXTENT_BITS_SLAB_SHIFT);
}

static inline void
extent_prof_tctx_set(extent_t *extent, prof_tctx_t *tctx) {
	atomic_store_p(&extent->e_prof_tctx, tctx, ATOMIC_RELEASE);
}

static inline void
extent_prof_alloc_time_set(extent_t *extent, nstime_t t) {
	nstime_copy(&extent->e_alloc_time, &t);
}
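
/*
 * The is_head bit is only maintained when maps_coalesce is false; it records
 * whether the extent is the head of the mapping it was originally carved
 * from, which constrains how extents may later be split and merged.  When
 * maps_coalesce is true the bit is unused, hence not_reached().
 */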
static inline bool
extent_is_head_get(extent_t *extent) {
	if (maps_coalesce) {
		not_reached();
	}

	return (bool)((extent->e_bits & EXTENT_BITS_IS_HEAD_MASK) >>
	    EXTENT_BITS_IS_HEAD_SHIFT);
}

static inline void
extent_is_head_set(extent_t *extent, bool is_head) {
	if (maps_coalesce) {
		not_reached();
	}

	extent->e_bits = (extent->e_bits & ~EXTENT_BITS_IS_HEAD_MASK) |
	    ((uint64_t)is_head << EXTENT_BITS_IS_HEAD_SHIFT);
}
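
/*
 * Initialize all fields of an arena extent in one shot.  binshard and nfree
 * are only meaningful for slab extents and are set separately via
 * extent_nfree_binshard_set().
 */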
static inline void
extent_init(extent_t *extent, arena_t *arena, void *addr, size_t size,
    bool slab, szind_t szind, size_t sn, extent_state_t state, bool zeroed,
    bool committed, bool dumpable, extent_head_state_t is_head) {
	assert(addr == PAGE_ADDR2BASE(addr) || !slab);

	extent_arena_set(extent, arena);
	extent_addr_set(extent, addr);
	extent_size_set(extent, size);
	extent_slab_set(extent, slab);
	extent_szind_set(extent, szind);
	extent_sn_set(extent, sn);
	extent_state_set(extent, state);
	extent_zeroed_set(extent, zeroed);
	extent_committed_set(extent, committed);
	extent_dumpable_set(extent, dumpable);
	ql_elm_new(extent, ql_link);
	if (!maps_coalesce) {
		extent_is_head_set(extent, (is_head == EXTENT_IS_HEAD) ? true :
		    false);
	}
	if (config_prof) {
		extent_prof_tctx_set(extent, NULL);
	}
}
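
/*
 * extent_binit() initializes an extent that describes a block owned by the
 * internal base allocator: it has no owning arena, an "invalid" size class
 * (SC_NSIZES), and tracks a raw block size (bsize) instead of a
 * page-quantized size.
 */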
static inline void
extent_binit(extent_t *extent, void *addr, size_t bsize, size_t sn) {
	extent_arena_set(extent, NULL);
	extent_addr_set(extent, addr);
	extent_bsize_set(extent, bsize);
	extent_slab_set(extent, false);
	extent_szind_set(extent, SC_NSIZES);
	extent_sn_set(extent, sn);
	extent_state_set(extent, extent_state_active);
	extent_zeroed_set(extent, true);
	extent_committed_set(extent, true);
	extent_dumpable_set(extent, true);
}
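
/*
 * The list helpers below are thin wrappers around the ql_ list macros,
 * linking extents through their ql_link field.
 */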
static inline void
extent_list_init(extent_list_t *list) {
	ql_new(list);
}

static inline extent_t *
extent_list_first(const extent_list_t *list) {
	return ql_first(list);
}

static inline extent_t *
extent_list_last(const extent_list_t *list) {
	return ql_last(list, ql_link);
}

static inline void
extent_list_append(extent_list_t *list, extent_t *extent) {
	ql_tail_insert(list, extent, ql_link);
}

static inline void
extent_list_prepend(extent_list_t *list, extent_t *extent) {
	ql_head_insert(list, extent, ql_link);
}

static inline void
extent_list_replace(extent_list_t *list, extent_t *to_remove,
    extent_t *to_insert) {
	ql_after_insert(to_remove, to_insert, ql_link);
	ql_remove(list, to_remove, ql_link);
}

static inline void
extent_list_remove(extent_list_t *list, extent_t *extent) {
	ql_remove(list, extent, ql_link);
}
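
/*
 * Comparators used to keep extents ordered: sn/esn compare (extent) serial
 * numbers, ad compares user addresses, and ead compares the addresses of the
 * extent structs themselves.  snad and esnead layer the corresponding
 * address comparison on top as a tie-breaker, yielding total orders.
 */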
static inline int
extent_sn_comp(const extent_t *a, const extent_t *b) {
	size_t a_sn = extent_sn_get(a);
	size_t b_sn = extent_sn_get(b);

	return (a_sn > b_sn) - (a_sn < b_sn);
}

static inline int
extent_esn_comp(const extent_t *a, const extent_t *b) {
	size_t a_esn = extent_esn_get(a);
	size_t b_esn = extent_esn_get(b);

	return (a_esn > b_esn) - (a_esn < b_esn);
}

static inline int
extent_ad_comp(const extent_t *a, const extent_t *b) {
	uintptr_t a_addr = (uintptr_t)extent_addr_get(a);
	uintptr_t b_addr = (uintptr_t)extent_addr_get(b);

	return (a_addr > b_addr) - (a_addr < b_addr);
}

static inline int
extent_ead_comp(const extent_t *a, const extent_t *b) {
	uintptr_t a_eaddr = (uintptr_t)a;
	uintptr_t b_eaddr = (uintptr_t)b;

	return (a_eaddr > b_eaddr) - (a_eaddr < b_eaddr);
}

static inline int
extent_snad_comp(const extent_t *a, const extent_t *b) {
	int ret;

	ret = extent_sn_comp(a, b);
	if (ret != 0) {
		return ret;
	}

	ret = extent_ad_comp(a, b);
	return ret;
}

static inline int
extent_esnead_comp(const extent_t *a, const extent_t *b) {
	int ret;

	ret = extent_esn_comp(a, b);
	if (ret != 0) {
		return ret;
	}

	ret = extent_ead_comp(a, b);
	return ret;
}

#endif /* JEMALLOC_INTERNAL_EXTENT_INLINES_H */