--- arena.c (d1d015864103b253b3fcb2f72a0da5b0cfeb31b6)
+++ arena.c (2b06b2013c82ee7f744ac5b6a413edede3eeb8cd)

 #define JEMALLOC_ARENA_C_
 #include "jemalloc/internal/jemalloc_internal.h"

 /******************************************************************************/
 /* Data. */

 ssize_t opt_lg_dirty_mult = LG_DIRTY_MULT_DEFAULT;
 arena_bin_info_t arena_bin_info[NBINS];

--- 355 unchanged lines hidden ---

     VALGRIND_MAKE_MEM_UNDEFINED((void *)((uintptr_t)chunk + (run_ind <<
         LG_PAGE)), (npages << LG_PAGE));
     memset((void *)((uintptr_t)chunk + (run_ind << LG_PAGE)), 0,
         (npages << LG_PAGE));
 }

 static inline void
+arena_run_page_mark_zeroed(arena_chunk_t *chunk, size_t run_ind)
+{
+
+    VALGRIND_MAKE_MEM_DEFINED((void *)((uintptr_t)chunk + (run_ind <<
+        LG_PAGE)), PAGE);
+}
+
+static inline void
 arena_run_page_validate_zeroed(arena_chunk_t *chunk, size_t run_ind)
 {
     size_t i;
     UNUSED size_t *p = (size_t *)((uintptr_t)chunk + (run_ind << LG_PAGE));

-    VALGRIND_MAKE_MEM_DEFINED((void *)((uintptr_t)chunk + (run_ind <<
-        LG_PAGE)), PAGE);
+    arena_run_page_mark_zeroed(chunk, run_ind);
     for (i = 0; i < PAGE / sizeof(size_t); i++)
         assert(p[i] == 0);
 }

 static void
 arena_run_split(arena_t *arena, arena_run_t *run, size_t size, bool large,
     size_t binind, bool zero)
 {

--- 66 unchanged lines hidden ---

             for (i = 0; i < need_pages; i++) {
                 if (arena_mapbits_unzeroed_get(chunk,
                     run_ind+i) != 0) {
                     arena_run_zero(chunk, run_ind+i,
                         1);
                 } else if (config_debug) {
                     arena_run_page_validate_zeroed(
                         chunk, run_ind+i);
+                } else {
+                    arena_run_page_mark_zeroed(
+                        chunk, run_ind+i);
                 }
             }
         } else {
             /*
              * The run is dirty, so all pages must be
              * zeroed.
              */
             arena_run_zero(chunk, run_ind, need_pages);
         }
+    } else {
+        VALGRIND_MAKE_MEM_UNDEFINED((void *)((uintptr_t)chunk +
+            (run_ind << LG_PAGE)), (need_pages << LG_PAGE));
     }

     /*
      * Set the last element first, in case the run only contains one
      * page (i.e. both statements set the same element).
      */
     arena_mapbits_large_set(chunk, run_ind+need_pages-1, 0,
         flag_dirty);

--- 25 unchanged lines hidden ---

         arena_mapbits_small_set(chunk, run_ind+need_pages-1,
             need_pages-1, binind, flag_dirty);
         if (config_debug && flag_dirty == 0 &&
             arena_mapbits_unzeroed_get(chunk, run_ind+need_pages-1) ==
             0) {
             arena_run_page_validate_zeroed(chunk,
                 run_ind+need_pages-1);
         }
+        VALGRIND_MAKE_MEM_UNDEFINED((void *)((uintptr_t)chunk +
+            (run_ind << LG_PAGE)), (need_pages << LG_PAGE));
     }
-    VALGRIND_MAKE_MEM_UNDEFINED((void *)((uintptr_t)chunk + (run_ind <<
-        LG_PAGE)), (need_pages << LG_PAGE));
 }

 static arena_chunk_t *
 arena_chunk_alloc(arena_t *arena)
 {
     arena_chunk_t *chunk;
     size_t i;

--- 42 unchanged lines hidden ---

         unzeroed = zero ? 0 : CHUNK_MAP_UNZEROED;
         arena_mapbits_unallocated_set(chunk, map_bias, arena_maxclass,
             unzeroed);
         /*
          * There is no need to initialize the internal page map entries
          * unless the chunk is not zeroed.
          */
         if (zero == false) {
+            VALGRIND_MAKE_MEM_UNDEFINED(
+                (void *)arena_mapp_get(chunk, map_bias+1),
+                (size_t)((uintptr_t) arena_mapp_get(chunk,
+                chunk_npages-1) - (uintptr_t)arena_mapp_get(chunk,
+                map_bias+1)));
             for (i = map_bias+1; i < chunk_npages-1; i++)
                 arena_mapbits_unzeroed_set(chunk, i, unzeroed);
-        } else if (config_debug) {
+        } else {
             VALGRIND_MAKE_MEM_DEFINED(
                 (void *)arena_mapp_get(chunk, map_bias+1),
-                (void *)((uintptr_t)
-                arena_mapp_get(chunk, chunk_npages-1)
-                - (uintptr_t)arena_mapp_get(chunk, map_bias+1)));
-            for (i = map_bias+1; i < chunk_npages-1; i++) {
-                assert(arena_mapbits_unzeroed_get(chunk, i) ==
-                    unzeroed);
+                (size_t)((uintptr_t) arena_mapp_get(chunk,
+                chunk_npages-1) - (uintptr_t)arena_mapp_get(chunk,
+                map_bias+1)));
+            if (config_debug) {
+                for (i = map_bias+1; i < chunk_npages-1; i++) {
+                    assert(arena_mapbits_unzeroed_get(chunk,
+                        i) == unzeroed);
+                }
             }
         }
         arena_mapbits_unallocated_set(chunk, chunk_npages-1,
             arena_maxclass, unzeroed);
     }

     /* Insert the run into the runs_avail tree. */
     arena_avail_insert(arena, chunk, map_bias, chunk_npages-map_bias,

--- 862 unchanged lines hidden ---

     if (zero == false) {
         if (config_fill) {
             if (opt_junk) {
                 arena_alloc_junk_small(ret,
                     &arena_bin_info[binind], false);
             } else if (opt_zero)
                 memset(ret, 0, size);
         }
+        VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
     } else {
         if (config_fill && opt_junk) {
             arena_alloc_junk_small(ret, &arena_bin_info[binind],
                 true);
         }
         VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
         memset(ret, 0, size);
     }
-    VALGRIND_MAKE_MEM_UNDEFINED(ret, size);

     return (ret);
 }

 void *
 arena_malloc_large(arena_t *arena, size_t size, bool zero)
 {
     void *ret;

--- 888 unchanged lines hidden ---
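
For readers unfamiliar with the Memcheck client requests this diff moves around: VALGRIND_MAKE_MEM_UNDEFINED(addr, len) tells Valgrind that a range holds allocated but uninitialized bytes, while VALGRIND_MAKE_MEM_DEFINED(addr, len) declares the contents known, so later reads of it are not flagged. The standalone sketch below is not jemalloc code; the demo_* names, DEMO_PAGE size, and the no-op fallback macros are invented for illustration. It only mirrors the pattern applied in the diff: mark recycled pages undefined before handing them out, and mark pages the allocator already knows are zeroed as defined before the debug loop that asserts they are zero.

/*
 * Minimal sketch of the Memcheck annotation pattern, assuming the real
 * macros from <valgrind/memcheck.h> when DEMO_HAVE_VALGRIND is defined;
 * otherwise the annotations compile to no-ops, much as jemalloc disables
 * them when built without Valgrind support.
 */
#include <assert.h>
#include <stdlib.h>
#include <string.h>

#ifdef DEMO_HAVE_VALGRIND
#include <valgrind/memcheck.h>
#else
#define VALGRIND_MAKE_MEM_UNDEFINED(addr, len) ((void)0)
#define VALGRIND_MAKE_MEM_DEFINED(addr, len) ((void)0)
#endif

#define DEMO_PAGE 4096

/* Hand a recycled page to the caller: its old contents are garbage. */
static void *
demo_recycle_page(void *page)
{

    /* Tell Memcheck the bytes must be written before they are read. */
    VALGRIND_MAKE_MEM_UNDEFINED(page, DEMO_PAGE);
    return (page);
}

/* Debug check on a page the allocator believes is already zeroed. */
static void
demo_validate_zeroed(void *page)
{
    size_t i;
    size_t *p = (size_t *)page;

    /*
     * Mark the page defined first; otherwise the reads below could be
     * reported as uses of uninitialized memory even though the
     * allocator knows what the contents are (all zero).
     */
    VALGRIND_MAKE_MEM_DEFINED(page, DEMO_PAGE);
    for (i = 0; i < DEMO_PAGE / sizeof(size_t); i++)
        assert(p[i] == 0);
}

int
main(void)
{
    void *page = calloc(1, DEMO_PAGE);

    demo_validate_zeroed(page);
    memset(demo_recycle_page(page), 0xa5, DEMO_PAGE);
    free(page);
    return (0);
}

Built with -DDEMO_HAVE_VALGRIND and run under valgrind --tool=memcheck, any value read from the page between demo_recycle_page() and the memset() would be reported as uninitialized once it influences a branch or system call; without the annotation, Memcheck would still trust the calloc()-time "defined" state and stay silent, which is the kind of false negative the annotations in the diff are there to avoid.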