Lines Matching refs:allocator

1478 void __kmpc_destroy_allocator(int gtid, omp_allocator_handle_t allocator) {  in __kmpc_destroy_allocator()  argument
1479 if (allocator > kmp_max_mem_alloc) in __kmpc_destroy_allocator()
1480 __kmp_free(allocator); in __kmpc_destroy_allocator()
1483 void __kmpc_set_default_allocator(int gtid, omp_allocator_handle_t allocator) { in __kmpc_set_default_allocator() argument
1484 if (allocator == omp_null_allocator) in __kmpc_set_default_allocator()
1485 allocator = omp_default_mem_alloc; in __kmpc_set_default_allocator()
1486 __kmp_threads[gtid]->th.th_def_allocator = allocator; in __kmpc_set_default_allocator()
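The two entry points above sit behind omp_destroy_allocator() and omp_set_default_allocator() in the user-level API: only user-defined handles (allocator > kmp_max_mem_alloc) are actually freed, and the default handle is stored per thread in th_def_allocator. A minimal sketch of how they are normally reached through <omp.h>; the trait values are illustrative, not taken from this file:

#include <omp.h>
#include <stdio.h>

int main(void) {
  /* Build a custom allocator: 64-byte alignment, fall back to the
     default memory space if a request cannot be satisfied. */
  omp_alloctrait_t traits[] = {
      {omp_atk_alignment, 64},
      {omp_atk_fallback, omp_atv_default_mem_fb}};
  omp_allocator_handle_t al =
      omp_init_allocator(omp_default_mem_space, 2, traits);

  /* omp_set_default_allocator() reaches __kmpc_set_default_allocator(),
     which stores the handle in th_def_allocator for the calling thread. */
  omp_set_default_allocator(al);

  /* omp_null_allocator now resolves to 'al' (see the th_def_allocator
     lookup in __kmp_alloc further down). */
  double *buf = (double *)omp_alloc(1024 * sizeof(double), omp_null_allocator);
  if (buf) {
    buf[0] = 1.0;
    printf("allocated %p with the thread's default allocator\n", (void *)buf);
    omp_free(buf, omp_null_allocator);
  }

  /* omp_destroy_allocator() reaches __kmpc_destroy_allocator(); only
     user-defined handles are released, predefined ones are left alone. */
  omp_destroy_allocator(al);
  return 0;
}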
1498 kmp_allocator_t *allocator; // allocator member
1503 void *__kmpc_alloc(int gtid, size_t size, omp_allocator_handle_t allocator) { in __kmpc_alloc() argument
1504 KE_TRACE(25, ("__kmpc_alloc: T#%d (%d, %p)\n", gtid, (int)size, allocator)); in __kmpc_alloc()
1505 void *ptr = __kmp_alloc(gtid, 0, size, allocator); in __kmpc_alloc()
1511 omp_allocator_handle_t allocator) { in __kmpc_aligned_alloc() argument
1513 (int)size, allocator)); in __kmpc_aligned_alloc()
1514 void *ptr = __kmp_alloc(gtid, algn, size, allocator); in __kmpc_aligned_alloc()
1520 omp_allocator_handle_t allocator) { in __kmpc_calloc() argument
1522 (int)size, allocator)); in __kmpc_calloc()
1523 void *ptr = __kmp_calloc(gtid, 0, nmemb, size, allocator); in __kmpc_calloc()
1529 omp_allocator_handle_t allocator, in __kmpc_realloc() argument
1532 allocator, free_allocator)); in __kmpc_realloc()
1533 void *nptr = __kmp_realloc(gtid, ptr, size, allocator, free_allocator); in __kmpc_realloc()
1538 void __kmpc_free(int gtid, void *ptr, omp_allocator_handle_t allocator) { in __kmpc_free() argument
1539 KE_TRACE(25, ("__kmpc_free: T#%d free(%p,%p)\n", gtid, ptr, allocator)); in __kmpc_free()
1540 ___kmpc_free(gtid, ptr, allocator); in __kmpc_free()
1541 KE_TRACE(10, ("__kmpc_free: T#%d freed %p (%p)\n", gtid, ptr, allocator)); in __kmpc_free()
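__kmpc_alloc, __kmpc_aligned_alloc, __kmpc_calloc, __kmpc_realloc and __kmpc_free are thin tracing wrappers that forward to the internal __kmp_alloc, __kmp_calloc, __kmp_realloc and ___kmpc_free shown further down. A hedged sketch of the user-level calls they correspond to; omp_aligned_alloc, omp_calloc and omp_realloc need OpenMP 5.1 support in <omp.h>:

#include <omp.h>

void allocator_roundtrip(void) {
  /* __kmpc_alloc: plain allocation, alignment 0 means "use the default". */
  int *a = (int *)omp_alloc(128 * sizeof(int), omp_default_mem_alloc);

  /* __kmpc_aligned_alloc: the explicit alignment is passed through as 'algn'. */
  int *b = (int *)omp_aligned_alloc(64, 128 * sizeof(int),
                                    omp_default_mem_alloc);

  /* __kmpc_calloc: nmemb * size bytes, zero-initialized. */
  int *c = (int *)omp_calloc(128, sizeof(int), omp_default_mem_alloc);

  /* __kmpc_realloc: may allocate from one allocator and release the old
     block through another (the free_allocator argument). */
  a = (int *)omp_realloc(a, 256 * sizeof(int), omp_default_mem_alloc,
                         omp_default_mem_alloc);

  if (a) omp_free(a, omp_default_mem_alloc); /* __kmpc_free -> ___kmpc_free */
  if (b) omp_free(b, omp_default_mem_alloc);
  if (c) omp_free(c, omp_default_mem_alloc);
}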
1547 omp_allocator_handle_t allocator) { in __kmp_alloc() argument
1553 if (allocator == omp_null_allocator) in __kmp_alloc()
1554 allocator = __kmp_threads[gtid]->th.th_def_allocator; in __kmp_alloc()
1558 al = RCAST(kmp_allocator_t *, allocator); in __kmp_alloc()
1566 if (allocator > kmp_max_mem_alloc && al->alignment > align) in __kmp_alloc()
1573 if (allocator > kmp_max_mem_alloc) in __kmp_alloc()
1579 if (KMP_IS_TARGET_MEM_ALLOC(allocator)) { in __kmp_alloc()
1585 if (allocator == llvm_omp_target_host_mem_alloc) in __kmp_alloc()
1587 else if (allocator == llvm_omp_target_shared_mem_alloc) in __kmp_alloc()
1597 if (allocator >= kmp_max_mem_alloc && KMP_IS_TARGET_MEM_SPACE(al->memspace)) { in __kmp_alloc()
1614 if (allocator < kmp_max_mem_alloc) { in __kmp_alloc()
1616 if (allocator == omp_high_bw_mem_alloc && mk_hbw_preferred) { in __kmp_alloc()
1618 } else if (allocator == omp_large_cap_mem_alloc && mk_dax_kmem_all) { in __kmp_alloc()
1681 } else if (allocator < kmp_max_mem_alloc) { in __kmp_alloc()
1683 if (allocator == omp_high_bw_mem_alloc) { in __kmp_alloc()
1685 } else if (allocator == omp_large_cap_mem_alloc) { in __kmp_alloc()
1687 } else if (allocator == omp_const_mem_alloc) { in __kmp_alloc()
1689 } else if (allocator == omp_low_lat_mem_alloc) { in __kmp_alloc()
1691 } else if (allocator == omp_cgroup_mem_alloc) { in __kmp_alloc()
1693 } else if (allocator == omp_pteam_mem_alloc) { in __kmp_alloc()
1695 } else if (allocator == omp_thread_mem_alloc) { in __kmp_alloc()
1751 desc.allocator = al; in __kmp_alloc()
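Inside __kmp_alloc, omp_null_allocator is first resolved to the thread's default allocator, a user-defined handle is cast back to its kmp_allocator_t*, and predefined handles (allocator < kmp_max_mem_alloc) are dispatched by value: high-bandwidth and large-capacity requests go to memkind (mk_hbw_preferred, mk_dax_kmem_all) when it is present, otherwise the runtime falls back to the default path; the surviving allocator is finally recorded in the block descriptor (desc.allocator = al). A hedged sketch of both paths from the user side; the 4096-byte alignment is only an example value:

#include <omp.h>
#include <stdio.h>

int main(void) {
  /* Predefined handle, dispatched by value: maps to the HBW memkind when
     the memkind library is available, otherwise the runtime typically
     falls back to the default allocation path. */
  void *hbw = omp_alloc(1 << 20, omp_high_bw_mem_alloc);

  /* User-defined handle (allocator > kmp_max_mem_alloc) is really a
     kmp_allocator_t*; traits such as alignment are honored in __kmp_alloc. */
  omp_alloctrait_t traits[] = {{omp_atk_alignment, 4096}};
  omp_allocator_handle_t page_aligned =
      omp_init_allocator(omp_default_mem_space, 1, traits);
  void *p = omp_alloc(1 << 20, page_aligned);

  printf("hbw=%p page_aligned=%p\n", hbw, p);
  if (p) omp_free(p, page_aligned);
  if (hbw) omp_free(hbw, omp_high_bw_mem_alloc);
  omp_destroy_allocator(page_aligned);
  return 0;
}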
1759 omp_allocator_handle_t allocator) { in __kmp_calloc() argument
1764 if (allocator == omp_null_allocator) in __kmp_calloc()
1765 allocator = __kmp_threads[gtid]->th.th_def_allocator; in __kmp_calloc()
1767 al = RCAST(kmp_allocator_t *, allocator); in __kmp_calloc()
1779 ptr = __kmp_alloc(gtid, algn, nmemb * size, allocator); in __kmp_calloc()
1788 omp_allocator_handle_t allocator, in __kmp_realloc() argument
1799 nptr = __kmp_alloc(gtid, 0, size, allocator); in __kmp_realloc()
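A hedged sketch of what the two internal helpers boil down to from the caller's point of view: __kmp_calloc forwards nmemb * size to __kmp_alloc and returns zeroed memory, and __kmp_realloc with a null pointer reduces to a plain allocation (the bare __kmp_alloc call at the end of this excerpt). Function and variable names below are illustrative:

#include <omp.h>

void calloc_realloc_demo(void) {
  /* Zero-initialized block: nmemb * size bytes through the chosen allocator. */
  long *tab = (long *)omp_calloc(64, sizeof(long), omp_default_mem_alloc);

  /* Per the OpenMP 5.1 semantics, realloc from a NULL pointer behaves
     like a fresh omp_alloc ... */
  double *buf = (double *)omp_realloc(NULL, 256 * sizeof(double),
                                      omp_default_mem_alloc,
                                      omp_default_mem_alloc);
  /* ... and growing later preserves the old contents while the old block
     is released through free_allocator. */
  buf = (double *)omp_realloc(buf, 512 * sizeof(double),
                              omp_default_mem_alloc, omp_default_mem_alloc);

  if (buf) omp_free(buf, omp_default_mem_alloc);
  if (tab) omp_free(tab, omp_default_mem_alloc);
}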
1824 void ___kmpc_free(int gtid, void *ptr, omp_allocator_handle_t allocator) { in ___kmpc_free() argument
1830 al = RCAST(kmp_allocator_t *, CCAST(omp_allocator_handle_t, allocator)); in ___kmpc_free()
1834 if (__kmp_target_mem_available && (KMP_IS_TARGET_MEM_ALLOC(allocator) || in ___kmpc_free()
1835 (allocator > kmp_max_mem_alloc && in ___kmpc_free()
1839 if (allocator == llvm_omp_target_host_mem_alloc) { in ___kmpc_free()
1841 } else if (allocator == llvm_omp_target_shared_mem_alloc) { in ___kmpc_free()
1843 } else if (allocator == llvm_omp_target_device_mem_alloc) { in ___kmpc_free()
1854 if (allocator) { in ___kmpc_free()
1855 KMP_DEBUG_ASSERT(desc.allocator == al || desc.allocator == al->fb_data); in ___kmpc_free()
1857 al = desc.allocator; in ___kmpc_free()
1861 if (allocator > kmp_max_mem_alloc && kmp_target_unlock_mem && al->pinned) { in ___kmpc_free()
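___kmpc_free resolves a null handle through the block descriptor (the desc.allocator assertion above), takes a separate branch for the LLVM target-memory allocators, and unlocks pinned target memory before releasing user-defined allocations. A hedged sketch of the corresponding user-level behavior, assuming an <omp.h> that provides the llvm_omp_target_* extension handles; anything beyond what the excerpt shows is an assumption:

#include <omp.h>
#include <stddef.h>

void free_paths_demo(size_t n) {
  /* Host-accessible target memory (typically pinned host memory); with no
     offload runtime available the allocation may simply come back NULL. */
  int *pinned = (int *)omp_alloc(n * sizeof(int),
                                 llvm_omp_target_host_mem_alloc);
  if (pinned) {
    pinned[0] = 42;
    /* Freeing with the same handle takes the target-memory branch of
       ___kmpc_free instead of the regular host free path. */
    omp_free(pinned, llvm_omp_target_host_mem_alloc);
  }

  /* Passing omp_null_allocator to omp_free() is also fine: the block's
     descriptor records the owning allocator (the desc.allocator checked
     by the assertion above), so the runtime can recover it. */
  int *plain = (int *)omp_alloc(n * sizeof(int), omp_default_mem_alloc);
  if (plain)
    omp_free(plain, omp_null_allocator);
}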