Searched refs: __GFP_THISNODE (Results 1 – 18 of 18) sorted by relevance
153 if (unlikely(flags & __GFP_THISNODE)) in gfp_zonelist()
247 gfp_t warn_gfp = gfp_mask & (__GFP_THISNODE|__GFP_NOWARN); in warn_if_node_offline()
249 if (warn_gfp != (__GFP_THISNODE|__GFP_NOWARN)) in warn_if_node_offline()
152 #define __GFP_THISNODE ((__force gfp_t)___GFP_THISNODE) macro
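The definition above is a single gfp_t bit. When set, the page allocator uses the no-fallback zonelist (the gfp_zonelist() hit above), so the request is satisfied on the requested node or fails outright instead of spilling onto another node. A minimal caller-side sketch, assuming a kernel-module context; alloc_page_on_node() is a hypothetical helper, not one of the hits above:

#include <linux/gfp.h>
#include <linux/mm.h>

/* Sketch: allocate one page strictly on node `nid`, or fail. */
static struct page *alloc_page_on_node(int nid)
{
	/*
	 * GFP_KERNEL | __GFP_THISNODE: may sleep, but node-exact;
	 * returns NULL if `nid` cannot satisfy the request.
	 */
	return alloc_pages_node(nid, GFP_KERNEL | __GFP_THISNODE, 0);
}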
914 modified_mask |= (gfp_mask & __GFP_THISNODE); in htlb_modify_alloc_mask()
123 GFP_KERNEL|__GFP_THISNODE, in cbe_ptcal_enable_on_node()
121 page = alloc_contig_pages(nr_pages, GFP_KERNEL | __GFP_THISNODE | in memtrace_alloc_node()
45 gfpflag_string(__GFP_THISNODE), \
615 GFP_KERNEL | __GFP_ZERO | __GFP_THISNODE | in core_imc_mem_init()
932 GFP_KERNEL | __GFP_ZERO | __GFP_THISNODE | in thread_imc_mem_alloc()
1215 GFP_KERNEL | __GFP_ZERO | __GFP_THISNODE | in trace_imc_mem_alloc()
1072 .gfp_mask = GFP_HIGHUSER_MOVABLE | __GFP_THISNODE, in migrate_to_node()
2072 WARN_ON_ONCE(gfp & __GFP_THISNODE); in policy_nodemask()
2253 gfp | __GFP_THISNODE | __GFP_NORETRY, order); in alloc_pages_mpol_noprof()
2342 if (!in_interrupt() && !(gfp & __GFP_THISNODE)) in alloc_pages_noprof()
2539 if (!in_interrupt() && !(gfp & __GFP_THISNODE)) in alloc_pages_bulk_array_mempolicy_noprof()
278 if (oc->gfp_mask & __GFP_THISNODE) in constrained_alloc()
282 * This is not a __GFP_THISNODE allocation, so a truncated nodemask in in constrained_alloc()
41 #define GFP_CONSTRAINT_MASK (__GFP_HARDWALL|__GFP_THISNODE)
2183 .gfp_mask = GFP_HIGHUSER_MOVABLE | __GFP_THISNODE, in do_move_pages_to_node()
2603 gfp_t gfp = __GFP_THISNODE; in alloc_misplaced_dst_folio()
1536 if (!folio && !(gfp_mask & __GFP_THISNODE)) { in alloc_gigantic_folio()
2130 gfp_t gfp_mask = htlb_alloc_mask(h) | __GFP_THISNODE; in alloc_pool_huge_folio()
2429 gfp_mask |= __GFP_THISNODE; in alloc_hugetlb_folio_nodemask()
2850 gfp_t gfp_mask = htlb_alloc_mask(h) | __GFP_THISNODE; in alloc_and_dissolve_hugetlb_folio()
3311 gfp_t gfp_mask = htlb_alloc_mask(h) | __GFP_THISNODE; in hugetlb_hstate_alloc_pages_onenode()
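The recurring hugetlb pattern above ORs __GFP_THISNODE into the hstate's base mask (htlb_alloc_mask()) so a per-node pool page is taken from the requested node or not at all. A hedged sketch of that pattern; grow_pool_on_node() is hypothetical and sidesteps the gigantic-page path that the alloc_gigantic_folio() hit covers:

#include <linux/gfp.h>
#include <linux/mm.h>
#include <linux/hugetlb.h>

/* Sketch: try to grow hstate `h` by one huge page strictly on `nid`. */
static struct folio *grow_pool_on_node(struct hstate *h, int nid)
{
	gfp_t gfp_mask = htlb_alloc_mask(h) | __GFP_THISNODE;
	struct page *page;

	page = alloc_pages_node(nid, gfp_mask, huge_page_order(h));
	return page ? page_folio(page) : NULL;
}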
2941 if (slab || (node != NUMA_NO_NODE && (pc->flags & __GFP_THISNODE))) in get_partial()
3783 * When a preferred node is indicated but no __GFP_THISNODE in ___slab_alloc()
3786 * __GFP_THISNODE in pc.flags for get_partial() in ___slab_alloc()
3788 * GPF_NOWAIT | __GFP_THISNODE opportunistically in ___slab_alloc()
3793 if (unlikely(node != NUMA_NO_NODE && !(gfpflags & __GFP_THISNODE) in ___slab_alloc()
3795 pc.flags = GFP_NOWAIT | __GFP_THISNODE; in ___slab_alloc()
3822 if (node != NUMA_NO_NODE && !(gfpflags & __GFP_THISNODE) in ___slab_alloc()
4180 * Fallback to other node is possible if __GFP_THISNODE is not set.
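The ___slab_alloc() comments above describe a staged policy when a node is preferred but the caller did not pass __GFP_THISNODE: first an opportunistic, non-sleeping, node-exact attempt (GFP_NOWAIT | __GFP_THISNODE), then a retry with the caller's original flags so other nodes remain eligible. A simplified caller-side sketch of the same idea; alloc_prefer_node() is hypothetical, and the real logic lives inside the slab allocator itself:

#include <linux/gfp.h>
#include <linux/numa.h>
#include <linux/slab.h>

/* Sketch: prefer `node`, but fall back to any node on failure. */
static void *alloc_prefer_node(struct kmem_cache *s, gfp_t gfpflags, int node)
{
	void *p = NULL;

	if (node != NUMA_NO_NODE && !(gfpflags & __GFP_THISNODE))
		/* 1) cheap node-exact attempt, no reclaim, no sleeping */
		p = kmem_cache_alloc_node(s, GFP_NOWAIT | __GFP_THISNODE, node);
	if (!p)
		/* 2) caller's original flags; other nodes allowed again */
		p = kmem_cache_alloc_node(s, gfpflags, node);
	return p;
}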
972 mtc->gfp_mask |= __GFP_THISNODE; in alloc_migrate_folio()
977 mtc->gfp_mask &= ~__GFP_THISNODE; in alloc_migrate_folio()
3609 if (gfp_mask & (__GFP_RETRY_MAYFAIL | __GFP_THISNODE)) in __alloc_pages_may_oom()
94 __GFP_THISNODE; in ttm_pool_alloc_page()
97 __GFP_THISNODE | in ovs_flow_stats_update()
175 GFP_KERNEL | __GFP_ZERO | __GFP_THISNODE, in xpc_create_gru_mq_uv()