Searched refs:zero_folio (Results 1 – 2 of 2) sorted by relevance
/linux/mm/huge_memory.c

    213  struct folio *zero_folio;    in get_huge_zero_page() local
    218  zero_folio = folio_alloc((GFP_TRANSHUGE | __GFP_ZERO) & ~__GFP_MOVABLE,    in get_huge_zero_page()
    220  if (!zero_folio) {    in get_huge_zero_page()
    225  folio_clear_large_rmappable(zero_folio);    in get_huge_zero_page()
    227  if (cmpxchg(&huge_zero_folio, NULL, zero_folio)) {    in get_huge_zero_page()
    229  folio_put(zero_folio);    in get_huge_zero_page()
    232  WRITE_ONCE(huge_zero_pfn, folio_pfn(zero_folio));    in get_huge_zero_page()
    281  struct folio *zero_folio = xchg(&huge_zero_folio, NULL);    in shrink_huge_zero_page_scan() local
    282  BUG_ON(zero_folio == NULL);    in shrink_huge_zero_page_scan()
    284  folio_put(zero_folio);    in shrink_huge_zero_page_scan()
    [all …]
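These hits show the lazy setup and teardown of the huge zero folio: get_huge_zero_page() allocates a zeroed candidate, publishes it with cmpxchg(), and drops its reference if another CPU installed one first, while shrink_huge_zero_page_scan() retires the folio by swapping the pointer back to NULL and putting the reference. Below is a minimal userspace C sketch of that allocate-then-cmpxchg publication pattern using C11 atomics; huge_zero_buf, zero_buf_alloc() and zero_buf_put() are hypothetical stand-ins for the kernel's folio machinery, not kernel APIs.

    #include <stdatomic.h>
    #include <stdio.h>
    #include <stdlib.h>

    #define ZERO_BUF_SIZE (2UL * 1024 * 1024)   /* PMD-sized, like the huge zero folio */

    static _Atomic(void *) huge_zero_buf;       /* shared singleton, NULL until published */

    static void *zero_buf_alloc(void)
    {
        return calloc(1, ZERO_BUF_SIZE);        /* zero-filled, as with __GFP_ZERO */
    }

    static void zero_buf_put(void *buf)
    {
        free(buf);                              /* stand-in for dropping a folio reference */
    }

    /* Return the shared zeroed buffer, setting it up on first use. */
    static void *get_huge_zero_buf(void)
    {
        void *expected = NULL;
        void *candidate = zero_buf_alloc();

        if (!candidate)
            return NULL;

        /* Try to publish our candidate; if another thread won, use its buffer. */
        if (!atomic_compare_exchange_strong(&huge_zero_buf, &expected, candidate)) {
            zero_buf_put(candidate);            /* lost the race: free our copy */
            return expected;                    /* compare-exchange left the winner's pointer here */
        }
        return candidate;
    }

    int main(void)
    {
        void *a = get_huge_zero_buf();
        void *b = get_huge_zero_buf();
        printf("same shared buffer: %s\n", (a && a == b) ? "yes" : "no");
        return 0;
    }

The shrinker side in the excerpt is the mirror image of this publication step: xchg() the shared pointer back to NULL so no new users can find the folio, then drop the reference that publication took.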
/linux/fs/dax.c

    1210  struct folio *zero_folio;    in dax_pmd_load_hole() local
    1215  zero_folio = mm_get_huge_zero_folio(vmf->vma->vm_mm);    in dax_pmd_load_hole()
    1217  if (unlikely(!zero_folio))    in dax_pmd_load_hole()
    1220  pfn = page_to_pfn_t(&zero_folio->page);    in dax_pmd_load_hole()
    1240  pmd_entry = mk_pmd(&zero_folio->page, vmf->vma->vm_page_prot);    in dax_pmd_load_hole()
    1244  trace_dax_pmd_load_hole(inode, vmf, zero_folio, *entry);    in dax_pmd_load_hole()
    1250  trace_dax_pmd_load_hole_fallback(inode, vmf, zero_folio, *entry);    in dax_pmd_load_hole()
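The dax.c hits are the consumer side: dax_pmd_load_hole() services a PMD-sized read fault over a file hole by taking the per-mm huge zero folio via mm_get_huge_zero_folio(), building a PMD entry for it with mk_pmd() and the VMA's protection bits, and tracing a fallback when the folio cannot be obtained. As a loose userspace analogue of the underlying idea, the sketch below backs several read-only "hole" mappings with one shared, never-written zero region instead of allocating memory per mapping; memfd_create() and the 2 MiB size are illustrative choices here, not part of the DAX code path.

    #define _GNU_SOURCE
    #include <stdio.h>
    #include <sys/mman.h>
    #include <unistd.h>

    #define HOLE_SIZE (2UL * 1024 * 1024)   /* PMD-sized, like a DAX PMD hole */

    int main(void)
    {
        /* One shared backing object that is never written, so it stays all zeroes. */
        int fd = memfd_create("shared_zero", 0);
        if (fd < 0 || ftruncate(fd, HOLE_SIZE) < 0) {
            perror("memfd/ftruncate");
            return 1;
        }

        /* Two independent read-only mappings of the same zero backing. */
        char *a = mmap(NULL, HOLE_SIZE, PROT_READ, MAP_SHARED, fd, 0);
        char *b = mmap(NULL, HOLE_SIZE, PROT_READ, MAP_SHARED, fd, 0);
        if (a == MAP_FAILED || b == MAP_FAILED) {
            perror("mmap");
            return 1;
        }

        printf("a[0]=%d, b[last]=%d: both mappings read back as zeroes\n",
               a[0], b[HOLE_SIZE - 1]);

        munmap(a, HOLE_SIZE);
        munmap(b, HOLE_SIZE);
        close(fd);
        return 0;
    }

The design point the excerpt illustrates is the same: read-only access to a hole never needs private memory, so one shared zero object can back arbitrarily many mappings until a write fault forces a real allocation.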