Searched refs:pmdp (Results 1 – 25 of 95) sorted by relevance

/linux/mm/
pgtable-generic.c
112 unsigned long address, pmd_t *pmdp, in pmdp_set_access_flags() argument
115 int changed = !pmd_same(*pmdp, entry); in pmdp_set_access_flags()
118 set_pmd_at(vma->vm_mm, address, pmdp, entry); in pmdp_set_access_flags()
127 unsigned long address, pmd_t *pmdp) in pmdp_clear_flush_young() argument
131 young = pmdp_test_and_clear_young(vma, address, pmdp); in pmdp_clear_flush_young()
140 pmd_t *pmdp) in pmdp_huge_clear_flush() argument
144 VM_BUG_ON(pmd_present(*pmdp) && !pmd_trans_huge(*pmdp)); in pmdp_huge_clear_flush()
145 pmd = pmdp_huge_get_and_clear(vma->vm_mm, address, pmdp); in pmdp_huge_clear_flush()
166 void pgtable_trans_huge_deposit(struct mm_struct *mm, pmd_t *pmdp, in pgtable_trans_huge_deposit() argument
169 assert_spin_locked(pmd_lockptr(mm, pmdp)); in pgtable_trans_huge_deposit()
[all …]
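
For orientation, a minimal standalone C sketch of the pattern the pgtable-generic.c hits above revolve around: compare the proposed PMD entry with the current one, and only write it back (and flush) when something actually changed. The types and helpers below are simplified stand-ins for illustration, not kernel code.

/*
 * Standalone sketch (not kernel code) of the "check, then update and flush"
 * pattern seen in the generic pmdp_set_access_flags() above.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

typedef struct { uint64_t val; } pmd_t;	/* simplified stand-in for the kernel's pmd_t */

static bool pmd_same(pmd_t a, pmd_t b) { return a.val == b.val; }	/* stand-in */
static void set_pmd(pmd_t *pmdp, pmd_t entry) { *pmdp = entry; }	/* stand-in */

static void flush_tlb_range_stub(unsigned long address)
{
	printf("flush around %#lx\n", address);	/* placeholder for the real TLB flush */
}

/* Returns 1 when the entry was updated (and a flush issued), 0 otherwise. */
static int sketch_pmdp_set_access_flags(unsigned long address, pmd_t *pmdp, pmd_t entry)
{
	int changed = !pmd_same(*pmdp, entry);

	if (changed) {
		set_pmd(pmdp, entry);
		flush_tlb_range_stub(address);
	}
	return changed;
}

int main(void)
{
	pmd_t pmd = { .val = 0x1000 };
	pmd_t entry = { .val = 0x1000 | 0x3 };	/* e.g. accessed/dirty bits set */

	printf("changed=%d\n", sketch_pmdp_set_access_flags(0x200000, &pmd, entry));
	printf("changed=%d\n", sketch_pmdp_set_access_flags(0x200000, &pmd, entry));
	return 0;
}
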
migrate_device.c
136 static int migrate_vma_collect_huge_pmd(pmd_t *pmdp, unsigned long start, in migrate_vma_collect_huge_pmd() argument
147 ptl = pmd_lock(mm, pmdp); in migrate_vma_collect_huge_pmd()
148 if (pmd_none(*pmdp)) { in migrate_vma_collect_huge_pmd()
153 if (pmd_trans_huge(*pmdp)) { in migrate_vma_collect_huge_pmd()
159 folio = pmd_folio(*pmdp); in migrate_vma_collect_huge_pmd()
164 if (pmd_write(*pmdp)) in migrate_vma_collect_huge_pmd()
166 } else if (!pmd_present(*pmdp)) { in migrate_vma_collect_huge_pmd()
167 const softleaf_t entry = softleaf_from_pmd(*pmdp); in migrate_vma_collect_huge_pmd()
206 .pmd = pmdp, in migrate_vma_collect_huge_pmd()
240 if (pmd_none(pmdp_get_lockless(pmdp))) in migrate_vma_collect_huge_pmd()
[all …]
debug_vm_pgtable.c
52 pmd_t *pmdp; member
243 pgtable_trans_huge_deposit(args->mm, args->pmdp, args->start_ptep); in pmd_advanced_tests()
246 set_pmd_at(args->mm, vaddr, args->pmdp, pmd); in pmd_advanced_tests()
248 pmdp_set_wrprotect(args->mm, vaddr, args->pmdp); in pmd_advanced_tests()
249 pmd = pmdp_get(args->pmdp); in pmd_advanced_tests()
251 pmdp_huge_get_and_clear(args->mm, vaddr, args->pmdp); in pmd_advanced_tests()
252 pmd = pmdp_get(args->pmdp); in pmd_advanced_tests()
258 set_pmd_at(args->mm, vaddr, args->pmdp, pmd); in pmd_advanced_tests()
262 pmdp_set_access_flags(args->vma, vaddr, args->pmdp, pmd, 1); in pmd_advanced_tests()
263 pmd = pmdp_get(args->pmdp); in pmd_advanced_tests()
[all …]
/linux/arch/s390/mm/
pgtable.c
344 unsigned long addr, pmd_t *pmdp) in pmdp_idte_local() argument
347 __pmdp_idte(addr, pmdp, IDTE_NODAT | IDTE_GUEST_ASCE, in pmdp_idte_local()
350 __pmdp_idte(addr, pmdp, 0, 0, IDTE_LOCAL); in pmdp_idte_local()
356 unsigned long addr, pmd_t *pmdp) in pmdp_idte_global() argument
359 __pmdp_idte(addr, pmdp, IDTE_NODAT | IDTE_GUEST_ASCE, in pmdp_idte_global()
364 __pmdp_idte(addr, pmdp, 0, 0, IDTE_GLOBAL); in pmdp_idte_global()
371 unsigned long addr, pmd_t *pmdp) in pmdp_flush_direct() argument
375 old = *pmdp; in pmdp_flush_direct()
381 pmdp_idte_local(mm, addr, pmdp); in pmdp_flush_direct()
383 pmdp_idte_global(mm, addr, pmdp); in pmdp_flush_direct()
[all …]
gmap.c
363 pmd_t *pmdp; in __gmap_unlink_by_vmaddr() local
368 pmdp = host_to_guest_pmd_delete(gmap, vmaddr, &gaddr); in __gmap_unlink_by_vmaddr()
369 if (pmdp) { in __gmap_unlink_by_vmaddr()
370 flush = (pmd_val(*pmdp) != _SEGMENT_ENTRY_EMPTY); in __gmap_unlink_by_vmaddr()
371 *pmdp = __pmd(_SEGMENT_ENTRY_EMPTY); in __gmap_unlink_by_vmaddr()
821 pmd_t *pmdp; in gmap_pmd_op_walk() local
824 pmdp = (pmd_t *) gmap_table_walk(gmap, gaddr, 1); in gmap_pmd_op_walk()
825 if (!pmdp) in gmap_pmd_op_walk()
830 return pmd_none(*pmdp) ? NULL : pmdp; in gmap_pmd_op_walk()
833 if (pmd_none(*pmdp)) { in gmap_pmd_op_walk()
[all …]
pageattr.c
86 static int walk_pte_level(pmd_t *pmdp, unsigned long addr, unsigned long end, in walk_pte_level() argument
93 ptep = pte_offset_kernel(pmdp, addr); in walk_pte_level()
120 static int split_pmd_page(pmd_t *pmdp, unsigned long addr) in split_pmd_page() argument
130 pte_addr = pmd_pfn(*pmdp) << PAGE_SHIFT; in split_pmd_page()
131 ro = !!(pmd_val(*pmdp) & _SEGMENT_ENTRY_PROTECT); in split_pmd_page()
132 nx = !!(pmd_val(*pmdp) & _SEGMENT_ENTRY_NOEXEC); in split_pmd_page()
143 pgt_set((unsigned long *)pmdp, pmd_val(new), addr, CRDTE_DTT_SEGMENT); in split_pmd_page()
149 static void modify_pmd_page(pmd_t *pmdp, unsigned long addr, in modify_pmd_page() argument
152 pmd_t new = *pmdp; in modify_pmd_page()
168 pgt_set((unsigned long *)pmdp, pmd_val(new), addr, CRDTE_DTT_SEGMENT); in modify_pmd_page()
[all …]
/linux/arch/x86/include/asm/
pgtable-3level.h
48 static inline void native_set_pmd(pmd_t *pmdp, pmd_t pmd) in native_set_pmd() argument
50 pxx_xchg64(pmd, pmdp, native_pmd_val(pmd)); in native_set_pmd()
74 static inline void native_pmd_clear(pmd_t *pmdp) in native_pmd_clear() argument
76 WRITE_ONCE(pmdp->pmd_low, 0); in native_pmd_clear()
78 WRITE_ONCE(pmdp->pmd_high, 0); in native_pmd_clear()
108 static inline pmd_t native_pmdp_get_and_clear(pmd_t *pmdp) in native_pmdp_get_and_clear() argument
110 return pxx_xchg64(pmd, pmdp, 0ULL); in native_pmdp_get_and_clear()
126 unsigned long address, pmd_t *pmdp, pmd_t pmd) in pmdp_establish() argument
137 old.pmd_low = xchg(&pmdp->pmd_low, pmd.pmd_low); in pmdp_establish()
138 old.pmd_high = READ_ONCE(pmdp->pmd_high); in pmdp_establish()
[all …]
/linux/arch/powerpc/mm/book3s64/
radix_pgtable.c
81 pmd_t *pmdp; in early_map_kernel_page() local
97 pmdp = early_alloc_pgtable(PAGE_SIZE, nid, region_start, in early_map_kernel_page()
99 pud_populate(&init_mm, pudp, pmdp); in early_map_kernel_page()
101 pmdp = pmd_offset(pudp, ea); in early_map_kernel_page()
103 ptep = pmdp_ptep(pmdp); in early_map_kernel_page()
106 if (!pmd_present(*pmdp)) { in early_map_kernel_page()
109 pmd_populate_kernel(&init_mm, pmdp, ptep); in early_map_kernel_page()
111 ptep = pte_offset_kernel(pmdp, ea); in early_map_kernel_page()
133 pmd_t *pmdp; in __map_kernel_page() local
162 pmdp = pmd_alloc(&init_mm, pudp, ea); in __map_kernel_page()
[all …]
pgtable.c
61 pmd_t *pmdp, pmd_t entry, int dirty) in pmdp_set_access_flags() argument
65 WARN_ON(!pmd_trans_huge(*pmdp)); in pmdp_set_access_flags()
66 assert_spin_locked(pmd_lockptr(vma->vm_mm, pmdp)); in pmdp_set_access_flags()
68 changed = !pmd_same(*(pmdp), entry); in pmdp_set_access_flags()
74 __ptep_set_access_flags(vma, pmdp_ptep(pmdp), in pmdp_set_access_flags()
101 unsigned long address, pmd_t *pmdp) in pmdp_test_and_clear_young() argument
103 return __pmdp_test_and_clear_young(vma->vm_mm, address, pmdp); in pmdp_test_and_clear_young()
117 pmd_t *pmdp, pmd_t pmd) in set_pmd_at() argument
125 WARN_ON(pte_hw_valid(pmd_pte(*pmdp)) && !pte_protnone(pmd_pte(*pmdp))); in set_pmd_at()
126 assert_spin_locked(pmd_lockptr(mm, pmdp)); in set_pmd_at()
[all …]
/linux/arch/powerpc/include/asm/nohash/32/
pgalloc.h
17 static inline void pmd_populate_kernel(struct mm_struct *mm, pmd_t *pmdp, in pmd_populate_kernel() argument
21 *pmdp = __pmd((unsigned long)pte | _PMD_PRESENT); in pmd_populate_kernel()
23 *pmdp = __pmd(__pa(pte) | _PMD_PRESENT); in pmd_populate_kernel()
26 static inline void pmd_populate(struct mm_struct *mm, pmd_t *pmdp, in pmd_populate() argument
30 *pmdp = __pmd((unsigned long)pte_page | _PMD_PRESENT); in pmd_populate()
32 *pmdp = __pmd(__pa(pte_page) | _PMD_USER | _PMD_PRESENT); in pmd_populate()
/linux/arch/powerpc/mm/
pgtable.c
229 pmd_t *pmdp = pmd_off_k(va); in unmap_kernel_page() local
230 pte_t *ptep = pte_offset_kernel(pmdp, va); in unmap_kernel_page()
326 pmd_t *pmdp = pmd_off(mm, addr); in set_huge_pte_at() local
331 *pmdp = __pmd(pmd_val(*pmdp) | _PMD_PAGE_8M); in set_huge_pte_at()
332 *(pmdp + 1) = __pmd(pmd_val(*(pmdp + 1)) | _PMD_PAGE_8M); in set_huge_pte_at()
334 __set_huge_pte_at(pmdp, pte_offset_kernel(pmdp, 0), pte_val(pte)); in set_huge_pte_at()
335 __set_huge_pte_at(pmdp, pte_offset_kernel(pmdp + 1, 0), pte_val(pte) + SZ_4M); in set_huge_pte_at()
337 __set_huge_pte_at(pmdp, ptep, pte_val(pte)); in set_huge_pte_at()
436 pmd_t pmd, *pmdp; in __find_linux_pte() local
487 pmdp = pmd_offset(&pud, ea); in __find_linux_pte()
[all …]
pgtable_32.c
42 pmd_t *pmdp = pmd_off_k(addr); in early_ioremap_init() local
45 addr += PGDIR_SIZE, ptep += PTRS_PER_PTE, pmdp++) in early_ioremap_init()
46 pmd_populate_kernel(&init_mm, pmdp, ptep); in early_ioremap_init()
57 pte_t __init *early_pte_alloc_kernel(pmd_t *pmdp, unsigned long va) in early_pte_alloc_kernel() argument
59 if (pmd_none(*pmdp)) { in early_pte_alloc_kernel()
62 pmd_populate_kernel(&init_mm, pmdp, ptep); in early_pte_alloc_kernel()
64 return pte_offset_kernel(pmdp, va); in early_pte_alloc_kernel()
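
The early_pte_alloc_kernel() hits above (and the kasan_pte_offset() helpers further down) follow a populate-on-demand pattern: if the PMD slot is still empty, allocate a PTE table, hook it into the PMD, then return the PTE slot for the address. Below is a minimal userspace C sketch of that pattern; the table geometry, types, and helper names are illustrative stand-ins, not the kernel implementation.

/* Standalone sketch (not kernel code) of populate-on-demand PTE table allocation. */
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define SKETCH_PTRS_PER_PTE 512UL	/* assumed table geometry */
#define SKETCH_PAGE_SHIFT   12

typedef struct { uint64_t *table; } pmd_t;	/* stand-in: the PMD entry is a table pointer */

static int sketch_pmd_none(pmd_t pmd) { return pmd.table == NULL; }

/* Mimics pmd_populate_kernel(): make the PMD point at a PTE table. */
static void sketch_pmd_populate(pmd_t *pmdp, uint64_t *ptep) { pmdp->table = ptep; }

/* Mimics pte_offset_kernel(): index into the PTE table for this address. */
static uint64_t *sketch_pte_offset(pmd_t *pmdp, unsigned long va)
{
	return &pmdp->table[(va >> SKETCH_PAGE_SHIFT) % SKETCH_PTRS_PER_PTE];
}

/* Allocate the PTE table only when the PMD is still empty. */
static uint64_t *sketch_early_pte_alloc(pmd_t *pmdp, unsigned long va)
{
	if (sketch_pmd_none(*pmdp)) {
		uint64_t *ptep = calloc(SKETCH_PTRS_PER_PTE, sizeof(*ptep));

		if (!ptep)
			return NULL;
		sketch_pmd_populate(pmdp, ptep);
	}
	return sketch_pte_offset(pmdp, va);
}

int main(void)
{
	pmd_t pmd = { 0 };
	uint64_t *slot = sketch_early_pte_alloc(&pmd, 0x1234000UL);

	printf("pte slot %p (table %p)\n", (void *)slot, (void *)pmd.table);
	free(pmd.table);
	return 0;
}
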
/linux/arch/arm/include/asm/
pgalloc.h
107 static inline void __pmd_populate(pmd_t *pmdp, phys_addr_t pte, in __pmd_populate() argument
111 pmdp[0] = __pmd(pmdval); in __pmd_populate()
113 pmdp[1] = __pmd(pmdval + 256 * sizeof(pte_t)); in __pmd_populate()
115 flush_pmd_entry(pmdp); in __pmd_populate()
125 pmd_populate_kernel(struct mm_struct *mm, pmd_t *pmdp, pte_t *ptep) in pmd_populate_kernel() argument
130 __pmd_populate(pmdp, __pa(ptep), _PAGE_KERNEL_TABLE); in pmd_populate_kernel()
134 pmd_populate(struct mm_struct *mm, pmd_t *pmdp, pgtable_t ptep) in pmd_populate() argument
144 __pmd_populate(pmdp, page_to_phys(ptep), prot); in pmd_populate()
/linux/include/linux/
pgtable.h
314 unsigned long address, pmd_t *pmdp,
321 unsigned long address, pmd_t *pmdp, in pmdp_set_access_flags() argument
345 static inline pmd_t pmdp_get(pmd_t *pmdp) in pmdp_get() argument
347 return READ_ONCE(*pmdp); in pmdp_get()
391 pmd_t *pmdp) in pmdp_test_and_clear_young() argument
393 pmd_t pmd = *pmdp; in pmdp_test_and_clear_young()
398 set_pmd_at(vma->vm_mm, address, pmdp, pmd_mkold(pmd)); in pmdp_test_and_clear_young()
404 pmd_t *pmdp) in pmdp_test_and_clear_young() argument
420 unsigned long address, pmd_t *pmdp);
427 unsigned long address, pmd_t *pmdp) in pmdp_clear_flush_young() argument
[all …]
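
Among the include/linux/pgtable.h hits above is the generic fallback for pmdp_test_and_clear_young(): report whether the accessed ("young") bit was set and clear it in the same step. A minimal standalone C sketch of that pattern follows; the bit position and helpers are assumed stand-ins rather than the kernel's definitions.

/* Standalone sketch (not kernel code) of the test-and-clear-young pattern. */
#include <stdint.h>
#include <stdio.h>

#define SKETCH_PMD_ACCESSED (1ULL << 5)	/* assumed position of the accessed bit */

typedef struct { uint64_t val; } pmd_t;	/* simplified stand-in */

static int pmd_young(pmd_t pmd) { return (pmd.val & SKETCH_PMD_ACCESSED) != 0; }
static pmd_t pmd_mkold(pmd_t pmd) { pmd.val &= ~SKETCH_PMD_ACCESSED; return pmd; }

/* Returns whether the accessed bit was set, clearing it if so. */
static int sketch_pmdp_test_and_clear_young(pmd_t *pmdp)
{
	pmd_t pmd = *pmdp;
	int young = pmd_young(pmd);

	if (young)
		*pmdp = pmd_mkold(pmd);	/* the kernel writes back via set_pmd_at() */
	return young;
}

int main(void)
{
	pmd_t pmd = { .val = 0x1000 | SKETCH_PMD_ACCESSED };

	printf("first call:  young=%d\n", sketch_pmdp_test_and_clear_young(&pmd));
	printf("second call: young=%d\n", sketch_pmdp_test_and_clear_young(&pmd));
	return 0;
}
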
/linux/arch/powerpc/mm/kasan/
init_book3e_64.c
37 pmd_t *pmdp; in kasan_map_kernel_page() local
49 pmdp = memblock_alloc_or_panic(PMD_TABLE_SIZE, PMD_TABLE_SIZE); in kasan_map_kernel_page()
50 memcpy(pmdp, kasan_early_shadow_pmd, PMD_TABLE_SIZE); in kasan_map_kernel_page()
51 pud_populate(&init_mm, pudp, pmdp); in kasan_map_kernel_page()
53 pmdp = pmd_offset(pudp, ea); in kasan_map_kernel_page()
54 if (kasan_pte_table(*pmdp)) { in kasan_map_kernel_page()
57 pmd_populate_kernel(&init_mm, pmdp, ptep); in kasan_map_kernel_page()
59 ptep = pte_offset_kernel(pmdp, ea); in kasan_map_kernel_page()
/linux/arch/powerpc/include/asm/book3s/64/
pgtable.h
788 static inline void pmd_clear(pmd_t *pmdp) in pmd_clear() argument
795 WARN_ON((pmd_val(*pmdp) & (H_PAGE_HASHPTE | _PAGE_PTE)) == (H_PAGE_HASHPTE | _PAGE_PTE)); in pmd_clear()
797 *pmdp = __pmd(0); in pmd_clear()
1083 pmd_t *pmdp, pmd_t pmd);
1115 pmd_hugepage_update(struct mm_struct *mm, unsigned long addr, pmd_t *pmdp, in pmd_hugepage_update() argument
1119 return radix__pmd_hugepage_update(mm, addr, pmdp, clr, set); in pmd_hugepage_update()
1120 return hash__pmd_hugepage_update(mm, addr, pmdp, clr, set); in pmd_hugepage_update()
1138 unsigned long addr, pmd_t *pmdp) in __pmdp_test_and_clear_young() argument
1142 if ((pmd_raw(*pmdp) & cpu_to_be64(_PAGE_ACCESSED | H_PAGE_HASHPTE)) == 0) in __pmdp_test_and_clear_young()
1144 old = pmd_hugepage_update(mm, addr, pmdp, _PAGE_ACCESSED, 0); in __pmdp_test_and_clear_young()
[all …]
hash-4k.h
122 static inline char *get_hpte_slot_array(pmd_t *pmdp) in get_hpte_slot_array() argument
159 unsigned long addr, pmd_t *pmdp,
162 unsigned long address, pmd_t *pmdp);
163 extern void hash__pgtable_trans_huge_deposit(struct mm_struct *mm, pmd_t *pmdp,
165 extern pgtable_t hash__pgtable_trans_huge_withdraw(struct mm_struct *mm, pmd_t *pmdp);
167 unsigned long addr, pmd_t *pmdp);
hash-64k.h
202 static inline char *get_hpte_slot_array(pmd_t *pmdp) in get_hpte_slot_array() argument
211 return *(char **)(pmdp + PTRS_PER_PMD); in get_hpte_slot_array()
272 unsigned long addr, pmd_t *pmdp,
275 unsigned long address, pmd_t *pmdp);
276 extern void hash__pgtable_trans_huge_deposit(struct mm_struct *mm, pmd_t *pmdp,
278 extern pgtable_t hash__pgtable_trans_huge_withdraw(struct mm_struct *mm, pmd_t *pmdp);
280 unsigned long addr, pmd_t *pmdp);
/linux/arch/arm64/mm/
hugetlbpage.c
88 pmd_t *pmdp; in find_num_contig() local
93 pmdp = pmd_offset(pudp, addr); in find_num_contig()
94 if ((pte_t *)pmdp == ptep) { in find_num_contig()
246 pmd_t *pmdp; in huge_pte_alloc() local
261 pmdp = pmd_alloc(mm, pudp, addr); in huge_pte_alloc()
262 if (!pmdp) in huge_pte_alloc()
266 ptep = pte_alloc_huge(mm, pmdp, addr); in huge_pte_alloc()
273 pmdp = pmd_alloc(mm, pudp, addr); in huge_pte_alloc()
275 return (pte_t *)pmdp; in huge_pte_alloc()
287 pmd_t *pmdp, pmd; in huge_pte_offset() local
[all …]
kasan_init.c
62 static pte_t *__init kasan_pte_offset(pmd_t *pmdp, unsigned long addr, int node, in kasan_pte_offset() argument
65 if (pmd_none(READ_ONCE(*pmdp))) { in kasan_pte_offset()
69 __pmd_populate(pmdp, pte_phys, PMD_TYPE_TABLE); in kasan_pte_offset()
72 return early ? pte_offset_kimg(pmdp, addr) in kasan_pte_offset()
73 : pte_offset_kernel(pmdp, addr); in kasan_pte_offset()
115 static void __init kasan_pte_populate(pmd_t *pmdp, unsigned long addr, in kasan_pte_populate() argument
119 pte_t *ptep = kasan_pte_offset(pmdp, addr, node, early); in kasan_pte_populate()
136 pmd_t *pmdp = kasan_pmd_offset(pudp, addr, node, early); in kasan_pmd_populate() local
140 kasan_pte_populate(pmdp, addr, next, node, early); in kasan_pmd_populate()
141 } while (pmdp++, addr = next, addr != end && pmd_none(READ_ONCE(*pmdp))); in kasan_pmd_populate()
/linux/arch/arm/mm/
kasan_init.c
43 static void __init kasan_pte_populate(pmd_t *pmdp, unsigned long addr, in kasan_pte_populate() argument
47 pte_t *ptep = pte_offset_kernel(pmdp, addr); in kasan_pte_populate()
97 pmd_t *pmdp = pmd_offset(pudp, addr); in kasan_pmd_populate() local
100 if (pmd_none(*pmdp)) { in kasan_pmd_populate()
114 pmd_populate_kernel(&init_mm, pmdp, p); in kasan_pmd_populate()
115 flush_pmd_entry(pmdp); in kasan_pmd_populate()
119 kasan_pte_populate(pmdp, addr, next, early); in kasan_pmd_populate()
120 } while (pmdp++, addr = next, addr != end); in kasan_pmd_populate()
/linux/arch/s390/include/asm/
pgtable.h
1024 static inline void set_pmd(pmd_t *pmdp, pmd_t pmd) in set_pmd() argument
1026 WRITE_ONCE(*pmdp, pmd); in set_pmd()
1052 static inline void pmd_clear(pmd_t *pmdp) in pmd_clear() argument
1054 set_pmd(pmdp, __pmd(_SEGMENT_ENTRY_EMPTY)); in pmd_clear()
1680 static inline void __pmdp_cspg(pmd_t *pmdp) in __pmdp_cspg() argument
1682 cspg((unsigned long *)pmdp, pmd_val(*pmdp), in __pmdp_cspg()
1683 pmd_val(*pmdp) | _SEGMENT_ENTRY_INVALID); in __pmdp_cspg()
1693 static __always_inline void __pmdp_idte(unsigned long addr, pmd_t *pmdp, in __pmdp_idte() argument
1699 sto = __pa(pmdp) - pmd_index(addr) * sizeof(pmd_t); in __pmdp_idte()
1704 : "+m" (*pmdp) in __pmdp_idte()
[all …]
/linux/arch/sparc/mm/
srmmu.c
137 void pmd_set(pmd_t *pmdp, pte_t *ptep) in pmd_set() argument
140 set_pte((pte_t *)&pmd_val(*pmdp), __pte(SRMMU_ET_PTD | ptp)); in pmd_set()
492 pmd_t *pmdp; in srmmu_mapioaddr() local
500 pmdp = pmd_offset(pudp, virt_addr); in srmmu_mapioaddr()
501 ptep = pte_offset_kernel(pmdp, virt_addr); in srmmu_mapioaddr()
531 pmd_t *pmdp; in srmmu_unmapioaddr() local
538 pmdp = pmd_offset(pudp, virt_addr); in srmmu_unmapioaddr()
539 ptep = pte_offset_kernel(pmdp, virt_addr); in srmmu_unmapioaddr()
678 pmd_t *pmdp; in srmmu_early_allocate_ptable_skeleton() local
686 pmdp = __srmmu_get_nocache( in srmmu_early_allocate_ptable_skeleton()
[all …]
/linux/arch/loongarch/mm/
kasan_init.c
116 static pte_t *__init kasan_pte_offset(pmd_t *pmdp, unsigned long addr, int node, bool early) in kasan_pte_offset() argument
118 if (__pmd_none(early, pmdp_get(pmdp))) { in kasan_pte_offset()
123 pmd_populate_kernel(NULL, pmdp, (pte_t *)__va(pte_phys)); in kasan_pte_offset()
126 return pte_offset_kernel(pmdp, addr); in kasan_pte_offset()
168 static void __init kasan_pte_populate(pmd_t *pmdp, unsigned long addr, in kasan_pte_populate() argument
172 pte_t *ptep = kasan_pte_offset(pmdp, addr, node, early); in kasan_pte_populate()
187 pmd_t *pmdp = kasan_pmd_offset(pudp, addr, node, early); in kasan_pmd_populate() local
191 kasan_pte_populate(pmdp, addr, next, node, early); in kasan_pmd_populate()
192 } while (pmdp++, addr = next, addr != end && __pmd_none(early, pmdp_get(pmdp))); in kasan_pmd_populate()
/linux/arch/riscv/include/asm/
pgtable.h
248 static inline void set_pmd(pmd_t *pmdp, pmd_t pmd) in set_pmd() argument
250 WRITE_ONCE(*pmdp, pmd); in set_pmd()
253 static inline void pmd_clear(pmd_t *pmdp) in pmd_clear() argument
255 set_pmd(pmdp, __pmd(0)); in pmd_clear()
596 unsigned long address, pmd_t *pmdp) in update_mmu_cache_pmd() argument
598 pte_t *ptep = (pte_t *)pmdp; in update_mmu_cache_pmd()
951 pmd_t *pmdp, pmd_t pmd) in set_pmd_at() argument
953 page_table_check_pmd_set(mm, pmdp, pmd); in set_pmd_at()
954 return __set_pte_at(mm, (pte_t *)pmdp, pmd_pte(pmd)); in set_pmd_at()
989 unsigned long address, pmd_t *pmdp, in pmdp_set_access_flags() argument
[all …]
