
Searched for refs:vm_fault (results 1–25 of 142), sorted by relevance.


/linux/include/linux/
huge_mm.h
  10: vm_fault_t do_huge_pmd_anonymous_page(struct vm_fault *vmf);
  14: bool huge_pmd_set_accessed(struct vm_fault *vmf);
  20: void huge_pud_set_accessed(struct vm_fault *vmf, pud_t orig_pud);
  22: static inline void huge_pud_set_accessed(struct vm_fault *vmf, pud_t orig_pud) in huge_pud_set_accessed()
  27: vm_fault_t do_huge_pmd_wp_page(struct vm_fault *vmf);
  40: vm_fault_t vmf_insert_pfn_pmd(struct vm_fault *vmf, unsigned long pfn,
  42: vm_fault_t vmf_insert_pfn_pud(struct vm_fault *vmf, unsigned long pfn,
  44: vm_fault_t vmf_insert_folio_pmd(struct vm_fault *vmf, struct folio *folio,
  46: vm_fault_t vmf_insert_folio_pud(struct vm_fault *vmf, struct folio *folio,
  521: vm_fault_t do_huge_pmd_numa_page(struct vm_fault *vmf);
  [all …]
mm.h
  689: struct vm_fault {
  757: vm_fault_t (*fault)(struct vm_fault *vmf);
  758: vm_fault_t (*huge_fault)(struct vm_fault *vmf, unsigned int order);
  759: vm_fault_t (*map_pages)(struct vm_fault *vmf,
  765: vm_fault_t (*page_mkwrite)(struct vm_fault *vmf);
  768: vm_fault_t (*pfn_mkwrite)(struct vm_fault *vmf);
  841: static inline void release_fault_lock(struct vm_fault *vmf) in release_fault_lock()
  849: static inline void assert_fault_locked(const struct vm_fault *vmf) in assert_fault_locked()
  857: static inline void release_fault_lock(struct vm_fault *vmf) in release_fault_lock()
  862: static inline void assert_fault_locked(const struct vm_fault *vmf) in assert_fault_locked()
  [all …]
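
The mm.h hits are the definition of struct vm_fault itself (line 689) plus the vm_operations_struct callbacks that consume it. As a rough illustration of the consumer side, here is a minimal, hypothetical .fault handler; my_dev_fault(), my_dev_lookup_page() and my_dev_vm_ops are invented names, and only the vm_fault fields and the callback signature are taken from the hits above.

    #include <linux/mm.h>

    /* Hypothetical driver helper: resolves a mapping offset to a page. */
    struct page *my_dev_lookup_page(void *priv, pgoff_t pgoff);

    static vm_fault_t my_dev_fault(struct vm_fault *vmf)
    {
            struct page *page;

            /* vmf->pgoff is the page offset of the fault within the mapping */
            page = my_dev_lookup_page(vmf->vma->vm_private_data, vmf->pgoff);
            if (!page)
                    return VM_FAULT_SIGBUS;

            /* hand back a referenced page; the core fault path maps it */
            get_page(page);
            vmf->page = page;
            return 0;
    }

    static const struct vm_operations_struct my_dev_vm_ops = {
            .fault = my_dev_fault,
    };

A driver would point vma->vm_ops at a table like this from its mmap handler; the other callbacks in the hits (huge_fault, map_pages, page_mkwrite, pfn_mkwrite) are optional refinements of the same struct vm_fault based contract.
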
mempolicy.h
  171: int mpol_misplaced(struct folio *folio, struct vm_fault *vmf,
  290: struct vm_fault *vmf, in mpol_misplaced()
userfaultfd_k.h
  81: extern vm_fault_t handle_userfault(struct vm_fault *vmf, unsigned long reason);
  333: static inline vm_fault_t handle_userfault(struct vm_fault *vmf, in handle_userfault()
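
handle_userfault() is the hook the core fault paths call when the faulting VMA has been registered with userfaultfd. A sketch of the usual in-kernel call-site pattern (not a specific line from these hits; userfaultfd_missing() and VM_UFFD_MISSING are real kernel names, the surrounding handler is implied):

    /* Inside a fault handler, before allocating a page for a missing PTE:
     * defer the fault to the userspace monitor if one is registered. */
    if (userfaultfd_missing(vmf->vma))
            return handle_userfault(vmf, VM_UFFD_MISSING);

The inline variant at line 333 is the fallback stub used when CONFIG_USERFAULTFD is disabled, so callers do not need their own ifdefs.
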
/linux/include/trace/events/
fs_dax.h
  11: TP_PROTO(struct inode *inode, struct vm_fault *vmf,
  56: TP_PROTO(struct inode *inode, struct vm_fault *vmf, \
  64: TP_PROTO(struct inode *inode, struct vm_fault *vmf,
  98: TP_PROTO(struct inode *inode, struct vm_fault *vmf, \
  106: TP_PROTO(struct inode *inode, struct vm_fault *vmf, int result),
  140: TP_PROTO(struct inode *inode, struct vm_fault *vmf, int result), \
/linux/drivers/dax/
device.c
  83: static void dax_set_mapping(struct vm_fault *vmf, unsigned long pfn, in dax_set_mapping()
  110: struct vm_fault *vmf) in __dev_dax_pte_fault()
  144: struct vm_fault *vmf) in __dev_dax_pmd_fault()
  189: struct vm_fault *vmf) in __dev_dax_pud_fault()
  234: struct vm_fault *vmf) in __dev_dax_pud_fault()
  240: static vm_fault_t dev_dax_huge_fault(struct vm_fault *vmf, unsigned int order) in dev_dax_huge_fault()
  266: static vm_fault_t dev_dax_fault(struct vm_fault *vmf) in dev_dax_fault()
/linux/drivers/gpu/drm/ttm/
ttm_bo_vm.c
  44: struct vm_fault *vmf) in ttm_bo_vm_fault_idle()
  119: struct vm_fault *vmf) in ttm_bo_vm_reserve()
  183: vm_fault_t ttm_bo_vm_fault_reserved(struct vm_fault *vmf, in ttm_bo_vm_fault_reserved()
  292: vm_fault_t ttm_bo_vm_dummy_page(struct vm_fault *vmf, pgprot_t prot) in ttm_bo_vm_dummy_page()
  322: vm_fault_t ttm_bo_vm_fault(struct vm_fault *vmf) in ttm_bo_vm_fault()
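
ttm_bo_vm_fault() is exported so that TTM-based DRM drivers can reuse it directly as their .fault callback, while ttm_bo_vm_reserve() and ttm_bo_vm_fault_reserved() expose the same logic in two halves for drivers that need extra work done while the buffer object is reserved. A minimal sketch of the simple case (my_ttm_vm_ops is an invented name; the header path is an assumption for recent kernels):

    #include <drm/ttm/ttm_bo.h>     /* assumed: declares ttm_bo_vm_fault() */

    /* Sketch: hand page faults on a TTM BO mapping to the common helper. */
    static const struct vm_operations_struct my_ttm_vm_ops = {
            .fault = ttm_bo_vm_fault,
    };

Real drivers typically also wire up the open/close/access helpers from the same file, and drivers with custom behaviour open-code the reserve / fault_reserved / unlock sequence instead of using ttm_bo_vm_fault() directly.
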
/linux/mm/
memory.c
  97: static vm_fault_t do_fault(struct vm_fault *vmf);
  98: static vm_fault_t do_anonymous_page(struct vm_fault *vmf);
  99: static bool vmf_pte_changed(struct vm_fault *vmf);
  105: static __always_inline bool vmf_orig_pte_uffd_wp(struct vm_fault *vmf) in vmf_orig_pte_uffd_wp()
  2762: vm_fault_t vmf_insert_page_mkwrite(struct vm_fault *vmf, struct page *page, in vmf_insert_page_mkwrite()
  3378: static inline int pte_unmap_same(struct vm_fault *vmf) in pte_unmap_same()
  3400: struct vm_fault *vmf) in __wp_page_copy_user()
  3517: static vm_fault_t do_page_mkwrite(struct vm_fault *vmf, struct folio *folio) in do_page_mkwrite()
  3550: static vm_fault_t fault_dirty_shared_page(struct vm_fault *vmf) in fault_dirty_shared_page()
  3603: static inline void wp_page_reuse(struct vm_fault *vmf, struct folio *folio) in wp_page_reuse()
  [all …]
swap.h
  270: struct vm_fault *vmf);
  384: struct vm_fault *vmf) in swapin_readahead()
/linux/arch/arc/include/asm/
pgtable-bits-arcv2.h
  103: struct vm_fault;
  104: void update_mmu_cache_range(struct vm_fault *vmf, struct vm_area_struct *vma,
/linux/fs/ocfs2/
mmap.c
  31: static vm_fault_t ocfs2_fault(struct vm_fault *vmf) in ocfs2_fault()
  113: static vm_fault_t ocfs2_page_mkwrite(struct vm_fault *vmf) in ocfs2_page_mkwrite()
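
ocfs2_page_mkwrite() is one instance of the generic ->page_mkwrite contract: the handler runs before a read-only page in a shared file mapping becomes writable, and it returns with the page locked. A hedged sketch of that contract, with my_fs_page_mkwrite() and my_fs_prepare_write() as invented names:

    #include <linux/mm.h>
    #include <linux/pagemap.h>

    /* Hypothetical helper: allocate blocks / reserve space for the write. */
    int my_fs_prepare_write(struct inode *inode, struct folio *folio);

    static vm_fault_t my_fs_page_mkwrite(struct vm_fault *vmf)
    {
            struct folio *folio = page_folio(vmf->page);
            struct inode *inode = file_inode(vmf->vma->vm_file);

            folio_lock(folio);
            /* the page may have been truncated while we waited for the lock */
            if (folio->mapping != inode->i_mapping) {
                    folio_unlock(folio);
                    return VM_FAULT_NOPAGE;
            }
            if (my_fs_prepare_write(inode, folio)) {
                    folio_unlock(folio);
                    return VM_FAULT_SIGBUS;
            }
            /* VM_FAULT_LOCKED: we return with the folio still locked */
            return VM_FAULT_LOCKED;
    }
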
/linux/fs/
dax.c
  1003: static int copy_cow_page_dax(struct vm_fault *vmf, const struct iomap_iter *iter) in copy_cow_page_dax()
  1042: static void *dax_insert_entry(struct xa_state *xas, struct vm_fault *vmf, in dax_insert_entry()
  1358: static vm_fault_t dax_load_hole(struct xa_state *xas, struct vm_fault *vmf, in dax_load_hole()
  1374: static vm_fault_t dax_pmd_load_hole(struct xa_state *xas, struct vm_fault *vmf, in dax_pmd_load_hole()
  1398: static vm_fault_t dax_pmd_load_hole(struct xa_state *xas, struct vm_fault *vmf, in dax_pmd_load_hole()
  1766: static vm_fault_t dax_fault_cow_page(struct vm_fault *vmf, in dax_fault_cow_page()
  1805: static vm_fault_t dax_fault_iter(struct vm_fault *vmf, in dax_fault_iter()
  1862: static vm_fault_t dax_iomap_pte_fault(struct vm_fault *vmf, unsigned long *pfnp, in dax_iomap_pte_fault()
  1939: static bool dax_fault_check_fallback(struct vm_fault *vmf, struct xa_state *xas, in dax_fault_check_fallback()
  1972: static vm_fault_t dax_iomap_pmd_fault(struct vm_fault *vmf, unsigned long *pfnp, in dax_iomap_pmd_fault()
  [all …]
/linux/drivers/gpu/drm/amd/amdgpu/
gmc_v12_0.c
  188: adev->gmc.vm_fault.num_types = 1; in gmc_v12_0_set_irq_funcs()
  189: adev->gmc.vm_fault.funcs = &gmc_v12_0_irq_funcs; in gmc_v12_0_set_irq_funcs()
  676: return amdgpu_irq_get(adev, &adev->gmc.vm_fault, 0); in gmc_v12_0_late_init()
  807: &adev->gmc.vm_fault); in gmc_v12_0_sw_init()
  814: &adev->gmc.vm_fault); in gmc_v12_0_sw_init()
  976: amdgpu_irq_put(adev, &adev->gmc.vm_fault, 0); in gmc_v12_0_hw_fini()
gmc_v11_0.c
  195: adev->gmc.vm_fault.num_types = 1; in gmc_v11_0_set_irq_funcs()
  196: adev->gmc.vm_fault.funcs = &gmc_v11_0_irq_funcs; in gmc_v11_0_set_irq_funcs()
  664: return amdgpu_irq_get(adev, &adev->gmc.vm_fault, 0); in gmc_v11_0_late_init()
  814: &adev->gmc.vm_fault); in gmc_v11_0_sw_init()
  821: &adev->gmc.vm_fault); in gmc_v11_0_sw_init()
  991: amdgpu_irq_put(adev, &adev->gmc.vm_fault, 0); in gmc_v11_0_hw_fini()
gmc_v10_0.c
  199: adev->gmc.vm_fault.num_types = 1; in gmc_v10_0_set_irq_funcs()
  200: adev->gmc.vm_fault.funcs = &gmc_v10_0_irq_funcs; in gmc_v10_0_set_irq_funcs()
  664: return amdgpu_irq_get(adev, &adev->gmc.vm_fault, 0); in gmc_v10_0_late_init()
  843: &adev->gmc.vm_fault); in gmc_v10_0_sw_init()
  850: &adev->gmc.vm_fault); in gmc_v10_0_sw_init()
  1043: amdgpu_irq_put(adev, &adev->gmc.vm_fault, 0); in gmc_v10_0_hw_fini()
gmc_v6_0.c
  790: return amdgpu_irq_get(adev, &adev->gmc.vm_fault, 0); in gmc_v6_0_late_init()
  828: r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, 146, &adev->gmc.vm_fault); in gmc_v6_0_sw_init()
  832: r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, 147, &adev->gmc.vm_fault); in gmc_v6_0_sw_init()
  931: amdgpu_irq_put(adev, &adev->gmc.vm_fault, 0); in gmc_v6_0_hw_fini()
  1170: adev->gmc.vm_fault.num_types = 1; in gmc_v6_0_set_irq_funcs()
  1171: adev->gmc.vm_fault.funcs = &gmc_v6_0_irq_funcs; in gmc_v6_0_set_irq_funcs()
gmc_v7_0.c
  961: return amdgpu_irq_get(adev, &adev->gmc.vm_fault, 0); in gmc_v7_0_late_init()
  1000: …_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, VISLANDS30_IV_SRCID_GFX_PAGE_INV_FAULT, &adev->gmc.vm_fault); in gmc_v7_0_sw_init()
  1004: …_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, VISLANDS30_IV_SRCID_GFX_MEM_PROT_FAULT, &adev->gmc.vm_fault); in gmc_v7_0_sw_init()
  1121: amdgpu_irq_put(adev, &adev->gmc.vm_fault, 0); in gmc_v7_0_hw_fini()
  1387: adev->gmc.vm_fault.num_types = 1; in gmc_v7_0_set_irq_funcs()
  1388: adev->gmc.vm_fault.funcs = &gmc_v7_0_irq_funcs; in gmc_v7_0_set_irq_funcs()
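
Note that these amdgpu hits are a different vm_fault altogether: adev->gmc.vm_fault is the interrupt source for the GPU's own VM-fault interrupt, not the core-mm struct vm_fault. The gmc_v*_0.c lines all repeat the same lifecycle, condensed below from the calls shown above (the 146/147 source IDs follow the gmc_v6_0.c hits; other generations use their own IDs and gmc_vN_0_irq_funcs tables):

    /* sw_init: register the VM-fault interrupt source */
    r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, 146, &adev->gmc.vm_fault);
    r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, 147, &adev->gmc.vm_fault);

    /* set_irq_funcs: one interrupt type, handled by the per-generation table */
    adev->gmc.vm_fault.num_types = 1;
    adev->gmc.vm_fault.funcs = &gmc_v6_0_irq_funcs;

    /* late_init enables the interrupt; hw_fini disables it again */
    r = amdgpu_irq_get(adev, &adev->gmc.vm_fault, 0);
    amdgpu_irq_put(adev, &adev->gmc.vm_fault, 0);
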
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/fb/
nv50.c
  121: static const struct nvkm_enum vm_fault[] = {
  174: re = nvkm_enum_find(vm_fault, st1); in nv50_fb_intr()
/linux/arch/hexagon/mm/
Makefile
  6: obj-y := init.o uaccess.o vm_fault.o cache.o
/linux/arch/x86/entry/vdso/
vma.c
  53: struct vm_area_struct *vma, struct vm_fault *vmf) in vdso_fault()
  92: struct vm_area_struct *vma, struct vm_fault *vmf) in vvar_vclock_fault()
/linux/arch/hexagon/include/asm/
cacheflush.h
  61: static inline void update_mmu_cache_range(struct vm_fault *vmf, in update_mmu_cache_range()
/linux/arch/csky/abiv1/
cacheflush.c
  44: void update_mmu_cache_range(struct vm_fault *vmf, struct vm_area_struct *vma, in update_mmu_cache_range()
/linux/tools/testing/vma/
vma_internal.h
  745: struct vm_fault {};
  764: vm_fault_t (*fault)(struct vm_fault *vmf);
  765: vm_fault_t (*huge_fault)(struct vm_fault *vmf, unsigned int order);
  766: vm_fault_t (*map_pages)(struct vm_fault *vmf,
  772: vm_fault_t (*page_mkwrite)(struct vm_fault *vmf);
  775: vm_fault_t (*pfn_mkwrite)(struct vm_fault *vmf);
/linux/arch/csky/abiv2/
cacheflush.c
  10: void update_mmu_cache_range(struct vm_fault *vmf, struct vm_area_struct *vma, in update_mmu_cache_range()
/linux/arch/xtensa/include/asm/
pgtable.h
  398: struct vm_fault;
  399: void update_mmu_cache_range(struct vm_fault *vmf, struct vm_area_struct *vma,
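
The arc, hexagon, csky and xtensa hits are all the same per-architecture hook: update_mmu_cache_range() is called by generic mm code after PTEs are installed, with the vm_fault passed down so the architecture can update its caches and TLBs. A sketch of its shape; the hits only show the first two parameters, so the address/ptep/nr tail is a reconstruction of the generic prototype, not taken from these lines:

    /* Per-arch hook; an empty body is valid where the hardware keeps
     * caches and TLBs coherent by itself (as several architectures do). */
    static inline void update_mmu_cache_range(struct vm_fault *vmf,
                    struct vm_area_struct *vma, unsigned long address,
                    pte_t *ptep, unsigned int nr)
    {
    }
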
