Cross-reference search hits for the identifier dst_mm.

/linux/mm/
userfaultfd.c

    uffd_mfill_lock():
        148   static struct vm_area_struct *uffd_mfill_lock(struct mm_struct *dst_mm,
        154           dst_vma = uffd_lock_vma(dst_mm, dst_start);
        169   static struct vm_area_struct *uffd_mfill_lock(struct mm_struct *dst_mm,
        175           mmap_read_lock(dst_mm);
        176           dst_vma = find_vma_and_prepare_anon(dst_mm, dst_start);
        185           mmap_read_unlock(dst_mm);
    mfill_establish_pmd():
        290           struct mm_struct *dst_mm = state->ctx->mm;
        293           dst_pmd = mm_alloc_pmd(dst_mm, state->dst_addr);
        299                    unlikely(__pte_alloc(dst_mm, dst_pmd)))
    mfill_atomic_install_pte():
        345           struct mm_struct *dst_mm = dst_vma->vm_mm;
    [all …]
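These hits are the common mfill path behind UFFDIO_COPY and the related userfaultfd fills: uffd_mfill_lock() pins the destination VMA (a per-VMA lock where available, otherwise mmap_read_lock() on dst_mm), mfill_establish_pmd() allocates the destination page tables, and mfill_atomic_install_pte() installs the new PTE. Below is a minimal user-space sketch that exercises this path; error handling is trimmed and it assumes userfaultfd is permitted for this process (privileges or the vm.unprivileged_userfaultfd sysctl, depending on kernel configuration).

    /* uffd_copy_demo.c — fill a registered page with UFFDIO_COPY.
     * Build: cc -o uffd_copy_demo uffd_copy_demo.c */
    #include <fcntl.h>
    #include <linux/userfaultfd.h>
    #include <stdio.h>
    #include <string.h>
    #include <sys/ioctl.h>
    #include <sys/mman.h>
    #include <sys/syscall.h>
    #include <unistd.h>

    int main(void)
    {
        long page = sysconf(_SC_PAGESIZE);
        int uffd = syscall(SYS_userfaultfd, O_CLOEXEC);
        if (uffd < 0) { perror("userfaultfd"); return 1; }

        struct uffdio_api api = { .api = UFFD_API };
        if (ioctl(uffd, UFFDIO_API, &api)) { perror("UFFDIO_API"); return 1; }

        /* Destination: anonymous VMA registered for missing-page faults. */
        char *dst = mmap(NULL, page, PROT_READ | PROT_WRITE,
                         MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
        struct uffdio_register reg = {
            .range = { .start = (unsigned long)dst, .len = page },
            .mode  = UFFDIO_REGISTER_MODE_MISSING,
        };
        if (ioctl(uffd, UFFDIO_REGISTER, &reg)) { perror("UFFDIO_REGISTER"); return 1; }

        /* Source buffer holding the data the kernel should install at dst. */
        char *src = mmap(NULL, page, PROT_READ | PROT_WRITE,
                         MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
        memset(src, 'x', page);

        /* UFFDIO_COPY is resolved by the mfill path shown above: the kernel
         * locks the destination VMA of dst_mm, allocates the page tables and
         * atomically installs the new PTE pointing at a copy of src. */
        struct uffdio_copy copy = {
            .dst = (unsigned long)dst,
            .src = (unsigned long)src,
            .len = page,
        };
        if (ioctl(uffd, UFFDIO_COPY, &copy)) { perror("UFFDIO_COPY"); return 1; }

        printf("dst[0]=%c, %lld bytes filled without faulting\n",
               dst[0], (long long)copy.copy);
        return 0;
    }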
|
huge_memory.c

    copy_huge_non_present_pmd():
        1871          struct mm_struct *dst_mm, struct mm_struct *src_mm,
        1918          add_mm_counter(dst_mm, MM_ANONPAGES, HPAGE_PMD_NR);
        1919          mm_inc_nr_ptes(dst_mm);
        1920          pgtable_trans_huge_deposit(dst_mm, dst_pmd, pgtable);
        1923          set_pmd_at(dst_mm, addr, dst_pmd, pmd);
    copy_huge_pmd():
        1926  int copy_huge_pmd(struct mm_struct *dst_mm, struct mm_struct *src_mm,
        1940          dst_ptl = pmd_lock(dst_mm, dst_pmd);
        1960          pgtable = pte_alloc_one(dst_mm);
        1964          dst_ptl = pmd_lock(dst_mm, dst_pmd);
        1973          copy_huge_non_present_pmd(dst_mm, src_mm, dst_pmd, src_pmd, addr,
    [all …]
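copy_huge_pmd() is the fork-time copy of a PMD-mapped transparent huge page: under the destination PMD lock it either duplicates the present huge mapping or, for non-present entries, calls copy_huge_non_present_pmd(), which deposits a preallocated page-table page, charges HPAGE_PMD_NR anonymous pages to dst_mm and writes the entry with set_pmd_at(). A small user-space sketch of the scenario that reaches it; whether the range actually gets a huge PMD depends on THP configuration and alignment, so this is illustrative only.

    #define _GNU_SOURCE
    #include <stdio.h>
    #include <string.h>
    #include <sys/mman.h>
    #include <sys/wait.h>
    #include <unistd.h>

    int main(void)
    {
        size_t len = 4UL << 20;              /* 4 MiB: room for an aligned 2 MiB THP */
        char *p = mmap(NULL, len, PROT_READ | PROT_WRITE,
                       MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
        if (p == MAP_FAILED) { perror("mmap"); return 1; }

        madvise(p, len, MADV_HUGEPAGE);      /* make the range THP-eligible */
        memset(p, 'a', len);                 /* fault in; may install huge PMDs */

        /* fork() duplicates the page tables; a PMD-mapped THP is copied by
         * copy_huge_pmd(): both sides end up write-protected and share the page. */
        pid_t pid = fork();
        if (pid == 0) {
            p[0] = 'b';                      /* child write triggers huge-page CoW */
            _exit(0);
        }
        waitpid(pid, NULL, 0);
        printf("parent still sees '%c' (CoW kept the parent's data)\n", p[0]);
        return 0;
    }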
|
memory.c

    copy_nonpresent_pte():
        925   copy_nonpresent_pte(struct mm_struct *dst_mm, struct mm_struct *src_mm,
        941                   if (unlikely(list_empty(&dst_mm->mmlist))) {
        943                           if (list_empty(&dst_mm->mmlist))
        944                                   list_add(&dst_mm->mmlist,
        1024                  set_pte_at(dst_mm, addr, dst_pte,
        1030          set_pte_at(dst_mm, addr, dst_pte, pte);
    copy_pte_range():
        1212          struct mm_struct *dst_mm = dst_vma->vm_mm;
        1237          dst_pte = pte_alloc_map_lock(dst_mm, dst_pmd, addr, &dst_ptl);
        1280                  ret = copy_nonpresent_pte(dst_mm, src_mm,
        1330          add_mm_rss_vec(dst_mm, rss);
    [all …]
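copy_pte_range() is the PTE-level loop of the same fork-time copy: it allocates and locks the destination PTE page with pte_alloc_map_lock(), hands PTEs that do not point at a resident page (swap, migration, device entries) to copy_nonpresent_pte(), which duplicates the entry into dst_mm and puts dst_mm on the mmlist walked by swapoff, and finally folds the accumulated RSS deltas in with add_mm_rss_vec(). A sketch of a workload that crosses this path with real swap entries; it assumes swap is enabled and a kernel with MADV_PAGEOUT (5.4+), and the page-out is only best effort.

    #define _GNU_SOURCE
    #include <stdio.h>
    #include <string.h>
    #include <sys/mman.h>
    #include <sys/wait.h>
    #include <unistd.h>

    int main(void)
    {
        size_t len = 64UL << 20;             /* 64 MiB of anonymous memory */
        char *p = mmap(NULL, len, PROT_READ | PROT_WRITE,
                       MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
        if (p == MAP_FAILED) { perror("mmap"); return 1; }
        memset(p, 'a', len);                 /* fault everything in */

        /* Best effort: ask the kernel to reclaim these pages to swap so the
         * parent's PTEs become swap entries rather than present pages. */
        if (madvise(p, len, MADV_PAGEOUT))
            perror("MADV_PAGEOUT");

        /* fork() -> copy_page_range() -> copy_pte_range(); any PTE that is now
         * a swap entry is duplicated by copy_nonpresent_pte() into the child. */
        pid_t pid = fork();
        if (pid == 0)
            _exit(p[0] == 'a' ? 0 : 1);      /* child read faults the data back in */

        int status;
        waitpid(pid, &status, 0);
        printf("child saw the swapped data: %s\n",
               WEXITSTATUS(status) == 0 ? "yes" : "no");
        return 0;
    }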
|
hugetlb.c

    hugetlb_mfill_atomic_pte():
        6185          struct mm_struct *dst_mm = dst_vma->vm_mm;
        6201          ptl = huge_pte_lock(h, dst_mm, dst_pte);
        6204          if (!huge_pte_none(huge_ptep_get(dst_mm, dst_addr, dst_pte))) {
        6210          set_huge_pte_at(dst_mm, dst_addr, dst_pte, _dst_pte, size);
        6332          ptl = huge_pte_lock(h, dst_mm, dst_pte);
        6340          dst_ptep = huge_ptep_get(dst_mm, dst_addr, dst_pte);
        6371          set_huge_pte_at(dst_mm, dst_addr, dst_pte, _dst_pte, size);
        6373          hugetlb_count_add(pages_per_huge_page(h), dst_mm);
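hugetlb_mfill_atomic_pte() is the hugetlb backend of the same userfaultfd fill: it takes huge_pte_lock() on dst_mm, bails out if the destination huge PTE is already populated, installs the entry with set_huge_pte_at() and charges the huge page to dst_mm via hugetlb_count_add(). A variant of the earlier UFFDIO_COPY sketch targeting a hugetlb mapping; it assumes a 2 MiB default hugepage size, pre-reserved huge pages (e.g. vm.nr_hugepages), and hugepage-aligned dst/len, which this path requires.

    #define _GNU_SOURCE
    #include <fcntl.h>
    #include <linux/userfaultfd.h>
    #include <stdio.h>
    #include <string.h>
    #include <sys/ioctl.h>
    #include <sys/mman.h>
    #include <sys/syscall.h>
    #include <unistd.h>

    #define HPAGE (2UL << 20)                /* assumed default hugepage size */

    int main(void)
    {
        int uffd = syscall(SYS_userfaultfd, O_CLOEXEC);
        struct uffdio_api api = { .api = UFFD_API };
        if (uffd < 0 || ioctl(uffd, UFFDIO_API, &api)) { perror("uffd"); return 1; }

        /* Hugetlb destination: missing-page fills for this VMA are handled by
         * hugetlb_mfill_atomic_pte() rather than the anonymous mfill path. */
        char *dst = mmap(NULL, HPAGE, PROT_READ | PROT_WRITE,
                         MAP_PRIVATE | MAP_ANONYMOUS | MAP_HUGETLB, -1, 0);
        if (dst == MAP_FAILED) { perror("mmap(MAP_HUGETLB)"); return 1; }

        struct uffdio_register reg = {
            .range = { .start = (unsigned long)dst, .len = HPAGE },
            .mode  = UFFDIO_REGISTER_MODE_MISSING,
        };
        if (ioctl(uffd, UFFDIO_REGISTER, &reg)) { perror("UFFDIO_REGISTER"); return 1; }

        char *src = mmap(NULL, HPAGE, PROT_READ | PROT_WRITE,
                         MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
        memset(src, 'h', HPAGE);

        /* dst and len are hugepage-aligned: the whole huge page is filled at once. */
        struct uffdio_copy copy = {
            .dst = (unsigned long)dst, .src = (unsigned long)src, .len = HPAGE,
        };
        if (ioctl(uffd, UFFDIO_COPY, &copy)) { perror("UFFDIO_COPY"); return 1; }

        printf("huge page filled: dst[0]=%c\n", dst[0]);
        return 0;
    }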
|
/linux/drivers/gpu/drm/amd/amdgpu/

amdgpu_ttm.c

    amdgpu_ttm_copy_mem_to_mem():
        305           struct amdgpu_res_cursor src_mm, dst_mm;
        318           amdgpu_res_first(dst->mem, dst->offset, size, &dst_mm);
        327                   cur_size = min3(src_mm.size, dst_mm.size, 256ULL << 20);
        335                   r = amdgpu_ttm_map_buffer(entity, dst->bo, dst->mem, &dst_mm,
        372                   amdgpu_res_next(&dst_mm, cur_size);
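Here dst_mm is not an mm_struct at all but an amdgpu_res_cursor: amdgpu_ttm_copy_mem_to_mem() walks the source and destination buffer placements, which may be split into non-contiguous blocks, and each iteration copies at most what is contiguous in both, capped at a 256 MiB window, before advancing both cursors with amdgpu_res_next(). Below is a self-contained model of that two-cursor loop; struct cursor, cursor_first() and cursor_next() are hypothetical stand-ins, not the driver's amdgpu_res_cursor API.

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical cursor: each buffer is a list of fragment sizes; the cursor
     * tracks how much is left in the current fragment and in the whole request. */
    struct cursor {
        const uint64_t *frags;   /* fragment sizes, in bytes */
        unsigned int idx;        /* current fragment */
        uint64_t size;           /* bytes left in the current fragment */
        uint64_t remaining;      /* bytes left overall */
    };

    static void cursor_first(struct cursor *c, const uint64_t *frags, uint64_t total)
    {
        c->frags = frags;
        c->idx = 0;
        c->size = frags[0];
        c->remaining = total;
    }

    static void cursor_next(struct cursor *c, uint64_t adv)
    {
        c->remaining -= adv;
        c->size -= adv;
        if (!c->size && c->remaining)        /* fragment used up: step to the next */
            c->size = c->frags[++c->idx];
    }

    static uint64_t min3(uint64_t a, uint64_t b, uint64_t c)
    {
        uint64_t m = a < b ? a : b;
        return m < c ? m : c;
    }

    int main(void)
    {
        /* 600 MiB to copy, fragmented differently on each side. */
        const uint64_t src_frags[] = { 300ULL << 20, 300ULL << 20 };
        const uint64_t dst_frags[] = { 200ULL << 20, 400ULL << 20 };
        struct cursor src, dst;

        cursor_first(&src, src_frags, 600ULL << 20);
        cursor_first(&dst, dst_frags, 600ULL << 20);

        while (src.remaining) {
            /* Copy no more than is contiguous on either side, capped at 256 MiB. */
            uint64_t cur = min3(src.size, dst.size, 256ULL << 20);

            printf("copy %3llu MiB\n", (unsigned long long)(cur >> 20));
            cursor_next(&src, cur);
            cursor_next(&dst, cur);
        }
        return 0;
    }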
|