Lines matching refs: src_start

496 unsigned long src_start,
523 src_addr = src_start;
567 while (src_addr < src_start + len) {
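The three hits above (the parameter at 496, the cursor initialisation at 523, and the loop bound at 567) trace the canonical fill-loop shape in this file: a cursor starts at src_start and advances one page at a time until it has covered len bytes. A minimal sketch of that shape, assuming everything outside src_start, src_addr, and len is hypothetical glue:

    /* Sketch only: the real body fills one page per iteration,
     * handles retries, and advances dst_addr in lockstep. */
    unsigned long src_addr = src_start;

    while (src_addr < src_start + len) {
            /* ... operate on the page at src_addr ... */
            src_addr += PAGE_SIZE;
    }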
655 unsigned long src_start,
706 unsigned long src_start,
725 VM_WARN_ON_ONCE(src_start + len <= src_start);
728 src_addr = src_start;
774 src_start, len, flags);
782 while (src_addr < src_start + len) {
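The VM_WARN_ON_ONCE at 725 exists to protect the loop bound at 782: if src_start + len wraps past the top of the address space (or len is zero), src_addr < src_start + len is false on entry and the walk would silently cover nothing. A self-contained userspace illustration of the wrap; all names here are hypothetical:

    #include <stdio.h>

    int main(void)
    {
            unsigned long start = ~0UL - 4096;  /* near the address-space top */
            unsigned long len = 3 * 4096;       /* range that wraps */

            /* Same predicate as the kernel's check at line 725. */
            if (start + len <= start)
                    printf("wrapped: start+len=%#lx <= start=%#lx\n",
                           start + len, start);
            return 0;
    }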
869 unsigned long src_start, unsigned long len,
872 return mfill_atomic(ctx, dst_start, src_start, len,
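Lines 869-872 are the thin UFFDIO_COPY entry point: it only tags the copy mode and forwards the (dst_start, src_start, len) triple on. A hedged sketch of that wrapper shape; the mode-tagging helper and constant are assumptions about the surrounding code, not guaranteed spellings:

    /* Sketch of the wrapper at 869-872; helper/constant names assumed. */
    ssize_t mfill_atomic_copy(struct userfaultfd_ctx *ctx,
                              unsigned long dst_start, unsigned long src_start,
                              unsigned long len, uffd_flags_t flags)
    {
            return mfill_atomic(ctx, dst_start, src_start, len,
                                uffd_flags_set_mode(flags, MFILL_ATOMIC_COPY));
    }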
1084 unsigned long src_start = src_addr;
1144 if (src_addr > src_start)
1145 flush_tlb_range(src_vma, src_start, src_addr);
1151 return src_addr > src_start ? src_addr - src_start : err;
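Here the identifier is reused as a local (1084): src_start snapshots the cursor before a batch of PTE moves, so the flush at 1144-1145 covers only the range actually processed, and the return at 1151 reports partial progress in bytes in preference to the error. A sketch of that idiom, assuming hypothetical glue around the three quoted lines:

    /* Sketch of the snapshot/flush/progress idiom at 1084-1151. */
    unsigned long src_start = src_addr;  /* where this batch began */
    long err = 0;

    /* ... move PTEs, advancing src_addr; set err and stop on failure ... */

    if (src_addr > src_start)
            flush_tlb_range(src_vma, src_start, src_addr);

    /* Partial progress wins, so callers can retry the remainder. */
    return src_addr > src_start ? src_addr - src_start : err;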
1575 unsigned long src_start,
1587 /* Skip finding src_vma if src_start is in dst_vma */
1588 if (src_start >= vma->vm_start && src_start < vma->vm_end)
1591 vma = vma_lookup(mm, src_start);
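find_vmas_mm_locked() avoids a second lookup when one VMA backs both ranges: if src_start already falls inside the VMA found for dst_start (1588), that VMA is reused, and only otherwise does vma_lookup() run (1591). A sketch of the shortcut, assuming the surrounding structure and the error code:

    /* Sketch of the single-VMA shortcut at 1587-1591. */
    *dst_vmap = vma;                 /* VMA already found for dst_start */
    if (src_start >= vma->vm_start && src_start < vma->vm_end) {
            *src_vmap = vma;         /* one VMA backs both ranges */
            return 0;
    }

    vma = vma_lookup(mm, src_start);
    if (!vma)
            return -ENOENT;          /* error code assumed */
    *src_vmap = vma;
    return 0;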
1602 unsigned long src_start,
1615 * Skip finding src_vma if src_start is in dst_vma. This also ensures
1618 if (src_start >= vma->vm_start && src_start < vma->vm_end) {
1635 *src_vmap = lock_vma_under_rcu(mm, src_start);
1643 err = find_vmas_mm_locked(mm, dst_start, src_start, dst_vmap, src_vmap);
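The per-VMA-lock fast path (1602-1643) repeats the same containment test before locking: reusing the dst VMA when src_start lies inside it also guarantees the same VMA is never locked twice (the comment at 1615-1618), and only a lock_vma_under_rcu() miss at 1635 forces the fallback to the mmap lock and find_vmas_mm_locked() at 1643. A condensed sketch, assuming the surrounding error handling; the real code re-takes per-VMA locks before dropping the mmap lock:

    /* Condensed sketch of the fast path with mmap-lock fallback. */
    *src_vmap = lock_vma_under_rcu(mm, src_start);
    if (*src_vmap)
            return 0;

    /* Miss: drop the dst per-VMA lock, retry under the mmap lock. */
    vma_end_read(*dst_vmap);
    mmap_read_lock(mm);
    err = find_vmas_mm_locked(mm, dst_start, src_start, dst_vmap, src_vmap);
    /* the real code re-locks both VMAs here before unlocking mm */
    mmap_read_unlock(mm);
    return err;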
1678 unsigned long src_start,
1685 err = find_vmas_mm_locked(mm, dst_start, src_start, dst_vmap, src_vmap);
1703 * @src_start: start of the source virtual memory range
1767 unsigned long src_start, unsigned long len, __u64 mode)
1777 VM_WARN_ON_ONCE(src_start & ~PAGE_MASK);
1782 VM_WARN_ON_ONCE(src_start + len < src_start);
1785 err = uffd_move_lock(mm, dst_start, src_start, &dst_vma, &src_vma);
1802 if (src_start + len > src_vma->vm_end)
1814 for (src_addr = src_start, dst_addr = dst_start, src_end = src_start + len;
1858 if (move_splits_huge_pmd(dst_addr, src_addr, src_start + len) ||
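The UFFDIO_MOVE entry point gathers all the src_start hygiene in one place: page-alignment (1777) and overflow (1782) warnings, VMA locking (1785), a fit-within-the-source-VMA check (1802), a dual-cursor walk bounded by src_start + len (1814), and detection of moves that would split a huge PMD at a range edge (1858). A sketch of that prologue and loop skeleton; the error code and the elided checks are assumptions:

    /* Sketch of move_pages() validation and walk, per 1767-1858. */
    VM_WARN_ON_ONCE(src_start & ~PAGE_MASK);
    VM_WARN_ON_ONCE(src_start + len < src_start);

    err = uffd_move_lock(mm, dst_start, src_start, &dst_vma, &src_vma);
    if (err)
            return err;

    err = -EINVAL;                        /* error code assumed */
    if (src_start + len > src_vma->vm_end)
            goto out_unlock;              /* source range must fit one VMA */

    for (src_addr = src_start, dst_addr = dst_start, src_end = src_start + len;
         src_addr < src_end;) {
            /* per-PMD step: 1858 detects a move that would split a huge
             * PMD at the range edge; the real code splits and retries
             * with page-sized steps, else moves and advances the cursors */
    }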