Lines matching refs: user_va
666 u64 user_va, size_t bcnt, u32 *bytes_mapped, in pagefault_real_mr() argument
680 start_idx = (user_va - ib_umem_start(odp)) >> page_shift; in pagefault_real_mr()
686 np = ib_umem_odp_map_dma_and_lock(odp, user_va, bcnt, access_mask, fault); in pagefault_real_mr()
706 (user_va - round_down(user_va, 1 << page_shift)); in pagefault_real_mr()
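The pagefault_real_mr() hits above show the two pieces of user_va arithmetic in that function: the starting page index relative to ib_umem_start() (line 680) and the count of newly mapped bytes after rounding user_va down to a page boundary (line 706). The user-space sketch below reproduces just that arithmetic; the addresses, page_shift and np values are invented for the example, and round_down() is re-implemented locally instead of coming from kernel headers.

	/* Illustrative, user-space sketch of the user_va arithmetic seen in
	 * pagefault_real_mr(); all input values below are made up. */
	#include <stdint.h>
	#include <stdio.h>

	#define round_down(x, y) ((x) & ~((uint64_t)(y) - 1))

	int main(void)
	{
		uint64_t umem_start = 0x7f0000000000ULL; /* assumed ib_umem_start(odp) */
		uint64_t user_va    = 0x7f0000003a00ULL; /* faulting address           */
		unsigned int page_shift = 12;            /* assumed 4 KiB pages        */
		unsigned long np = 3;                    /* pages mapped by the fault  */

		/* First page index touched by this fault, as on line 680. */
		uint64_t start_idx = (user_va - umem_start) >> page_shift;

		/* Bytes newly mapped from user_va onward, as on line 706: the
		 * full np pages minus user_va's offset into its first page. */
		uint64_t new_mappings = ((uint64_t)np << page_shift) -
					(user_va - round_down(user_va, 1ULL << page_shift));

		printf("start_idx=%llu new_mappings=%llu\n",
		       (unsigned long long)start_idx,
		       (unsigned long long)new_mappings);
		return 0;
	}
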
718 struct ib_umem_odp *odp_imr, u64 user_va, in pagefault_implicit_mr() argument
721 unsigned long end_idx = (user_va + bcnt - 1) >> MLX5_IMR_MTT_SHIFT; in pagefault_implicit_mr()
728 if (unlikely(user_va >= mlx5_imr_ksm_entries * MLX5_IMR_MTT_SIZE || in pagefault_implicit_mr()
729 mlx5_imr_ksm_entries * MLX5_IMR_MTT_SIZE - user_va < bcnt)) in pagefault_implicit_mr()
734 unsigned long idx = user_va >> MLX5_IMR_MTT_SHIFT; in pagefault_implicit_mr()
756 len = min_t(u64, user_va + bcnt, ib_umem_end(umem_odp)) - in pagefault_implicit_mr()
757 user_va; in pagefault_implicit_mr()
759 ret = pagefault_real_mr(mtt, umem_odp, user_va, len, in pagefault_implicit_mr()
766 user_va += len; in pagefault_implicit_mr()
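The pagefault_implicit_mr() hits show how a fault on an implicit (parent) MR is split across MLX5_IMR_MTT_SIZE children: the range is bounds-checked against mlx5_imr_ksm_entries * MLX5_IMR_MTT_SIZE with overflow-safe arithmetic (lines 728-729), each iteration clamps its length to the end of the current child (lines 756-757), faults it through pagefault_real_mr() (line 759), and then advances user_va (line 766). The sketch below mirrors only that chunking loop; mlx5_imr_ksm_entries, the child-end computation and the printf stand in for driver state and the real per-child fault handler.

	/* User-space sketch (not kernel code) of the pagefault_implicit_mr()
	 * chunking loop over [user_va, user_va + bcnt). */
	#include <stdint.h>
	#include <stdio.h>

	#define MLX5_IMR_MTT_SHIFT 30                      /* 1 GiB children, as in mlx5 */
	#define MLX5_IMR_MTT_SIZE  (1ULL << MLX5_IMR_MTT_SHIFT)

	static const uint64_t mlx5_imr_ksm_entries = 1024; /* assumed table size */

	static uint64_t min_u64(uint64_t a, uint64_t b) { return a < b ? a : b; }

	static int fault_range(uint64_t user_va, uint64_t bcnt)
	{
		/* Overflow-safe bounds check, as on lines 728-729: written as a
		 * subtraction so user_va + bcnt never has to be formed. */
		if (user_va >= mlx5_imr_ksm_entries * MLX5_IMR_MTT_SIZE ||
		    mlx5_imr_ksm_entries * MLX5_IMR_MTT_SIZE - user_va < bcnt)
			return -1;

		while (bcnt) {
			uint64_t idx = user_va >> MLX5_IMR_MTT_SHIFT;   /* child index (line 734) */
			uint64_t child_end = (idx + 1) << MLX5_IMR_MTT_SHIFT;
			/* Clamp this chunk to the end of the current child, as on
			 * lines 756-757 (ib_umem_end() of the child in the kernel). */
			uint64_t len = min_u64(user_va + bcnt, child_end) - user_va;

			/* Stand-in for pagefault_real_mr(mtt, ...) on line 759. */
			printf("fault child %llu: va=0x%llx len=0x%llx\n",
			       (unsigned long long)idx,
			       (unsigned long long)user_va,
			       (unsigned long long)len);

			user_va += len;                                 /* line 766 */
			bcnt -= len;
		}
		return 0;
	}

	int main(void)
	{
		/* Example: a 3 GiB fault starting 512 MiB into child 0. */
		return fault_range(512ULL << 20, 3ULL << 30);
	}
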
864 u64 user_va; in pagefault_mr() local
867 &user_va)) in pagefault_mr()
871 if (user_va < ib_umem_start(odp)) in pagefault_mr()
872 user_va = ib_umem_start(odp); in pagefault_mr()
873 if ((user_va + bcnt) > ib_umem_end(odp)) in pagefault_mr()
874 bcnt = ib_umem_end(odp) - user_va; in pagefault_mr()
875 } else if (unlikely(user_va >= ib_umem_end(odp) || in pagefault_mr()
876 ib_umem_end(odp) - user_va < bcnt)) in pagefault_mr()
878 return pagefault_real_mr(mr, odp, user_va, bcnt, bytes_mapped, in pagefault_mr()
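The pagefault_mr() hits show how the faulting io_virt is first translated into a user_va with an overflow-checked addition (lines 864-867) and then either clamped into the umem's [ib_umem_start(), ib_umem_end()) range (lines 871-874) or rejected when it falls outside it (lines 875-876), before being handed to pagefault_real_mr() (line 878). The sketch below reproduces that shape in user space; struct fake_odp, the clamp_to_umem flag and resolve_user_va() are invented names, and __builtin_add_overflow() stands in for the kernel's check_add_overflow().

	/* User-space sketch of the user_va derivation and range handling
	 * seen in pagefault_mr(); names and values are illustrative only. */
	#include <stdbool.h>
	#include <stdint.h>
	#include <stdio.h>

	struct fake_odp {
		uint64_t umem_start;   /* ib_umem_start(odp) */
		uint64_t umem_end;     /* ib_umem_end(odp)   */
		uint64_t umem_address; /* odp->umem.address  */
		bool clamp_to_umem;    /* stand-in for whichever condition selects the clamping branch */
	};

	static int resolve_user_va(const struct fake_odp *odp, uint64_t io_virt,
				   uint64_t bcnt, uint64_t *user_va_out,
				   uint64_t *bcnt_out)
	{
		uint64_t user_va;

		/* check_add_overflow() analog (lines 864-867): fail if it wraps. */
		if (__builtin_add_overflow(io_virt - odp->umem_start,
					   odp->umem_address, &user_va))
			return -1;

		if (odp->clamp_to_umem) {
			/* Clamp the range into the umem, as on lines 871-874. */
			if (user_va < odp->umem_start)
				user_va = odp->umem_start;
			if (user_va + bcnt > odp->umem_end)
				bcnt = odp->umem_end - user_va;
		} else if (user_va >= odp->umem_end ||
			   odp->umem_end - user_va < bcnt) {
			/* Otherwise reject anything outside the umem (lines 875-876). */
			return -1;
		}

		*user_va_out = user_va;
		*bcnt_out = bcnt;
		return 0;
	}

	int main(void)
	{
		struct fake_odp odp = {
			.umem_start   = 0x7f0000000000ULL,
			.umem_end     = 0x7f0000100000ULL, /* 1 MiB umem */
			.umem_address = 0x7f0000000000ULL,
			.clamp_to_umem = true,
		};
		uint64_t user_va, bcnt = 0x200000;         /* 2 MiB request, will be clamped */

		if (!resolve_user_va(&odp, 0x7f0000080000ULL, bcnt, &user_va, &bcnt))
			printf("user_va=0x%llx bcnt=0x%llx\n",
			       (unsigned long long)user_va, (unsigned long long)bcnt);
		return 0;
	}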