Lines matching refs:user_va (references to the user_va symbol in the mlx5 on-demand paging fault path)
671 u64 user_va, size_t bcnt, u32 *bytes_mapped, in pagefault_real_mr() argument
685 start_idx = (user_va - ib_umem_start(odp)) >> page_shift; in pagefault_real_mr()
691 np = ib_umem_odp_map_dma_and_lock(odp, user_va, bcnt, access_mask, fault); in pagefault_real_mr()
711 (user_va - round_down(user_va, 1 << page_shift)); in pagefault_real_mr()
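
The pagefault_real_mr() hits above are pure index arithmetic: user_va arrives as an argument, is turned into a page index relative to ib_umem_start(), is passed to ib_umem_odp_map_dma_and_lock(), and finally has its offset within the first page subtracted from the freshly pinned pages. A minimal user-space sketch of that arithmetic follows; umem_start, page_shift and np are hypothetical stand-ins for the ODP umem start, its page shift and the page count returned by the map-and-lock call.

/*
 * User-space model of the index arithmetic in pagefault_real_mr().
 * All names and values here are illustrative stand-ins, not driver code.
 */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
        uint64_t umem_start = 0x7f0000000000ULL; /* ib_umem_start(odp) stand-in */
        unsigned int page_shift = 12;            /* 4 KiB pages */
        uint64_t user_va = 0x7f0000003a80ULL;    /* faulting user virtual address */
        uint64_t np = 3;                         /* pages pinned by the map-and-lock call */

        /* First page index covered by the fault, as on the start_idx line. */
        uint64_t start_idx = (user_va - umem_start) >> page_shift;

        /* Bytes newly mapped: whole pages minus user_va's offset into its first page. */
        uint64_t page_off = user_va - (user_va & ~((1ULL << page_shift) - 1));
        uint64_t new_mappings = (np << page_shift) - page_off;

        printf("start_idx=%llu page_off=%llu new_mappings=%llu\n",
               (unsigned long long)start_idx,
               (unsigned long long)page_off,
               (unsigned long long)new_mappings);
        return 0;
}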
723 struct ib_umem_odp *odp_imr, u64 user_va, in pagefault_implicit_mr() argument
726 unsigned long end_idx = (user_va + bcnt - 1) >> MLX5_IMR_MTT_SHIFT; in pagefault_implicit_mr()
733 if (unlikely(user_va >= mlx5_imr_ksm_entries * MLX5_IMR_MTT_SIZE || in pagefault_implicit_mr()
734 mlx5_imr_ksm_entries * MLX5_IMR_MTT_SIZE - user_va < bcnt)) in pagefault_implicit_mr()
739 unsigned long idx = user_va >> MLX5_IMR_MTT_SHIFT; in pagefault_implicit_mr()
756 len = min_t(u64, user_va + bcnt, ib_umem_end(umem_odp)) - in pagefault_implicit_mr()
757 user_va; in pagefault_implicit_mr()
759 ret = pagefault_real_mr(mtt, umem_odp, user_va, len, in pagefault_implicit_mr()
763 user_va += len; in pagefault_implicit_mr()
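
The pagefault_implicit_mr() hits show the two halves of the implicit-MR path: an overflow-safe bounds check against mlx5_imr_ksm_entries * MLX5_IMR_MTT_SIZE, then a loop that visits each child MTT intersecting the faulted range, clamping the length to the end of the child umem and advancing user_va by what was just faulted. Below is a user-space sketch of that walk under the assumption that every child covers exactly one MLX5_IMR_MTT_SIZE chunk; IMR_MTT_SHIFT, KSM_ENTRIES and fault_child() are hypothetical stand-ins for MLX5_IMR_MTT_SHIFT, mlx5_imr_ksm_entries and the nested pagefault_real_mr() call.

/*
 * User-space sketch of the child-MTT walk in pagefault_implicit_mr().
 * Illustrative stand-ins only; the driver additionally looks up or creates
 * the child MR before faulting it.
 */
#include <stdint.h>
#include <stdio.h>

#define IMR_MTT_SHIFT   25                        /* stand-in for MLX5_IMR_MTT_SHIFT */
#define IMR_MTT_SIZE    (1ULL << IMR_MTT_SHIFT)   /* stand-in for MLX5_IMR_MTT_SIZE */
#define KSM_ENTRIES     1024ULL                   /* stand-in for mlx5_imr_ksm_entries */

/* Hypothetical stand-in for the per-child pagefault_real_mr() call. */
static void fault_child(uint64_t idx, uint64_t user_va, uint64_t len)
{
        printf("child %llu: fault va=0x%llx len=0x%llx\n",
               (unsigned long long)idx,
               (unsigned long long)user_va,
               (unsigned long long)len);
}

int main(void)
{
        uint64_t user_va = 3 * IMR_MTT_SIZE - 0x1000; /* straddles children 2 and 3 */
        uint64_t bcnt = 0x3000;

        /* Overflow-safe bounds check, as on the two unlikely() lines above. */
        if (user_va >= KSM_ENTRIES * IMR_MTT_SIZE ||
            KSM_ENTRIES * IMR_MTT_SIZE - user_va < bcnt) {
                fprintf(stderr, "fault outside the implicit MR\n");
                return 1;
        }

        /* Visit every child MTT that intersects [user_va, user_va + bcnt). */
        while (bcnt) {
                uint64_t idx = user_va >> IMR_MTT_SHIFT;
                /* End of this child's range; plays the role of ib_umem_end(). */
                uint64_t umem_end = (idx + 1) * IMR_MTT_SIZE;
                /* Clamp to the child, as on the min_t() lines above. */
                uint64_t len = (user_va + bcnt < umem_end ?
                                user_va + bcnt : umem_end) - user_va;

                fault_child(idx, user_va, len);

                user_va += len;
                bcnt -= len;
        }
        return 0;
}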
819 u64 user_va; in pagefault_mr() local
822 (u64)odp->umem.address, &user_va)) in pagefault_mr()
824 if (unlikely(user_va >= ib_umem_end(odp) || in pagefault_mr()
825 ib_umem_end(odp) - user_va < bcnt)) in pagefault_mr()
827 return pagefault_real_mr(mr, odp, user_va, bcnt, bytes_mapped, in pagefault_mr()
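
The final group is pagefault_mr() computing a local user_va: the faulting I/O virtual address is translated into a process virtual address by adding the umem base address to (io_virt - iova), with the result checked for overflow (the call clipped from the listing is check_add_overflow() in current kernels), and then rejected if it starts at or runs past ib_umem_end() before being handed to pagefault_real_mr(). A user-space sketch of that translation follows; iova, umem_address, umem_end, io_virt and bcnt are hypothetical example values, and the kernel's check_add_overflow() is modelled with the GCC/Clang builtin it wraps.

/*
 * User-space sketch of the io_virt -> user_va translation in pagefault_mr().
 * Illustrative stand-ins only.
 */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
        uint64_t iova = 0x1000;                     /* mr->ibmr.iova stand-in */
        uint64_t umem_address = 0x7f2000000000ULL;  /* odp->umem.address stand-in */
        uint64_t umem_end = 0x7f2000100000ULL;      /* ib_umem_end(odp) stand-in */
        uint64_t io_virt = 0x3a80;                  /* faulting I/O virtual address */
        uint64_t bcnt = 0x2000;
        uint64_t user_va;

        /* Translate the I/O virtual address into a process virtual address. */
        if (__builtin_add_overflow(io_virt - iova, umem_address, &user_va)) {
                fprintf(stderr, "address overflow\n");
                return 1;
        }
        /* Reject faults that start at or run past the end of the umem. */
        if (user_va >= umem_end || umem_end - user_va < bcnt) {
                fprintf(stderr, "fault out of range\n");
                return 1;
        }
        printf("user_va=0x%llx ok for %llu bytes\n",
               (unsigned long long)user_va, (unsigned long long)bcnt);
        return 0;
}

Note the shape of the bounds checks in both pagefault_mr() and pagefault_implicit_mr(): writing "end - user_va < bcnt" after first checking "user_va >= end" avoids the wrap-around that a naive "user_va + bcnt > end" comparison could hit with attacker-controlled values.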