Lines matching refs: imr
98 struct mlx5_ib_mr *imr, int flags) in populate_klm() argument
105 pklm->key = cpu_to_be32(imr->dev->null_mkey); in populate_klm()
131 lockdep_assert_held(&to_ib_umem_odp(imr->umem)->umem_mutex); in populate_klm()
132 lockdep_assert_held(&imr->dev->odp_srcu); in populate_klm()
135 struct mlx5_ib_mr *mtt = xa_load(&imr->implicit_children, idx); in populate_klm()
142 pklm->key = cpu_to_be32(imr->dev->null_mkey); in populate_klm()
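
The populate_klm() matches above show the fill pattern: for each table index the parent looks up a child MR in imr->implicit_children and, when nothing is installed, points the entry at the device's null_mkey instead. Below is a minimal userspace sketch of that shape only; klm_entry, children[] and NULL_KEY are hypothetical stand-ins, not the mlx5 structures.

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    #define NCHILDREN 8
    #define NULL_KEY  0xdeadbeefu            /* stand-in for dev->null_mkey */

    struct klm_entry { uint32_t key; uint64_t va; };

    /* sparse child table: 0 means no child MR backs this index yet */
    static uint32_t children[NCHILDREN] = { [2] = 0x100, [5] = 0x200 };

    static void populate(struct klm_entry *pklm, size_t idx, size_t nentries)
    {
            for (size_t i = 0; i < nentries; i++, pklm++, idx++) {
                    uint32_t child = idx < NCHILDREN ? children[idx] : 0;

                    /* no child installed: point the entry at the null key */
                    pklm->key = child ? child : NULL_KEY;
                    pklm->va = 0;
            }
    }

    int main(void)
    {
            struct klm_entry tbl[NCHILDREN];

            populate(tbl, 0, NCHILDREN);
            for (size_t i = 0; i < NCHILDREN; i++)
                    printf("klm[%zu].key = 0x%x\n", i, (unsigned)tbl[i].key);
            return 0;
    }
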
215 struct mlx5_ib_mr *imr = mr->parent; in free_implicit_child_mr() local
216 struct ib_umem_odp *odp_imr = to_ib_umem_odp(imr->umem); in free_implicit_child_mr()
239 if (atomic_dec_and_test(&imr->num_deferred_work)) in free_implicit_child_mr()
240 wake_up(&imr->q_deferred_work); in free_implicit_child_mr()
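
The free_implicit_child_mr() matches show the deferred-work accounting: each deferred free drops imr->num_deferred_work and, when it reaches zero, wakes imr->q_deferred_work so the parent's teardown can make progress. A userspace analogue of that counter-plus-wait pairing, assuming hypothetical pending/done/worker names (the kernel uses atomic_dec_and_test() plus a wait queue):

    #include <pthread.h>
    #include <stdatomic.h>
    #include <stdio.h>

    static atomic_int pending = 3;
    static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
    static pthread_cond_t  done = PTHREAD_COND_INITIALIZER;

    static void *worker(void *arg)
    {
            /* ... free one child MR ... */
            if (atomic_fetch_sub(&pending, 1) == 1) {       /* last one out */
                    pthread_mutex_lock(&lock);
                    pthread_cond_broadcast(&done);          /* wake_up() analogue */
                    pthread_mutex_unlock(&lock);
            }
            return arg;
    }

    int main(void)
    {
            pthread_t t[3];

            for (int i = 0; i < 3; i++)
                    pthread_create(&t[i], NULL, worker, NULL);

            /* wait_event() analogue: sleep until every deferred free has run */
            pthread_mutex_lock(&lock);
            while (atomic_load(&pending))
                    pthread_cond_wait(&done, &lock);
            pthread_mutex_unlock(&lock);

            for (int i = 0; i < 3; i++)
                    pthread_join(t[i], NULL);
            puts("all deferred work drained");
            return 0;
    }

Broadcasting under the mutex is what keeps the wakeup from being lost, the same role the wait-queue locking plays in the kernel.
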
265 struct mlx5_ib_mr *imr = mr->parent; in destroy_unused_implicit_child_mr() local
267 xa_lock(&imr->implicit_children); in destroy_unused_implicit_child_mr()
272 if (__xa_cmpxchg(&imr->implicit_children, idx, mr, NULL, GFP_ATOMIC) != in destroy_unused_implicit_child_mr()
276 atomic_inc(&imr->num_deferred_work); in destroy_unused_implicit_child_mr()
281 xa_unlock(&imr->implicit_children); in destroy_unused_implicit_child_mr()
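
destroy_unused_implicit_child_mr() removes a child from the parent's table with __xa_cmpxchg() under xa_lock(), so the slot is cleared only if the child is still the installed entry, and it bumps the deferred-work count only when that removal wins. A sketch of the same conditional-removal step, with slots[], pending and table_lock as hypothetical userspace stand-ins for the xarray, num_deferred_work and xa_lock():

    #include <pthread.h>
    #include <stdbool.h>
    #include <stdio.h>

    #define NSLOTS 16

    static void *slots[NSLOTS];
    static int pending;
    static pthread_mutex_t table_lock = PTHREAD_MUTEX_INITIALIZER;

    static bool remove_if_current(unsigned long idx, void *expected)
    {
            bool removed = false;

            pthread_mutex_lock(&table_lock);        /* xa_lock() analogue */
            if (slots[idx] == expected) {           /* __xa_cmpxchg(old, NULL) */
                    slots[idx] = NULL;
                    pending++;                      /* account the deferred free */
                    removed = true;
            }
            pthread_mutex_unlock(&table_lock);
            return removed;
    }

    int main(void)
    {
            int child = 42;

            slots[3] = &child;
            printf("first removal:  %s\n", remove_if_current(3, &child) ? "won" : "lost");
            printf("second removal: %s\n", remove_if_current(3, &child) ? "won" : "lost");
            printf("pending deferred frees: %d\n", pending);
            return 0;
    }
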
465 static struct mlx5_ib_mr *implicit_get_child_mr(struct mlx5_ib_mr *imr, in implicit_get_child_mr() argument
473 odp = ib_umem_odp_alloc_child(to_ib_umem_odp(imr->umem), in implicit_get_child_mr()
479 ret = mr = mlx5_mr_cache_alloc(imr->dev, MLX5_IMR_MTT_CACHE_ENTRY, in implicit_get_child_mr()
480 imr->access_flags); in implicit_get_child_mr()
484 mr->ibmr.pd = imr->ibmr.pd; in implicit_get_child_mr()
489 mr->parent = imr; in implicit_get_child_mr()
506 ret = xa_cmpxchg(&imr->implicit_children, idx, NULL, mr, in implicit_get_child_mr()
520 mlx5_ib_dbg(imr->dev, "key %x mr %p\n", mr->mmkey.key, mr); in implicit_get_child_mr()
524 mlx5_mr_cache_free(imr->dev, mr); in implicit_get_child_mr()
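
implicit_get_child_mr() builds the child off to the side (child ODP umem, cache MR, pd and parent wired up) and then publishes it with xa_cmpxchg() against NULL; if another thread installed a child first, the local copy is released back to the cache. A small sketch of that lazy-install idiom, assuming a hypothetical table[]/get_child() instead of the implicit_children xarray:

    #include <stdatomic.h>
    #include <stdio.h>
    #include <stdlib.h>

    #define NSLOTS 16

    struct child { unsigned long idx; };

    static _Atomic(struct child *) table[NSLOTS];

    static struct child *get_child(unsigned long idx)
    {
            struct child *cur = atomic_load(&table[idx]);
            struct child *mine;

            if (cur)                                /* fast path: already installed */
                    return cur;

            mine = malloc(sizeof(*mine));           /* mlx5_mr_cache_alloc() analogue */
            if (!mine)
                    return NULL;
            mine->idx = idx;

            cur = NULL;
            if (atomic_compare_exchange_strong(&table[idx], &cur, mine))
                    return mine;                    /* we won the install race */

            free(mine);                             /* lost: drop ours, reuse winner */
            return cur;
    }

    int main(void)
    {
            struct child *a = get_child(7);
            struct child *b = get_child(7);

            printf("same child reused: %s\n", a == b ? "yes" : "no");
            free(a);
            return 0;
    }
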
536 struct mlx5_ib_mr *imr; in mlx5_ib_alloc_implicit_mr() local
543 imr = mlx5_mr_cache_alloc(dev, MLX5_IMR_KSM_CACHE_ENTRY, access_flags); in mlx5_ib_alloc_implicit_mr()
544 if (IS_ERR(imr)) { in mlx5_ib_alloc_implicit_mr()
545 err = PTR_ERR(imr); in mlx5_ib_alloc_implicit_mr()
549 imr->ibmr.pd = &pd->ibpd; in mlx5_ib_alloc_implicit_mr()
550 imr->mmkey.iova = 0; in mlx5_ib_alloc_implicit_mr()
551 imr->umem = &umem_odp->umem; in mlx5_ib_alloc_implicit_mr()
552 imr->ibmr.lkey = imr->mmkey.key; in mlx5_ib_alloc_implicit_mr()
553 imr->ibmr.rkey = imr->mmkey.key; in mlx5_ib_alloc_implicit_mr()
554 imr->umem = &umem_odp->umem; in mlx5_ib_alloc_implicit_mr()
555 imr->is_odp_implicit = true; in mlx5_ib_alloc_implicit_mr()
556 atomic_set(&imr->num_deferred_work, 0); in mlx5_ib_alloc_implicit_mr()
557 init_waitqueue_head(&imr->q_deferred_work); in mlx5_ib_alloc_implicit_mr()
558 xa_init(&imr->implicit_children); in mlx5_ib_alloc_implicit_mr()
560 err = mlx5_ib_update_xlt(imr, 0, in mlx5_ib_alloc_implicit_mr()
569 err = xa_err(xa_store(&dev->odp_mkeys, mlx5_base_mkey(imr->mmkey.key), in mlx5_ib_alloc_implicit_mr()
570 &imr->mmkey, GFP_KERNEL)); in mlx5_ib_alloc_implicit_mr()
574 mlx5_ib_dbg(dev, "key %x mr %p\n", imr->mmkey.key, imr); in mlx5_ib_alloc_implicit_mr()
575 return imr; in mlx5_ib_alloc_implicit_mr()
578 mlx5_mr_cache_free(dev, imr); in mlx5_ib_alloc_implicit_mr()
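
mlx5_ib_alloc_implicit_mr() sets up the parent's bookkeeping (keys, deferred-work counter, wait queue, implicit_children xarray), programs the KSM through mlx5_ib_update_xlt(), and only then stores the mkey in dev->odp_mkeys, unwinding through the cache-free path on error. The sketch below mirrors only that initialize-then-publish-then-unwind shape; struct parent, publish() and registry are invented for the example and are not driver names:

    #include <errno.h>
    #include <pthread.h>
    #include <stdatomic.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct parent {
            atomic_int      num_deferred_work;
            pthread_cond_t  q_deferred_work;
            pthread_mutex_t lock;
            void            *children[16];  /* stand-in for the xarray */
    };

    static struct parent *registry;          /* stand-in for dev->odp_mkeys */

    static int publish(struct parent *p)
    {
            if (registry)
                    return -EEXIST;
            registry = p;
            return 0;
    }

    static struct parent *alloc_implicit_parent(void)
    {
            struct parent *p = calloc(1, sizeof(*p));
            int err;

            if (!p)
                    return NULL;

            atomic_init(&p->num_deferred_work, 0);
            pthread_cond_init(&p->q_deferred_work, NULL);
            pthread_mutex_init(&p->lock, NULL);

            err = publish(p);                /* make it discoverable last */
            if (err)
                    goto out_free;
            return p;

    out_free:
            free(p);
            return NULL;
    }

    int main(void)
    {
            struct parent *p = alloc_implicit_parent();

            printf("parent %s\n", p ? "created and published" : "creation failed");
            free(p);
            return 0;
    }

Publishing last matters: once the mkey is visible in the lookup structure, page faults can start creating children, so everything the fault path relies on has to be initialized first.
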
584 void mlx5_ib_free_implicit_mr(struct mlx5_ib_mr *imr) in mlx5_ib_free_implicit_mr() argument
586 struct ib_umem_odp *odp_imr = to_ib_umem_odp(imr->umem); in mlx5_ib_free_implicit_mr()
587 struct mlx5_ib_dev *dev = imr->dev; in mlx5_ib_free_implicit_mr()
595 xa_erase(&dev->odp_mkeys, mlx5_base_mkey(imr->mmkey.key)); in mlx5_ib_free_implicit_mr()
607 wait_event(imr->q_deferred_work, !atomic_read(&imr->num_deferred_work)); in mlx5_ib_free_implicit_mr()
620 xa_lock(&imr->implicit_children); in mlx5_ib_free_implicit_mr()
621 xa_for_each (&imr->implicit_children, idx, mtt) { in mlx5_ib_free_implicit_mr()
622 __xa_erase(&imr->implicit_children, idx); in mlx5_ib_free_implicit_mr()
625 xa_unlock(&imr->implicit_children); in mlx5_ib_free_implicit_mr()
631 wait_event(imr->q_deferred_work, !atomic_read(&imr->num_deferred_work)); in mlx5_ib_free_implicit_mr()
638 mlx5_mr_cache_invalidate(imr); in mlx5_ib_free_implicit_mr()
643 mlx5_mr_cache_free(dev, imr); in mlx5_ib_free_implicit_mr()
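
mlx5_ib_free_implicit_mr() reverses that order: it erases the mkey from dev->odp_mkeys so no new users can find it, drains deferred work, sweeps every child out of implicit_children under xa_lock() with __xa_erase(), drains again, invalidates the parent and finally frees it. A sketch of just the child-table sweep, with a plain array and mutex standing in for the xarray, and destroy_child() as a hypothetical destructor:

    #include <pthread.h>
    #include <stdio.h>
    #include <stdlib.h>

    #define NSLOTS 16

    static void *slots[NSLOTS];
    static pthread_mutex_t table_lock = PTHREAD_MUTEX_INITIALIZER;

    static void destroy_child(void *mtt)
    {
            free(mtt);
    }

    static void sweep_children(void)
    {
            pthread_mutex_lock(&table_lock);
            for (unsigned long idx = 0; idx < NSLOTS; idx++) {
                    void *mtt = slots[idx];

                    if (!mtt)
                            continue;
                    slots[idx] = NULL;              /* __xa_erase() analogue */
                    destroy_child(mtt);
            }
            pthread_mutex_unlock(&table_lock);
    }

    int main(void)
    {
            slots[1] = malloc(8);
            slots[9] = malloc(8);
            sweep_children();
            puts("children swept");
            return 0;
    }
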
722 static int pagefault_implicit_mr(struct mlx5_ib_mr *imr, in pagefault_implicit_mr() argument
744 mtt = xa_load(&imr->implicit_children, idx); in pagefault_implicit_mr()
746 mtt = implicit_get_child_mr(imr, idx); in pagefault_implicit_mr()
789 err = mlx5_ib_update_xlt(imr, upd_start_idx, upd_len, 0, in pagefault_implicit_mr()
794 mlx5_ib_err(imr->dev, "Failed to update PAS\n"); in pagefault_implicit_mr()
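
pagefault_implicit_mr() resolves the child covering each faulting index (creating it through implicit_get_child_mr() when xa_load() finds nothing) and then issues a single mlx5_ib_update_xlt() over the range of entries it changed, logging "Failed to update PAS" on error. A sketch of that fault-loop shape, with get_child() and update_parent_range() as hypothetical stand-ins for the driver calls:

    #include <stdio.h>

    #define NSLOTS 16

    static int children[NSLOTS];     /* 0 = no child yet */

    static int get_child(unsigned long idx)
    {
            if (!children[idx])
                    children[idx] = 0x100 + (int)idx;       /* lazily create */
            return children[idx];
    }

    static void update_parent_range(unsigned long start, unsigned long len)
    {
            printf("update parent XLT: start=%lu len=%lu\n", start, len);
    }

    static void fault_range(unsigned long first, unsigned long last)
    {
            unsigned long upd_start = (unsigned long)-1, upd_end = 0;

            for (unsigned long idx = first; idx <= last; idx++) {
                    int newly_created = !children[idx];

                    get_child(idx);          /* make sure a child backs this index */
                    if (newly_created) {     /* parent table entry is now stale */
                            if (idx < upd_start)
                                    upd_start = idx;
                            if (idx > upd_end)
                                    upd_end = idx;
                    }
            }
            if (upd_start <= upd_end)
                    update_parent_range(upd_start, upd_end - upd_start + 1);
    }

    int main(void)
    {
            fault_range(3, 6);
            return 0;
    }
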