Lines matching refs: umem
80 mr->umem = NULL; in mlx4_ib_get_dma_mr()
94 struct ib_umem *umem) in mlx4_ib_umem_write_mtt() argument
109 for_each_sg(umem->sg_head.sgl, sg, umem->nmap, entry) { in mlx4_ib_umem_write_mtt()
113 umem->page_size * k; in mlx4_ib_umem_write_mtt()
153 mr->umem = ib_umem_get(pd->uobject->context, start, length, in mlx4_ib_reg_user_mr()
155 if (IS_ERR(mr->umem)) { in mlx4_ib_reg_user_mr()
156 err = PTR_ERR(mr->umem); in mlx4_ib_reg_user_mr()
160 n = ib_umem_page_count(mr->umem); in mlx4_ib_reg_user_mr()
161 shift = ilog2(mr->umem->page_size); in mlx4_ib_reg_user_mr()
168 err = mlx4_ib_umem_write_mtt(dev, &mr->mmr.mtt, mr->umem); in mlx4_ib_reg_user_mr()
184 ib_umem_release(mr->umem); in mlx4_ib_reg_user_mr()
233 ib_umem_release(mmr->umem); in mlx4_ib_rereg_user_mr()
234 mmr->umem = ib_umem_get(mr->uobject->context, start, length, in mlx4_ib_rereg_user_mr()
238 if (IS_ERR(mmr->umem)) { in mlx4_ib_rereg_user_mr()
239 err = PTR_ERR(mmr->umem); in mlx4_ib_rereg_user_mr()
241 mmr->umem = NULL; in mlx4_ib_rereg_user_mr()
244 n = ib_umem_page_count(mmr->umem); in mlx4_ib_rereg_user_mr()
245 shift = ilog2(mmr->umem->page_size); in mlx4_ib_rereg_user_mr()
251 ib_umem_release(mmr->umem); in mlx4_ib_rereg_user_mr()
257 err = mlx4_ib_umem_write_mtt(dev, &mmr->mmr.mtt, mmr->umem); in mlx4_ib_rereg_user_mr()
260 ib_umem_release(mmr->umem); in mlx4_ib_rereg_user_mr()
336 if (mr->umem) in mlx4_ib_dereg_mr()
337 ib_umem_release(mr->umem); in mlx4_ib_dereg_mr()
416 mr->umem = NULL; in mlx4_ib_alloc_mr()
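
The matches at lines 94-113 sit inside mlx4_ib_umem_write_mtt(), which walks the pinned region's scatterlist and turns each umem page into an MTT entry. A minimal sketch of that walk follows, assuming the older ib_umem layout these matches imply (umem->sg_head.sgl, umem->nmap and a page_size field); the real driver batches its mlx4_write_mtt() calls into page-sized chunks, which is omitted here.

/*
 * Sketch of the loop at lines 109-113 above.  Assumes the older ib_umem
 * layout (sg_head.sgl, nmap, page_size); issues one mlx4_write_mtt()
 * call per page instead of the driver's batched writes.
 */
static int umem_write_mtt_sketch(struct mlx4_ib_dev *dev,
				 struct mlx4_mtt *mtt,
				 struct ib_umem *umem)
{
	struct scatterlist *sg;
	u64 page_addr;
	int entry, k, len, n = 0, err;

	/* Walk every DMA-mapped scatterlist entry of the pinned buffer. */
	for_each_sg(umem->sg_head.sgl, sg, umem->nmap, entry) {
		len = sg_dma_len(sg) / umem->page_size;
		for (k = 0; k < len; ++k) {
			/* One MTT entry per umem page within this SG entry. */
			page_addr = sg_dma_address(sg) + umem->page_size * k;
			err = mlx4_write_mtt(dev->dev, mtt, n++, 1, &page_addr);
			if (err)
				return err;
		}
	}
	return 0;
}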
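
The remaining matches trace the lifetime of mr->umem through the user-MR paths: pinned in mlx4_ib_reg_user_mr() (lines 153-184), re-pinned after releasing the old mapping in mlx4_ib_rereg_user_mr() (233-260), and dropped in mlx4_ib_dereg_mr() (336-337); DMA and allocated MRs never own a umem, so lines 80 and 416 just set the field to NULL. Below is a hedged sketch of the registration-side ordering only, assuming the older ib_umem_get() signature that takes an ib_ucontext plus a dmasync flag, the since-removed page_size field, and the driver's local to_mpd()/convert_access() helpers; the mlx4_mr_alloc() arguments are included for context and should be treated as illustrative.

/*
 * Ordering of the umem calls in the registration path, reconstructed
 * from the matches above; hardware-MR cleanup on error is abbreviated.
 */
static int reg_user_mr_sketch(struct mlx4_ib_dev *dev, struct mlx4_ib_mr *mr,
			      struct ib_pd *pd, u64 start, u64 virt_addr,
			      u64 length, int access_flags)
{
	int n, shift, err;

	/* Pin and DMA-map the user buffer (line 153). */
	mr->umem = ib_umem_get(pd->uobject->context, start, length,
			       access_flags, 0);
	if (IS_ERR(mr->umem))
		return PTR_ERR(mr->umem);

	n = ib_umem_page_count(mr->umem);	/* pages to describe (line 160) */
	shift = ilog2(mr->umem->page_size);	/* HCA page shift    (line 161) */

	/* Allocate a hardware MR with room for n MTT entries of 1 << shift
	 * bytes; convert_access() is the driver's IB-to-MLX4 flag helper. */
	err = mlx4_mr_alloc(dev->dev, to_mpd(pd)->pdn, virt_addr, length,
			    convert_access(access_flags), n, shift, &mr->mmr);
	if (err)
		goto release;

	/* Fill the MTT from the pinned pages (line 168). */
	err = mlx4_ib_umem_write_mtt(dev, &mr->mmr.mtt, mr->umem);
	if (err)
		goto release;	/* the real code also frees the HW MR here */

	return 0;

release:
	ib_umem_release(mr->umem);		/* unpin on failure (line 184) */
	mr->umem = NULL;
	return err;
}

The rereg path repeats the same get/page_count/write_mtt sequence after first releasing the previous umem (line 233), and clears mmr->umem when the new ib_umem_get() fails (line 241) so that the NULL check in mlx4_ib_dereg_mr() at line 336 stays safe.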