Home
last modified time | relevance | path

Searched refs:rdma_entry (Results 1 – 12 of 12) sorted by relevance

/linux/drivers/infiniband/hw/mlx5/
dm.c:158 context, &mentry->rdma_entry, size, in add_dm_mmap_entry()
180 page_idx = op_entry->mentry.rdma_entry.start_pgoff & 0xFFFF; in copy_op_to_user()
267 rdma_user_mmap_entry_remove(&op_entry->mentry.rdma_entry); in UVERBS_HANDLER()
317 page_idx = dm->mentry.rdma_entry.start_pgoff & 0xFFFF; in handle_alloc_dm_memic()
333 rdma_user_mmap_entry_remove(&dm->mentry.rdma_entry); in handle_alloc_dm_memic()
459 rdma_user_mmap_entry_remove(&entry->mentry.rdma_entry); in dm_memic_remove_ops()
467 rdma_user_mmap_entry_remove(&dm->mentry.rdma_entry); in mlx5_dm_memic_dealloc()
520 page_idx = memic->mentry.rdma_entry.start_pgoff & 0xFFFF; in UVERBS_HANDLER()
main.c:2696 rdma_user_mmap_entry_put(&mentry->rdma_entry); in mlx5_ib_mmap_offset()
2702 u64 cmd = (entry->rdma_entry.start_pgoff >> 16) & 0xFFFF; in mlx5_entry_to_mmap_offset()
2703 u64 index = entry->rdma_entry.start_pgoff & 0xFFFF; in mlx5_entry_to_mmap_offset()
4128 rdma_user_mmap_entry_remove(&obj->rdma_entry); in mmap_obj_cleanup()
4137 &c->ibucontext, &entry->rdma_entry, length, in mlx5_rdma_user_mmap_entry_insert()
4208 length = entry->rdma_entry.npages * PAGE_SIZE; in UVERBS_HANDLER()
4335 length = entry->rdma_entry.npages * PAGE_SIZE; in UVERBS_HANDLER()
/linux/drivers/infiniband/hw/hns/
hns_roce_main.c:380 ucontext, &entry->rdma_entry, length, 0); in hns_roce_user_mmap_entry_insert()
384 ucontext, &entry->rdma_entry, length, 1, in hns_roce_user_mmap_entry_insert()
404 &context->db_mmap_entry->rdma_entry); in hns_roce_dealloc_uar_entry()
523 struct rdma_user_mmap_entry *rdma_entry; in hns_roce_mmap() local
534 rdma_entry = rdma_user_mmap_entry_get_pgoff(uctx, vma->vm_pgoff); in hns_roce_mmap()
535 if (!rdma_entry) { in hns_roce_mmap()
540 entry = to_hns_mmap(rdma_entry); in hns_roce_mmap()
553 ret = rdma_user_mmap_io(uctx, vma, pfn, rdma_entry->npages * PAGE_SIZE, in hns_roce_mmap()
554 prot, rdma_entry); in hns_roce_mmap()
557 rdma_user_mmap_entry_put(rdma_entry); in hns_roce_mmap()
[all …]
/linux/drivers/infiniband/hw/efa/
efa_verbs.c:30 struct rdma_user_mmap_entry rdma_entry; member
173 to_emmap(struct rdma_user_mmap_entry *rdma_entry) in to_emmap() argument
175 return container_of(rdma_entry, struct efa_user_mmap_entry, rdma_entry); in to_emmap()
537 err = rdma_user_mmap_entry_insert(ucontext, &entry->rdma_entry, in efa_user_mmap_entry_insert()
543 *offset = rdma_user_mmap_get_offset(&entry->rdma_entry); in efa_user_mmap_entry_insert()
545 return &entry->rdma_entry; in efa_user_mmap_entry_insert()
2017 void efa_mmap_free(struct rdma_user_mmap_entry *rdma_entry) in efa_mmap_free() argument
2019 struct efa_user_mmap_entry *entry = to_emmap(rdma_entry); in efa_mmap_free()
2027 struct rdma_user_mmap_entry *rdma_entry; in __efa_mmap() local
2033 rdma_entry = rdma_user_mmap_entry_get(&ucontext->ibucontext, vma); in __efa_mmap()
[all …]
/linux/drivers/infiniband/sw/siw/
siw_verbs.c:47 void siw_mmap_free(struct rdma_user_mmap_entry *rdma_entry) in siw_mmap_free() argument
49 struct siw_user_mmap_entry *entry = to_siw_mmap_entry(rdma_entry); in siw_mmap_free()
58 struct rdma_user_mmap_entry *rdma_entry; in siw_mmap() local
69 rdma_entry = rdma_user_mmap_entry_get(&uctx->base_ucontext, vma); in siw_mmap()
70 if (!rdma_entry) { in siw_mmap()
75 entry = to_siw_mmap_entry(rdma_entry); in siw_mmap()
81 rdma_user_mmap_entry_put(rdma_entry); in siw_mmap()
287 &entry->rdma_entry, in siw_mmap_entry_insert()
294 *offset = rdma_user_mmap_get_offset(&entry->rdma_entry); in siw_mmap_entry_insert()
296 return &entry->rdma_entry; in siw_mmap_entry_insert()
siw.h:486 struct rdma_user_mmap_entry rdma_entry; member
592 return container_of(rdma_mmap, struct siw_user_mmap_entry, rdma_entry); in to_siw_mmap_entry()
/linux/drivers/infiniband/hw/ionic/
ionic_controlpath.c:354 rc = rdma_user_mmap_entry_insert(&ctx->ibctx, &entry->rdma_entry, in ionic_mmap_entry_insert()
362 *offset = rdma_user_mmap_get_offset(&entry->rdma_entry); in ionic_mmap_entry_insert()
364 return &entry->rdma_entry; in ionic_mmap_entry_insert()
444 struct rdma_user_mmap_entry *rdma_entry; in ionic_mmap() local
448 rdma_entry = rdma_user_mmap_entry_get(&ctx->ibctx, vma); in ionic_mmap()
449 if (!rdma_entry) { in ionic_mmap()
455 ionic_entry = container_of(rdma_entry, struct ionic_mmap_entry, in ionic_mmap()
456 rdma_entry); in ionic_mmap()
469 rdma_entry); in ionic_mmap()
473 rdma_user_mmap_entry_put(rdma_entry); in ionic_mmap()
[all …]
/linux/drivers/infiniband/hw/erdma/
erdma_verbs.c:305 ret = rdma_user_mmap_entry_insert(&uctx->ibucontext, &entry->rdma_entry, in erdma_user_mmap_entry_insert()
312 *mmap_offset = rdma_user_mmap_get_offset(&entry->rdma_entry); in erdma_user_mmap_entry_insert()
314 return &entry->rdma_entry; in erdma_user_mmap_entry_insert()
1412 struct rdma_user_mmap_entry *rdma_entry; in erdma_mmap() local
1417 rdma_entry = rdma_user_mmap_entry_get(ctx, vma); in erdma_mmap()
1418 if (!rdma_entry) in erdma_mmap()
1421 entry = to_emmap(rdma_entry); in erdma_mmap()
1434 prot, rdma_entry); in erdma_mmap()
1437 rdma_user_mmap_entry_put(rdma_entry); in erdma_mmap()
1441 void erdma_mmap_free(struct rdma_user_mmap_entry *rdma_entry) in erdma_mmap_free() argument
[all …]
/linux/drivers/infiniband/hw/bnxt_re/
ib_verbs.c:665 &entry->rdma_entry, PAGE_SIZE, 0); in bnxt_re_mmap_entry_insert()
673 &entry->rdma_entry, PAGE_SIZE); in bnxt_re_mmap_entry_insert()
685 *offset = rdma_user_mmap_get_offset(&entry->rdma_entry); in bnxt_re_mmap_entry_insert()
757 pd->pd_db_mmap = &entry->rdma_entry; in bnxt_re_alloc_pd()
4407 uctx->shpage_mmap = &entry->rdma_entry; in bnxt_re_alloc_ucontext()
4590 struct rdma_user_mmap_entry *rdma_entry; in bnxt_re_mmap() local
4594 rdma_entry = rdma_user_mmap_entry_get(&uctx->ib_uctx, vma); in bnxt_re_mmap()
4595 if (!rdma_entry) in bnxt_re_mmap()
4598 bnxt_entry = container_of(rdma_entry, struct bnxt_re_user_mmap_entry, in bnxt_re_mmap()
4599 rdma_entry); in bnxt_re_mmap()
[all …]
/linux/drivers/infiniband/hw/qedr/
verbs.c:310 rc = rdma_user_mmap_entry_insert(uctx, &entry->rdma_entry, in qedr_alloc_ucontext()
316 ctx->db_mmap_entry = &entry->rdma_entry; in qedr_alloc_ucontext()
375 void qedr_mmap_free(struct rdma_user_mmap_entry *rdma_entry) in qedr_mmap_free() argument
377 struct qedr_user_mmap_entry *entry = get_qedr_mmap_entry(rdma_entry); in qedr_mmap_free()
392 struct rdma_user_mmap_entry *rdma_entry; in qedr_mmap() local
401 rdma_entry = rdma_user_mmap_entry_get(ucontext, vma); in qedr_mmap()
402 if (!rdma_entry) { in qedr_mmap()
407 entry = get_qedr_mmap_entry(rdma_entry); in qedr_mmap()
417 rdma_entry); in qedr_mmap()
432 rdma_user_mmap_entry_put(rdma_entry); in qedr_mmap()
[all …]
/linux/drivers/infiniband/hw/irdma/
verbs.h:246 struct rdma_user_mmap_entry rdma_entry; member
verbs.c:149 static void irdma_mmap_free(struct rdma_user_mmap_entry *rdma_entry) in irdma_mmap_free() argument
151 struct irdma_user_mmap_entry *entry = to_irdma_mmap_entry(rdma_entry); in irdma_mmap_free()
170 &entry->rdma_entry, PAGE_SIZE); in irdma_user_mmap_entry_insert()
175 *mmap_offset = rdma_user_mmap_get_offset(&entry->rdma_entry); in irdma_user_mmap_entry_insert()
177 return &entry->rdma_entry; in irdma_user_mmap_entry_insert()
187 struct rdma_user_mmap_entry *rdma_entry; in irdma_mmap() local
199 rdma_entry = rdma_user_mmap_entry_get(&ucontext->ibucontext, vma); in irdma_mmap()
200 if (!rdma_entry) { in irdma_mmap()
207 entry = to_irdma_mmap_entry(rdma_entry); in irdma_mmap()
219 rdma_entry); in irdma_mmap()
[all …]