
Searched refs:rdma_entry (Results 1 – 12 of 12) sorted by relevance

/linux/drivers/infiniband/hw/mlx5/
dm.c
158 context, &mentry->rdma_entry, size, in add_dm_mmap_entry()
180 page_idx = op_entry->mentry.rdma_entry.start_pgoff & 0xFFFF; in copy_op_to_user()
267 rdma_user_mmap_entry_remove(&op_entry->mentry.rdma_entry); in UVERBS_HANDLER()
317 page_idx = dm->mentry.rdma_entry.start_pgoff & 0xFFFF; in handle_alloc_dm_memic()
333 rdma_user_mmap_entry_remove(&dm->mentry.rdma_entry); in handle_alloc_dm_memic()
459 rdma_user_mmap_entry_remove(&entry->mentry.rdma_entry); in dm_memic_remove_ops()
467 rdma_user_mmap_entry_remove(&dm->mentry.rdma_entry); in mlx5_dm_memic_dealloc()
520 page_idx = memic->mentry.rdma_entry.start_pgoff & 0xFFFF; in UVERBS_HANDLER()
main.c
2501 rdma_user_mmap_entry_put(&mentry->rdma_entry); in mlx5_ib_mmap_offset()
2507 u64 cmd = (entry->rdma_entry.start_pgoff >> 16) & 0xFFFF; in mlx5_entry_to_mmap_offset()
2508 u64 index = entry->rdma_entry.start_pgoff & 0xFFFF; in mlx5_entry_to_mmap_offset()
3874 rdma_user_mmap_entry_remove(&obj->rdma_entry); in mmap_obj_cleanup()
3883 &c->ibucontext, &entry->rdma_entry, length, in mlx5_rdma_user_mmap_entry_insert()
3954 length = entry->rdma_entry.npages * PAGE_SIZE; in UVERBS_HANDLER()
4081 length = entry->rdma_entry.npages * PAGE_SIZE; in UVERBS_HANDLER()
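The main.c hits at 2507-2508 show mlx5 packing a 16-bit mmap command and a 16-bit index into rdma_entry.start_pgoff. The following is only an illustrative sketch of that decode, with hypothetical example_* helper names (the real decode lives in mlx5_entry_to_mmap_offset()):

#include <linux/kernel.h>
#include <rdma/ib_verbs.h>

/* Hypothetical helpers sketching the mlx5-style start_pgoff encoding. */
static u16 example_mmap_cmd(const struct rdma_user_mmap_entry *entry)
{
	/* Bits 16..31 of the page offset select the mmap command. */
	return (entry->start_pgoff >> 16) & 0xFFFF;
}

static u16 example_mmap_index(const struct rdma_user_mmap_entry *entry)
{
	/* Bits 0..15 index the object within that command's range. */
	return entry->start_pgoff & 0xFFFF;
}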
/linux/drivers/infiniband/hw/hns/
hns_roce_main.c
375 ucontext, &entry->rdma_entry, length, 0); in hns_roce_user_mmap_entry_insert()
379 ucontext, &entry->rdma_entry, length, 1, in hns_roce_user_mmap_entry_insert()
399 &context->db_mmap_entry->rdma_entry); in hns_roce_dealloc_uar_entry()
518 struct rdma_user_mmap_entry *rdma_entry; in hns_roce_mmap() local
529 rdma_entry = rdma_user_mmap_entry_get_pgoff(uctx, vma->vm_pgoff); in hns_roce_mmap()
530 if (!rdma_entry) { in hns_roce_mmap()
535 entry = to_hns_mmap(rdma_entry); in hns_roce_mmap()
548 ret = rdma_user_mmap_io(uctx, vma, pfn, rdma_entry->npages * PAGE_SIZE, in hns_roce_mmap()
549 prot, rdma_entry); in hns_roce_mmap()
552 rdma_user_mmap_entry_put(rdma_entry); in hns_roce_mmap()
[all …]
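The hns_roce_main.c hits at 518-552 trace the common .mmap path: look up the entry registered for vm_pgoff, recover the driver wrapper with container_of(), map it via rdma_user_mmap_io(), then drop the lookup reference. Below is a minimal sketch of that flow using hypothetical example_* names in place of the hns-specific types:

#include <linux/errno.h>
#include <linux/mm.h>
#include <rdma/ib_verbs.h>

/* Hypothetical wrapper mirroring hns_user_mmap_entry and friends. */
struct example_user_mmap_entry {
	struct rdma_user_mmap_entry rdma_entry;	/* embedded core entry */
	u64 address;				/* driver-specific BAR address */
};

static int example_mmap(struct ib_ucontext *uctx, struct vm_area_struct *vma)
{
	struct rdma_user_mmap_entry *rdma_entry;
	struct example_user_mmap_entry *entry;
	unsigned long pfn;
	int ret;

	/* Look up the entry previously registered for this page offset. */
	rdma_entry = rdma_user_mmap_entry_get_pgoff(uctx, vma->vm_pgoff);
	if (!rdma_entry)
		return -EINVAL;

	entry = container_of(rdma_entry, struct example_user_mmap_entry,
			     rdma_entry);
	pfn = entry->address >> PAGE_SHIFT;

	/* Map the registered range; the core ties the entry to the VMA. */
	ret = rdma_user_mmap_io(uctx, vma, pfn,
				rdma_entry->npages * PAGE_SIZE,
				pgprot_noncached(vma->vm_page_prot),
				rdma_entry);

	/* Drop the lookup reference on both the success and error paths. */
	rdma_user_mmap_entry_put(rdma_entry);
	return ret;
}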
hns_roce_device.h
211 struct rdma_user_mmap_entry rdma_entry; member
1092 to_hns_mmap(struct rdma_user_mmap_entry *rdma_entry) in to_hns_mmap() argument
1094 return container_of(rdma_entry, struct hns_user_mmap_entry, rdma_entry); in to_hns_mmap()
hns_roce_qp.c
386 rdma_user_mmap_entry_remove(&hr_qp->dwqe_mmap_entry->rdma_entry); in qp_user_mmap_entry_remove()
840 struct rdma_user_mmap_entry *rdma_entry; in qp_mmap_entry() local
855 rdma_entry = &hr_qp->dwqe_mmap_entry->rdma_entry; in qp_mmap_entry()
856 resp->dwqe_mmap_key = rdma_user_mmap_get_offset(rdma_entry); in qp_mmap_entry()
/linux/drivers/infiniband/hw/efa/
efa_verbs.c
30 struct rdma_user_mmap_entry rdma_entry; member
173 to_emmap(struct rdma_user_mmap_entry *rdma_entry) in to_emmap() argument
175 return container_of(rdma_entry, struct efa_user_mmap_entry, rdma_entry); in to_emmap()
537 err = rdma_user_mmap_entry_insert(ucontext, &entry->rdma_entry, in efa_user_mmap_entry_insert()
543 *offset = rdma_user_mmap_get_offset(&entry->rdma_entry); in efa_user_mmap_entry_insert()
545 return &entry->rdma_entry; in efa_user_mmap_entry_insert()
2019 void efa_mmap_free(struct rdma_user_mmap_entry *rdma_entry) in efa_mmap_free() argument
2021 struct efa_user_mmap_entry *entry = to_emmap(rdma_entry); in efa_mmap_free()
2029 struct rdma_user_mmap_entry *rdma_entry; in __efa_mmap() local
2035 rdma_entry = rdma_user_mmap_entry_get(&ucontext->ibucontext, vma); in __efa_mmap()
[all …]
/linux/drivers/infiniband/hw/qedr/
qedr.h
499 struct rdma_user_mmap_entry rdma_entry; member
637 get_qedr_mmap_entry(struct rdma_user_mmap_entry *rdma_entry) in get_qedr_mmap_entry() argument
639 return container_of(rdma_entry, struct qedr_user_mmap_entry, in get_qedr_mmap_entry()
640 rdma_entry); in get_qedr_mmap_entry()
/linux/drivers/infiniband/hw/irdma/
main.h
387 to_irdma_mmap_entry(struct rdma_user_mmap_entry *rdma_entry) in to_irdma_mmap_entry() argument
389 return container_of(rdma_entry, struct irdma_user_mmap_entry, in to_irdma_mmap_entry()
390 rdma_entry); in to_irdma_mmap_entry()
verbs.h
246 struct rdma_user_mmap_entry rdma_entry; member
verbs.c
149 static void irdma_mmap_free(struct rdma_user_mmap_entry *rdma_entry) in irdma_mmap_free() argument
151 struct irdma_user_mmap_entry *entry = to_irdma_mmap_entry(rdma_entry); in irdma_mmap_free()
170 &entry->rdma_entry, PAGE_SIZE); in irdma_user_mmap_entry_insert()
175 *mmap_offset = rdma_user_mmap_get_offset(&entry->rdma_entry); in irdma_user_mmap_entry_insert()
177 return &entry->rdma_entry; in irdma_user_mmap_entry_insert()
187 struct rdma_user_mmap_entry *rdma_entry; in irdma_mmap() local
199 rdma_entry = rdma_user_mmap_entry_get(&ucontext->ibucontext, vma); in irdma_mmap()
200 if (!rdma_entry) { in irdma_mmap()
207 entry = to_irdma_mmap_entry(rdma_entry); in irdma_mmap()
219 rdma_entry); in irdma_mmap()
[all …]
/linux/drivers/infiniband/hw/bnxt_re/
ib_verbs.c
648 &entry->rdma_entry, PAGE_SIZE, 0); in bnxt_re_mmap_entry_insert()
656 &entry->rdma_entry, PAGE_SIZE); in bnxt_re_mmap_entry_insert()
668 *offset = rdma_user_mmap_get_offset(&entry->rdma_entry); in bnxt_re_mmap_entry_insert()
740 pd->pd_db_mmap = &entry->rdma_entry; in bnxt_re_alloc_pd()
4382 uctx->shpage_mmap = &entry->rdma_entry; in bnxt_re_alloc_ucontext()
4562 struct rdma_user_mmap_entry *rdma_entry; in bnxt_re_mmap() local
4566 rdma_entry = rdma_user_mmap_entry_get(&uctx->ib_uctx, vma); in bnxt_re_mmap()
4567 if (!rdma_entry) in bnxt_re_mmap()
4570 bnxt_entry = container_of(rdma_entry, struct bnxt_re_user_mmap_entry, in bnxt_re_mmap()
4571 rdma_entry); in bnxt_re_mmap()
[all …]
/linux/drivers/infiniband/sw/siw/
siw.h
486 struct rdma_user_mmap_entry rdma_entry; member
592 return container_of(rdma_mmap, struct siw_user_mmap_entry, rdma_entry); in to_siw_mmap_entry()
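The remaining hits (siw.h:486, efa_verbs.c:30 and 537-545, irdma verbs.c:170-177, bnxt_re ib_verbs.c:648-668) all follow the same allocation-side pattern: embed a struct rdma_user_mmap_entry in a driver wrapper, register it with rdma_user_mmap_entry_insert(), return rdma_user_mmap_get_offset() to userspace, and tear it down with rdma_user_mmap_entry_remove(). A rough sketch of that lifecycle, again with hypothetical example_* names:

#include <linux/slab.h>
#include <rdma/ib_verbs.h>

/* Hypothetical wrapper mirroring siw_user_mmap_entry / efa_user_mmap_entry. */
struct example_user_mmap_entry {
	struct rdma_user_mmap_entry rdma_entry;	/* embedded core entry */
	void *payload;				/* driver-specific object */
};

static struct rdma_user_mmap_entry *
example_mmap_entry_insert(struct ib_ucontext *uctx, void *payload,
			  size_t length, u64 *offset)
{
	struct example_user_mmap_entry *entry;
	int err;

	entry = kzalloc(sizeof(*entry), GFP_KERNEL);
	if (!entry)
		return NULL;

	entry->payload = payload;

	/* The core assigns rdma_entry.start_pgoff and tracks the entry. */
	err = rdma_user_mmap_entry_insert(uctx, &entry->rdma_entry, length);
	if (err) {
		kfree(entry);
		return NULL;
	}

	/* Byte offset handed back to userspace for its later mmap() call. */
	*offset = rdma_user_mmap_get_offset(&entry->rdma_entry);
	return &entry->rdma_entry;
}

static void example_mmap_entry_destroy(struct rdma_user_mmap_entry *rdma_entry)
{
	/* Unregister; the driver's mmap_free callback frees the wrapper. */
	rdma_user_mmap_entry_remove(rdma_entry);
}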