Lines Matching refs:hw_mr
1108 rc = ecore_rdma_alloc_tid(dev->rdma_ctx, &mr->hw_mr.itid); in qlnxr_get_dma_mr()
1115 mr->hw_mr.tid_type = ECORE_RDMA_TID_REGISTERED_MR; in qlnxr_get_dma_mr()
1116 mr->hw_mr.pd = pd->pd_id; in qlnxr_get_dma_mr()
1117 mr->hw_mr.local_read = 1; in qlnxr_get_dma_mr()
1118 mr->hw_mr.local_write = (acc & IB_ACCESS_LOCAL_WRITE) ? 1 : 0; in qlnxr_get_dma_mr()
1119 mr->hw_mr.remote_read = (acc & IB_ACCESS_REMOTE_READ) ? 1 : 0; in qlnxr_get_dma_mr()
1120 mr->hw_mr.remote_write = (acc & IB_ACCESS_REMOTE_WRITE) ? 1 : 0; in qlnxr_get_dma_mr()
1121 mr->hw_mr.remote_atomic = (acc & IB_ACCESS_REMOTE_ATOMIC) ? 1 : 0; in qlnxr_get_dma_mr()
1122 mr->hw_mr.dma_mr = true; in qlnxr_get_dma_mr()
1124 rc = ecore_rdma_register_tid(dev->rdma_ctx, &mr->hw_mr); in qlnxr_get_dma_mr()
1130 mr->ibmr.lkey = mr->hw_mr.itid << 8 | mr->hw_mr.key; in qlnxr_get_dma_mr()
1132 if (mr->hw_mr.remote_write || mr->hw_mr.remote_read || in qlnxr_get_dma_mr()
1133 mr->hw_mr.remote_atomic) { in qlnxr_get_dma_mr()
1134 mr->ibmr.rkey = mr->hw_mr.itid << 8 | mr->hw_mr.key; in qlnxr_get_dma_mr()
1142 ecore_rdma_free_tid(dev->rdma_ctx, mr->hw_mr.itid); in qlnxr_get_dma_mr()
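The qlnxr_get_dma_mr() matches above show the lkey/rkey being composed from the allocated TID and the 8-bit key (lines 1130/1134: itid << 8 | key), with the rkey only set when a remote access flag was requested (lines 1132-1134). A minimal stand-alone illustration of that composition; the function and parameter names are hypothetical stand-ins, not driver symbols, and the example itid is made up.

#include <stdint.h>
#include <stdio.h>

/*
 * Illustration only: compose an lkey/rkey the way the listing shows,
 * i.e. (itid << 8) | key.  Hypothetical helper, not a qlnxr function.
 */
static uint32_t
compose_key(uint32_t itid, uint8_t key)
{
        return ((itid << 8) | key);
}

int
main(void)
{
        /* e.g. an assumed itid of 0x1234 with key 0 yields 0x123400 */
        printf("lkey/rkey = 0x%x\n", compose_key(0x1234, 0));
        return (0);
}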
1502 rc = ecore_rdma_alloc_tid(dev->rdma_ctx, &mr->hw_mr.itid); in qlnxr_reg_user_mr()
1510 mr->hw_mr.tid_type = ECORE_RDMA_TID_REGISTERED_MR; in qlnxr_reg_user_mr()
1511 mr->hw_mr.key = 0; in qlnxr_reg_user_mr()
1512 mr->hw_mr.pd = pd->pd_id; in qlnxr_reg_user_mr()
1513 mr->hw_mr.local_read = 1; in qlnxr_reg_user_mr()
1514 mr->hw_mr.local_write = (acc & IB_ACCESS_LOCAL_WRITE) ? 1 : 0; in qlnxr_reg_user_mr()
1515 mr->hw_mr.remote_read = (acc & IB_ACCESS_REMOTE_READ) ? 1 : 0; in qlnxr_reg_user_mr()
1516 mr->hw_mr.remote_write = (acc & IB_ACCESS_REMOTE_WRITE) ? 1 : 0; in qlnxr_reg_user_mr()
1517 mr->hw_mr.remote_atomic = (acc & IB_ACCESS_REMOTE_ATOMIC) ? 1 : 0; in qlnxr_reg_user_mr()
1518 mr->hw_mr.mw_bind = false; /* TBD MW BIND */ in qlnxr_reg_user_mr()
1519 mr->hw_mr.pbl_ptr = mr->info.pbl_table[0].pa; in qlnxr_reg_user_mr()
1520 mr->hw_mr.pbl_two_level = mr->info.pbl_info.two_layered; in qlnxr_reg_user_mr()
1521 mr->hw_mr.pbl_page_size_log = ilog2(mr->info.pbl_info.pbl_size); in qlnxr_reg_user_mr()
1522 mr->hw_mr.page_size_log = ilog2(mr->umem->page_size); /* for the MR pages */ in qlnxr_reg_user_mr()
1524 mr->hw_mr.fbo = ib_umem_offset(mr->umem); in qlnxr_reg_user_mr()
1525 mr->hw_mr.length = len; in qlnxr_reg_user_mr()
1526 mr->hw_mr.vaddr = usr_addr; in qlnxr_reg_user_mr()
1527 mr->hw_mr.zbva = false; /* TBD figure when this should be true */ in qlnxr_reg_user_mr()
1528 mr->hw_mr.phy_mr = false; /* Fast MR - True, Regular Register False */ in qlnxr_reg_user_mr()
1529 mr->hw_mr.dma_mr = false; in qlnxr_reg_user_mr()
1531 rc = ecore_rdma_register_tid(dev->rdma_ctx, &mr->hw_mr); in qlnxr_reg_user_mr()
1537 mr->ibmr.lkey = mr->hw_mr.itid << 8 | mr->hw_mr.key; in qlnxr_reg_user_mr()
1538 if (mr->hw_mr.remote_write || mr->hw_mr.remote_read || in qlnxr_reg_user_mr()
1539 mr->hw_mr.remote_atomic) in qlnxr_reg_user_mr()
1540 mr->ibmr.rkey = mr->hw_mr.itid << 8 | mr->hw_mr.key; in qlnxr_reg_user_mr()
1547 ecore_rdma_free_tid(dev->rdma_ctx, mr->hw_mr.itid); in qlnxr_reg_user_mr()
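The qlnxr_reg_user_mr() matches additionally describe the registered memory itself: the PBL base and two-level flag, log2 of the PBL allocation size and of the umem page size, the first-byte offset (fbo), length and virtual address (lines 1519-1526). Below is a small stand-alone sketch of the two log2 fields, with ilog2() re-implemented locally as a stand-in for the kernel helper; the example sizes are assumptions, not values taken from the driver.

#include <stdint.h>
#include <stdio.h>

/*
 * Local stand-in for the kernel's ilog2(): floor(log2(v)).  Used only to
 * show what pbl_page_size_log and page_size_log hold (lines 1521-1522).
 */
static unsigned int
ilog2_u32(uint32_t v)
{
        unsigned int r = 0;

        while (v >>= 1)
                r++;
        return (r);
}

int
main(void)
{
        uint32_t pbl_size = 4096;       /* assumed pbl_info.pbl_size */
        uint32_t page_size = 4096;      /* assumed umem->page_size */

        printf("pbl_page_size_log=%u page_size_log=%u\n",
            ilog2_u32(pbl_size), ilog2_u32(page_size));
        return (0);
}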
5387 rc = ecore_rdma_alloc_tid(dev->rdma_ctx, &mr->hw_mr.itid); in __qlnxr_alloc_mr()
5394 mr->hw_mr.tid_type = ECORE_RDMA_TID_FMR; in __qlnxr_alloc_mr()
5395 mr->hw_mr.key = 0; in __qlnxr_alloc_mr()
5396 mr->hw_mr.pd = pd->pd_id; in __qlnxr_alloc_mr()
5397 mr->hw_mr.local_read = 1; in __qlnxr_alloc_mr()
5398 mr->hw_mr.local_write = 0; in __qlnxr_alloc_mr()
5399 mr->hw_mr.remote_read = 0; in __qlnxr_alloc_mr()
5400 mr->hw_mr.remote_write = 0; in __qlnxr_alloc_mr()
5401 mr->hw_mr.remote_atomic = 0; in __qlnxr_alloc_mr()
5402 mr->hw_mr.mw_bind = false; /* TBD MW BIND */ in __qlnxr_alloc_mr()
5403 mr->hw_mr.pbl_ptr = 0; /* Will be supplied during post */ in __qlnxr_alloc_mr()
5404 mr->hw_mr.pbl_two_level = mr->info.pbl_info.two_layered; in __qlnxr_alloc_mr()
5405 mr->hw_mr.pbl_page_size_log = ilog2(mr->info.pbl_info.pbl_size); in __qlnxr_alloc_mr()
5406 mr->hw_mr.fbo = 0; in __qlnxr_alloc_mr()
5407 mr->hw_mr.length = 0; in __qlnxr_alloc_mr()
5408 mr->hw_mr.vaddr = 0; in __qlnxr_alloc_mr()
5409 mr->hw_mr.zbva = false; /* TBD figure when this should be true */ in __qlnxr_alloc_mr()
5410 mr->hw_mr.phy_mr = true; /* Fast MR - True, Regular Register False */ in __qlnxr_alloc_mr()
5411 mr->hw_mr.dma_mr = false; in __qlnxr_alloc_mr()
5413 rc = ecore_rdma_register_tid(dev->rdma_ctx, &mr->hw_mr); in __qlnxr_alloc_mr()
5419 mr->ibmr.lkey = mr->hw_mr.itid << 8 | mr->hw_mr.key; in __qlnxr_alloc_mr()
5428 ecore_rdma_free_tid(dev->rdma_ctx, mr->hw_mr.itid); in __qlnxr_alloc_mr()
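Across all three call sites the TID lifecycle is the same: ecore_rdma_alloc_tid() fills hw_mr.itid, the descriptor is populated (dma_mr = true for the DMA MR, pbl_ptr/vaddr/length for the user MR, phy_mr = true with tid_type ECORE_RDMA_TID_FMR for the fast MR), ecore_rdma_register_tid() commits it, and ecore_rdma_free_tid() releases the TID if registration fails (lines 1142, 1547, 5428). The model below compresses that sequence; every type and stub function is a hypothetical stand-in, and only the call order and field names follow the lines above.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-in for the hw_mr fields referenced in the listing. */
struct hw_mr_stub {
        uint32_t itid;
        uint8_t  key;
        bool     dma_mr;        /* qlnxr_get_dma_mr() path */
        bool     phy_mr;        /* __qlnxr_alloc_mr() fast-MR path */
};

/* Stubs standing in for the ecore_rdma_{alloc,register,free}_tid() calls. */
static int  stub_alloc_tid(uint32_t *itid) { *itid = 0x42; return (0); }
static int  stub_register_tid(struct hw_mr_stub *m) { (void)m; return (0); }
static void stub_free_tid(uint32_t itid) { (void)itid; }

int
main(void)
{
        struct hw_mr_stub m = { .key = 0, .dma_mr = true, .phy_mr = false };
        int rc;

        rc = stub_alloc_tid(&m.itid);           /* cf. lines 1108/1502/5387 */
        if (rc != 0)
                return (1);

        rc = stub_register_tid(&m);             /* cf. lines 1124/1531/5413 */
        if (rc != 0) {
                stub_free_tid(m.itid);          /* error path: 1142/1547/5428 */
                return (1);
        }

        printf("registered stub TID 0x%x\n", m.itid);
        return (0);
}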