Lines matching refs: iwdev
48 struct irdma_device *iwdev = to_iwdev(ibdev); in irdma_query_device() local
49 struct irdma_pci_f *rf = iwdev->rf; in irdma_query_device()
50 struct pci_dev *pcidev = iwdev->rf->pcidev; in irdma_query_device()
58 if_getlladdr(iwdev->netdev)); in irdma_query_device()
106 pfn = ((uintptr_t)ucontext->iwdev->rf->sc_dev.hw_regs[IRDMA_DB_ADDR_OFFSET] + in irdma_mmap_legacy()
107 pci_resource_start(ucontext->iwdev->rf->pcidev, 0)) >> PAGE_SHIFT; in irdma_mmap_legacy()
167 irdma_debug(&ucontext->iwdev->rf->sc_dev, IRDMA_DEBUG_VERBS, in irdma_mmap()
174 irdma_debug(&ucontext->iwdev->rf->sc_dev, IRDMA_DEBUG_VERBS, in irdma_mmap()
179 pci_resource_start(ucontext->iwdev->rf->pcidev, 0)) >> PAGE_SHIFT; in irdma_mmap()
197 irdma_debug(&ucontext->iwdev->rf->sc_dev, IRDMA_DEBUG_VERBS, in irdma_mmap()
214 struct irdma_device *iwdev = iwqp->iwdev; in irdma_alloc_push_page() local
218 cqp_request = irdma_alloc_and_get_cqp_request(&iwdev->rf->cqp, true); in irdma_alloc_push_page()
230 cqp_info->in.u.manage_push_page.cqp = &iwdev->rf->cqp.sc_cqp; in irdma_alloc_push_page()
233 status = irdma_handle_cqp_op(iwdev->rf, cqp_request); in irdma_alloc_push_page()
235 iwdev->rf->sc_dev.hw_attrs.max_hw_device_pages) { in irdma_alloc_push_page()
240 irdma_put_cqp_request(&iwdev->rf->cqp, cqp_request); in irdma_alloc_push_page()
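
The matches at lines 218-240 show the CQP request lifecycle that repeats throughout this file: get a request from the control QP, fill in its command info, submit it, then drop the reference. The sketch below strings those calls together for illustration; the irdma_* calls and the manage_push_page field come straight from the listing, while the cqp_cmds_info layout, the error code, and the surrounding example function are assumptions.

    /* illustrative sketch only, not the driver's actual function */
    static int
    example_manage_push_page(struct irdma_device *iwdev)
    {
            struct irdma_cqp_request *cqp_request;
            struct cqp_cmds_info *cqp_info;
            int status;

            /* second argument true: the caller waits for the CQP completion */
            cqp_request = irdma_alloc_and_get_cqp_request(&iwdev->rf->cqp, true);
            if (!cqp_request)
                    return -ENOMEM;

            cqp_info = &cqp_request->info;
            /* fill in the command; line 230 above shows this field being set */
            cqp_info->in.u.manage_push_page.cqp = &iwdev->rf->cqp.sc_cqp;

            /* submit the command to the control QP */
            status = irdma_handle_cqp_op(iwdev->rf, cqp_request);

            /* always drop the reference taken by alloc_and_get */
            irdma_put_cqp_request(&iwdev->rf->cqp, cqp_request);

            return status;
    }
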
282 static u64 irdma_compute_push_wqe_offset(struct irdma_device *iwdev, u32 page_idx){ in irdma_compute_push_wqe_offset() argument
283 u64 bar_off = (uintptr_t)iwdev->rf->sc_dev.hw_regs[IRDMA_DB_ADDR_OFFSET]; in irdma_compute_push_wqe_offset()
285 if (iwdev->rf->sc_dev.hw_attrs.uk_attrs.hw_rev == IRDMA_GEN_2) { in irdma_compute_push_wqe_offset()
317 struct irdma_device *iwdev = ucontext->iwdev; in irdma_setup_push_mmap_entries() local
320 WARN_ON_ONCE(iwdev->rf->sc_dev.hw_attrs.uk_attrs.hw_rev < IRDMA_GEN_2); in irdma_setup_push_mmap_entries()
322 bar_off = irdma_compute_push_wqe_offset(iwdev, iwqp->sc_qp.push_idx); in irdma_setup_push_mmap_entries()
350 irdma_setup_virt_qp(struct irdma_device *iwdev, in irdma_setup_virt_qp() argument
379 struct irdma_device *iwdev, in irdma_setup_umode_qp() argument
393 irdma_debug(&iwdev->rf->sc_dev, IRDMA_DEBUG_VERBS, in irdma_setup_umode_qp()
409 irdma_debug(&iwdev->rf->sc_dev, IRDMA_DEBUG_VERBS, in irdma_setup_umode_qp()
444 iwdev->rf->sc_dev.hw_attrs.uk_attrs.feature_flags & IRDMA_FEATURE_RTS_AE) in irdma_setup_umode_qp()
446 irdma_setup_virt_qp(iwdev, iwqp, info); in irdma_setup_umode_qp()
459 irdma_setup_kmode_qp(struct irdma_device *iwdev, in irdma_setup_kmode_qp() argument
509 mem->va = irdma_allocate_dma_mem(&iwdev->rf->hw, mem, mem->size, in irdma_setup_kmode_qp()
540 struct irdma_pci_f *rf = iwqp->iwdev->rf; in irdma_cqp_create_qp_cmd()
571 struct irdma_device *iwdev = iwqp->iwdev; in irdma_roce_fill_and_set_qpctx_info() local
572 struct irdma_sc_dev *dev = &iwdev->rf->sc_dev; in irdma_roce_fill_and_set_qpctx_info()
577 udp_info->snd_mss = ib_mtu_enum_to_int(ib_mtu_int_to_enum(iwdev->vsi.mtu)); in irdma_roce_fill_and_set_qpctx_info()
578 udp_info->cwnd = iwdev->roce_cwnd; in irdma_roce_fill_and_set_qpctx_info()
584 ether_addr_copy(roce_info->mac_addr, if_getlladdr(iwdev->netdev)); in irdma_roce_fill_and_set_qpctx_info()
589 roce_info->rtomin = iwdev->roce_rtomin; in irdma_roce_fill_and_set_qpctx_info()
591 roce_info->ack_credits = iwdev->roce_ackcreds; in irdma_roce_fill_and_set_qpctx_info()
611 struct irdma_device *iwdev = iwqp->iwdev; in irdma_iw_fill_and_set_qpctx_info() local
612 struct irdma_sc_dev *dev = &iwdev->rf->sc_dev; in irdma_iw_fill_and_set_qpctx_info()
616 ether_addr_copy(iwarp_info->mac_addr, if_getlladdr(iwdev->netdev)); in irdma_iw_fill_and_set_qpctx_info()
639 struct irdma_device *iwdev) in irdma_validate_qp_attrs() argument
641 struct irdma_sc_dev *dev = &iwdev->rf->sc_dev; in irdma_validate_qp_attrs()
654 if (rdma_protocol_roce(&iwdev->ibdev, 1)) { in irdma_validate_qp_attrs()
676 if (mod_delayed_work(iwqp->iwdev->cleanup_wq, &iwqp->dwork_flush, in irdma_sched_qp_flush_work()
767 if (!wait_event_timeout(iwqp->iwdev->suspend_wq, in irdma_wait_for_suspend()
771 irdma_dev_warn(&iwqp->iwdev->ibdev, in irdma_wait_for_suspend()
795 struct irdma_device *iwdev = iwqp->iwdev; in irdma_modify_qp_roce() local
796 struct irdma_sc_dev *dev = &iwdev->rf->sc_dev; in irdma_modify_qp_roce()
885 if (vlan_id >= VLAN_N_VID && iwdev->dcb_vlan_mode) in irdma_modify_qp_roce()
928 irdma_add_arp(iwdev->rf, local_ip, in irdma_modify_qp_roce()
934 irdma_dev_err(&iwdev->ibdev, in irdma_modify_qp_roce()
947 irdma_dev_err(&iwdev->ibdev, in irdma_modify_qp_roce()
968 irdma_debug(&iwdev->rf->sc_dev, IRDMA_DEBUG_VERBS, in irdma_modify_qp_roce()
977 irdma_dev_warn(&iwdev->ibdev, in irdma_modify_qp_roce()
1021 iwdev->rf->check_fc(&iwdev->vsi, &iwqp->sc_qp); in irdma_modify_qp_roce()
1022 udp_info->cwnd = iwdev->roce_cwnd; in irdma_modify_qp_roce()
1023 roce_info->ack_credits = iwdev->roce_ackcreds; in irdma_modify_qp_roce()
1024 if (iwdev->push_mode && udata && in irdma_modify_qp_roce()
1082 if (irdma_hw_modify_qp(iwdev, iwqp, &info, true)) in irdma_modify_qp_roce()
1119 uresp.rd_fence_rate = iwdev->rd_fence_rate; in irdma_modify_qp_roce()
1124 irdma_debug(&iwdev->rf->sc_dev, in irdma_modify_qp_roce()
1153 struct irdma_device *iwdev = iwqp->iwdev; in irdma_modify_qp() local
1154 struct irdma_sc_dev *dev = &iwdev->rf->sc_dev; in irdma_modify_qp()
1179 irdma_debug(&iwdev->rf->sc_dev, IRDMA_DEBUG_VERBS, in irdma_modify_qp()
1200 if (iwdev->push_mode && udata && in irdma_modify_qp()
1277 iwdev->iw_status && in irdma_modify_qp()
1313 if (irdma_hw_modify_qp(iwdev, iwqp, &info, true)) in irdma_modify_qp()
1337 spin_lock_irqsave(&iwdev->cm_core.ht_lock, flags); in irdma_modify_qp()
1341 spin_unlock_irqrestore(&iwdev->cm_core.ht_lock, flags); in irdma_modify_qp()
1350 spin_unlock_irqrestore(&iwdev->cm_core.ht_lock, flags); in irdma_modify_qp()
1366 uresp.rd_fence_rate = iwdev->rd_fence_rate; in irdma_modify_qp()
1372 irdma_debug(&iwdev->rf->sc_dev, IRDMA_DEBUG_VERBS, in irdma_modify_qp()
1424 struct irdma_device *iwdev, in irdma_process_resize_list() argument
1437 queue_work(iwdev->cleanup_wq, &cq_buf->work); in irdma_process_resize_list()
1464 struct irdma_device *iwdev; in irdma_resize_cq() local
1470 iwdev = to_iwdev(ibcq->device); in irdma_resize_cq()
1471 rf = iwdev->rf; in irdma_resize_cq()
1616 irdma_free_stag(struct irdma_device *iwdev, u32 stag) in irdma_free_stag() argument
1620 stag_idx = (stag & iwdev->rf->mr_stagmask) >> IRDMA_CQPSQ_STAG_IDX_S; in irdma_free_stag()
1621 irdma_free_rsrc(iwdev->rf, iwdev->rf->allocated_mrs, stag_idx); in irdma_free_stag()
1629 irdma_create_stag(struct irdma_device *iwdev) in irdma_create_stag() argument
1642 driver_key = random & ~iwdev->rf->mr_stagmask; in irdma_create_stag()
1643 next_stag_index = (random & iwdev->rf->mr_stagmask) >> 8; in irdma_create_stag()
1644 next_stag_index %= iwdev->rf->max_mr; in irdma_create_stag()
1646 ret = irdma_alloc_rsrc(iwdev->rf, iwdev->rf->allocated_mrs, in irdma_create_stag()
1647 iwdev->rf->max_mr, &stag_index, in irdma_create_stag()
1763 irdma_handle_q_mem(struct irdma_device *iwdev, in irdma_handle_q_mem() argument
1778 err = irdma_setup_pbles(iwdev->rf, iwmr, lvl); in irdma_handle_q_mem()
1825 irdma_debug(&iwdev->rf->sc_dev, IRDMA_DEBUG_VERBS, "MR type error\n"); in irdma_handle_q_mem()
1830 irdma_free_pble(iwdev->rf->pble_rsrc, palloc); in irdma_handle_q_mem()
1843 irdma_hw_alloc_stag(struct irdma_device *iwdev, in irdma_hw_alloc_stag() argument
1853 cqp_request = irdma_alloc_and_get_cqp_request(&iwdev->rf->cqp, true); in irdma_hw_alloc_stag()
1868 cqp_info->in.u.alloc_stag.dev = &iwdev->rf->sc_dev; in irdma_hw_alloc_stag()
1870 status = irdma_handle_cqp_op(iwdev->rf, cqp_request); in irdma_hw_alloc_stag()
1871 irdma_put_cqp_request(&iwdev->rf->cqp, cqp_request); in irdma_hw_alloc_stag()
1933 irdma_hwreg_mr(struct irdma_device *iwdev, struct irdma_mr *iwmr, in irdma_hwreg_mr() argument
1945 cqp_request = irdma_alloc_and_get_cqp_request(&iwdev->rf->cqp, true); in irdma_hwreg_mr()
1958 iwdev->rf->sc_dev.hw_attrs.uk_attrs.hw_rev); in irdma_hwreg_mr()
1980 cqp_info->in.u.mr_reg_non_shared.dev = &iwdev->rf->sc_dev; in irdma_hwreg_mr()
1982 ret = irdma_handle_cqp_op(iwdev->rf, cqp_request); in irdma_hwreg_mr()
1983 irdma_put_cqp_request(&iwdev->rf->cqp, cqp_request); in irdma_hwreg_mr()
2041 struct irdma_device *iwdev = to_iwdev(iwmr->ibmr.device); in irdma_reg_user_mr_type_mem() local
2049 err = irdma_setup_pbles(iwdev->rf, iwmr, lvl); in irdma_reg_user_mr_type_mem()
2057 irdma_free_pble(iwdev->rf->pble_rsrc, &iwpbl->pble_alloc); in irdma_reg_user_mr_type_mem()
2063 stag = irdma_create_stag(iwdev); in irdma_reg_user_mr_type_mem()
2074 err = irdma_hwreg_mr(iwdev, iwmr, access); in irdma_reg_user_mr_type_mem()
2082 irdma_free_stag(iwdev, stag); in irdma_reg_user_mr_type_mem()
2086 irdma_free_pble(iwdev->rf->pble_rsrc, &iwpbl->pble_alloc); in irdma_reg_user_mr_type_mem()
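
The irdma_reg_user_mr_type_mem() matches (lines 2041-2086) trace the memory-registration order and its unwind path: physical buffer lists first, then a stag, then the hardware registration, with the error path releasing what was already set up. The sketch below is a minimal reconstruction of that ordering; only the irdma_* calls are taken from the listing, while the argument types (lvl, access), the iwmr/iwpbl fields, and the exact error handling are assumptions for illustration.

    /* illustrative sketch only, under the assumptions stated above */
    static int
    example_reg_mem(struct irdma_device *iwdev, struct irdma_mr *iwmr,
                    struct irdma_pbl *iwpbl, u8 lvl, int access)
    {
            u32 stag;
            int err;

            /* build the physical buffer lists for the region */
            err = irdma_setup_pbles(iwdev->rf, iwmr, lvl);
            if (err)
                    return err;

            /* allocate a steering tag (memory key) for the MR */
            stag = irdma_create_stag(iwdev);
            if (!stag) {
                    err = -ENOMEM;
                    goto free_pble;
            }
            iwmr->stag = stag;

            /* register the MR with the hardware */
            err = irdma_hwreg_mr(iwdev, iwmr, access);
            if (err)
                    goto free_stag;

            return 0;

    free_stag:
            irdma_free_stag(iwdev, stag);
    free_pble:
            irdma_free_pble(iwdev->rf->pble_rsrc, &iwpbl->pble_alloc);
            return err;
    }
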
2099 struct irdma_device *iwdev = to_iwdev(iwmr->ibmr.device); in irdma_reg_user_mr_type_qp() local
2113 err = irdma_handle_q_mem(iwdev, &req, iwpbl, lvl); in irdma_reg_user_mr_type_qp()
2134 struct irdma_device *iwdev = to_iwdev(iwmr->ibmr.device); in irdma_reg_user_mr_type_cq() local
2143 …((iwdev->rf->sc_dev.hw_attrs.uk_attrs.feature_flags & IRDMA_FEATURE_CQ_RESIZE) ? 0 : IRDMA_SHADOW_… in irdma_reg_user_mr_type_cq()
2148 err = irdma_handle_q_mem(iwdev, &req, iwpbl, lvl); in irdma_reg_user_mr_type_cq()
2176 struct irdma_device *iwdev = to_iwdev(pd->device); in irdma_reg_user_mr() local
2182 if (len > iwdev->rf->sc_dev.hw_attrs.max_mr_size) in irdma_reg_user_mr()
2191 irdma_debug(&iwdev->rf->sc_dev, IRDMA_DEBUG_VERBS, in irdma_reg_user_mr()
2243 struct irdma_device *iwdev = to_iwdev(ib_mr->device); in irdma_hwdereg_mr() local
2259 cqp_request = irdma_alloc_and_get_cqp_request(&iwdev->rf->cqp, true); in irdma_hwdereg_mr()
2274 cqp_info->in.u.dealloc_stag.dev = &iwdev->rf->sc_dev; in irdma_hwdereg_mr()
2276 status = irdma_handle_cqp_op(iwdev->rf, cqp_request); in irdma_hwdereg_mr()
2277 irdma_put_cqp_request(&iwdev->rf->cqp, cqp_request); in irdma_hwdereg_mr()
2296 struct irdma_device *iwdev = to_iwdev(iwmr->ibmr.device); in irdma_rereg_mr_trans() local
2305 irdma_debug(&iwdev->rf->sc_dev, IRDMA_DEBUG_VERBS, in irdma_rereg_mr_trans()
2343 struct irdma_device *iwdev = to_iwdev(pd->device); in irdma_reg_phys_mr() local
2359 stag = irdma_create_stag(iwdev); in irdma_reg_phys_mr()
2373 ret = irdma_hwreg_mr(iwdev, iwmr, access); in irdma_reg_phys_mr()
2375 irdma_free_stag(iwdev, stag); in irdma_reg_phys_mr()
2456 dev = &iwqp->iwdev->rf->sc_dev; in irdma_post_send()
2585 irdma_debug(&iwqp->iwdev->rf->sc_dev, IRDMA_DEBUG_VERBS, in irdma_post_send()
2640 irdma_debug(&iwqp->iwdev->rf->sc_dev, IRDMA_DEBUG_VERBS, in irdma_post_recv()
2804 struct irdma_device *iwdev; in __irdma_poll_cq() local
2810 iwdev = to_iwdev(iwcq->ibcq.device); in __irdma_poll_cq()
2865 resized_bufs = irdma_process_resize_list(iwcq, iwdev, NULL); in __irdma_poll_cq()
2868 resized_bufs = irdma_process_resize_list(iwcq, iwdev, last_buf); in __irdma_poll_cq()
2875 irdma_debug(&iwdev->rf->sc_dev, IRDMA_DEBUG_VERBS, in __irdma_poll_cq()
2998 irdma_mcast_cqp_op(struct irdma_device *iwdev, in irdma_mcast_cqp_op() argument
3005 cqp_request = irdma_alloc_and_get_cqp_request(&iwdev->rf->cqp, true); in irdma_mcast_cqp_op()
3014 cqp_info->in.u.mc_create.cqp = &iwdev->rf->cqp.sc_cqp; in irdma_mcast_cqp_op()
3015 status = irdma_handle_cqp_op(iwdev->rf, cqp_request); in irdma_mcast_cqp_op()
3016 irdma_put_cqp_request(&iwdev->rf->cqp, cqp_request); in irdma_mcast_cqp_op()
3033 struct irdma_device *iwdev = iwqp->iwdev; in irdma_attach_mcast() local
3034 struct irdma_pci_f *rf = iwdev->rf; in irdma_attach_mcast()
3054 irdma_debug(&iwdev->rf->sc_dev, IRDMA_DEBUG_VERBS, in irdma_attach_mcast()
3063 irdma_debug(&iwdev->rf->sc_dev, IRDMA_DEBUG_VERBS, in irdma_attach_mcast()
3108 mc_qht_elem->mc_grp_ctx.hmc_fcn_id = iwdev->rf->sc_dev.hmc_fn_id; in irdma_attach_mcast()
3130 ret = irdma_mcast_cqp_op(iwdev, &mc_qht_elem->mc_grp_ctx, in irdma_attach_mcast()
3133 ret = irdma_mcast_cqp_op(iwdev, &mc_qht_elem->mc_grp_ctx, in irdma_attach_mcast()
3170 struct irdma_device *iwdev = iwqp->iwdev; in irdma_detach_mcast() local
3171 struct irdma_pci_f *rf = iwdev->rf; in irdma_detach_mcast()
3190 irdma_debug(&iwdev->rf->sc_dev, IRDMA_DEBUG_VERBS, in irdma_detach_mcast()
3200 ret = irdma_mcast_cqp_op(iwdev, &mc_qht_elem->mc_grp_ctx, in irdma_detach_mcast()
3203 irdma_debug(&iwdev->rf->sc_dev, IRDMA_DEBUG_VERBS, in irdma_detach_mcast()
3218 ret = irdma_mcast_cqp_op(iwdev, &mc_qht_elem->mc_grp_ctx, in irdma_detach_mcast()
3221 irdma_debug(&iwdev->rf->sc_dev, IRDMA_DEBUG_VERBS, in irdma_detach_mcast()
3256 struct irdma_device *iwdev = to_iwdev(ibdev); in irdma_get_netdev() local
3258 if (iwdev->netdev) { in irdma_get_netdev()
3259 dev_hold(iwdev->netdev); in irdma_get_netdev()
3260 return iwdev->netdev; in irdma_get_netdev()
3364 irdma_init_roce_device(struct irdma_device *iwdev) in irdma_init_roce_device() argument
3366 kc_set_roce_uverbs_cmd_mask(iwdev); in irdma_init_roce_device()
3367 iwdev->ibdev.node_type = RDMA_NODE_IB_CA; in irdma_init_roce_device()
3368 addrconf_addr_eui48((u8 *)&iwdev->ibdev.node_guid, in irdma_init_roce_device()
3369 if_getlladdr(iwdev->netdev)); in irdma_init_roce_device()
3370 irdma_set_device_roce_ops(&iwdev->ibdev); in irdma_init_roce_device()
3371 if (iwdev->rf->rdma_ver == IRDMA_GEN_2) in irdma_init_roce_device()
3372 irdma_set_device_mcast_ops(&iwdev->ibdev); in irdma_init_roce_device()
3380 irdma_init_iw_device(struct irdma_device *iwdev) in irdma_init_iw_device() argument
3382 if_t netdev = iwdev->netdev; in irdma_init_iw_device()
3384 iwdev->ibdev.node_type = RDMA_NODE_RNIC; in irdma_init_iw_device()
3385 addrconf_addr_eui48((u8 *)&iwdev->ibdev.node_guid, in irdma_init_iw_device()
3387 iwdev->ibdev.iwcm = kzalloc(sizeof(*iwdev->ibdev.iwcm), GFP_KERNEL); in irdma_init_iw_device()
3388 if (!iwdev->ibdev.iwcm) in irdma_init_iw_device()
3391 iwdev->ibdev.iwcm->add_ref = irdma_qp_add_ref; in irdma_init_iw_device()
3392 iwdev->ibdev.iwcm->rem_ref = irdma_qp_rem_ref; in irdma_init_iw_device()
3393 iwdev->ibdev.iwcm->get_qp = irdma_get_qp; in irdma_init_iw_device()
3394 iwdev->ibdev.iwcm->connect = irdma_connect; in irdma_init_iw_device()
3395 iwdev->ibdev.iwcm->accept = irdma_accept; in irdma_init_iw_device()
3396 iwdev->ibdev.iwcm->reject = irdma_reject; in irdma_init_iw_device()
3397 iwdev->ibdev.iwcm->create_listen = irdma_create_listen; in irdma_init_iw_device()
3398 iwdev->ibdev.iwcm->destroy_listen = irdma_destroy_listen; in irdma_init_iw_device()
3399 memcpy(iwdev->ibdev.iwcm->ifname, if_name(netdev), in irdma_init_iw_device()
3400 sizeof(iwdev->ibdev.iwcm->ifname)); in irdma_init_iw_device()
3401 irdma_set_device_iw_ops(&iwdev->ibdev); in irdma_init_iw_device()
3411 irdma_init_rdma_device(struct irdma_device *iwdev) in irdma_init_rdma_device() argument
3415 iwdev->ibdev.owner = THIS_MODULE; in irdma_init_rdma_device()
3416 iwdev->ibdev.uverbs_abi_ver = IRDMA_ABI_VER; in irdma_init_rdma_device()
3417 kc_set_rdma_uverbs_cmd_mask(iwdev); in irdma_init_rdma_device()
3419 if (iwdev->roce_mode) { in irdma_init_rdma_device()
3420 irdma_init_roce_device(iwdev); in irdma_init_rdma_device()
3422 ret = irdma_init_iw_device(iwdev); in irdma_init_rdma_device()
3427 iwdev->ibdev.phys_port_cnt = 1; in irdma_init_rdma_device()
3428 iwdev->ibdev.num_comp_vectors = iwdev->rf->ceqs_count; in irdma_init_rdma_device()
3429 iwdev->ibdev.dev.parent = iwdev->rf->dev_ctx.dev; in irdma_init_rdma_device()
3430 set_ibdev_dma_device(iwdev->ibdev, &iwdev->rf->pcidev->dev); in irdma_init_rdma_device()
3431 irdma_set_device_ops(&iwdev->ibdev); in irdma_init_rdma_device()
3432 if (iwdev->rf->rdma_ver == IRDMA_GEN_1) in irdma_init_rdma_device()
3433 irdma_set_device_gen1_ops(&iwdev->ibdev); in irdma_init_rdma_device()
3443 irdma_port_ibevent(struct irdma_device *iwdev) in irdma_port_ibevent() argument
3447 event.device = &iwdev->ibdev; in irdma_port_ibevent()
3450 iwdev->iw_status ? IB_EVENT_PORT_ACTIVE : IB_EVENT_PORT_ERR; in irdma_port_ibevent()
3460 irdma_ib_unregister_device(struct irdma_device *iwdev) in irdma_ib_unregister_device() argument
3462 iwdev->iw_status = 0; in irdma_ib_unregister_device()
3463 irdma_port_ibevent(iwdev); in irdma_ib_unregister_device()
3464 ib_unregister_device(&iwdev->ibdev); in irdma_ib_unregister_device()
3465 dev_put(iwdev->netdev); in irdma_ib_unregister_device()
3466 kfree(iwdev->ibdev.iwcm); in irdma_ib_unregister_device()
3467 iwdev->ibdev.iwcm = NULL; in irdma_ib_unregister_device()
3475 irdma_ib_register_device(struct irdma_device *iwdev) in irdma_ib_register_device() argument
3479 ret = irdma_init_rdma_device(iwdev); in irdma_ib_register_device()
3483 dev_hold(iwdev->netdev); in irdma_ib_register_device()
3484 sprintf(iwdev->ibdev.name, "irdma-%s", if_name(iwdev->netdev)); in irdma_ib_register_device()
3485 ret = ib_register_device(&iwdev->ibdev, NULL); in irdma_ib_register_device()
3489 iwdev->iw_status = 1; in irdma_ib_register_device()
3490 irdma_port_ibevent(iwdev); in irdma_ib_register_device()
3495 kfree(iwdev->ibdev.iwcm); in irdma_ib_register_device()
3496 iwdev->ibdev.iwcm = NULL; in irdma_ib_register_device()
3497 irdma_debug(&iwdev->rf->sc_dev, IRDMA_DEBUG_VERBS, "Register RDMA device fail\n"); in irdma_ib_register_device()