Lines Matching full:rf
206 * @rf: RDMA PCI function
212 irdma_arp_table(struct irdma_pci_f *rf, u32 *ip_addr, const u8 *mac_addr, in irdma_arp_table() argument
221 spin_lock_irqsave(&rf->arp_lock, flags); in irdma_arp_table()
222 for (arp_index = 0; (u32)arp_index < rf->arp_table_size; arp_index++) { in irdma_arp_table()
223 if (!memcmp(rf->arp_table[arp_index].ip_addr, ip, sizeof(ip))) in irdma_arp_table()
229 if (arp_index != rf->arp_table_size) { in irdma_arp_table()
235 if (irdma_alloc_rsrc(rf, rf->allocated_arps, rf->arp_table_size, in irdma_arp_table()
236 (u32 *)&arp_index, &rf->next_arp_index)) { in irdma_arp_table()
241 memcpy(rf->arp_table[arp_index].ip_addr, ip, in irdma_arp_table()
242 sizeof(rf->arp_table[arp_index].ip_addr)); in irdma_arp_table()
243 ether_addr_copy(rf->arp_table[arp_index].mac_addr, mac_addr); in irdma_arp_table()
246 if (arp_index == rf->arp_table_size) in irdma_arp_table()
250 if (arp_index == rf->arp_table_size) { in irdma_arp_table()
255 memset(rf->arp_table[arp_index].ip_addr, 0, in irdma_arp_table()
256 sizeof(rf->arp_table[arp_index].ip_addr)); in irdma_arp_table()
257 eth_zero_addr(rf->arp_table[arp_index].mac_addr); in irdma_arp_table()
258 irdma_free_rsrc(rf, rf->allocated_arps, arp_index); in irdma_arp_table()
265 spin_unlock_irqrestore(&rf->arp_lock, flags); in irdma_arp_table()
271 * @rf: RDMA function
276 irdma_add_arp(struct irdma_pci_f *rf, u32 *ip, const u8 *mac) in irdma_add_arp() argument
280 arpidx = irdma_arp_table(rf, &ip[0], NULL, IRDMA_ARP_RESOLVE); in irdma_add_arp()
282 if (ether_addr_equal(rf->arp_table[arpidx].mac_addr, mac)) in irdma_add_arp()
285 irdma_manage_arp_cache(rf, rf->arp_table[arpidx].mac_addr, ip, in irdma_add_arp()
289 irdma_manage_arp_cache(rf, mac, ip, IRDMA_ARP_ADD); in irdma_add_arp()
291 return irdma_arp_table(rf, ip, NULL, IRDMA_ARP_RESOLVE); in irdma_add_arp()
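
The irdma_arp_table() and irdma_add_arp() matches above trace the driver's software ARP cache: lookups and slot management happen under rf->arp_lock, free slots come from the rf->allocated_arps bitmap via irdma_alloc_rsrc(), and irdma_add_arp() re-resolves an entry and refreshes the hardware-side cache when the MAC address changes. Below is a minimal sketch of that add path, assuming the driver's internal headers are in scope; the "entry not found" test and the delete action constant (written here as IRDMA_ARP_DELETE) are not visible in the matches and are assumptions.

/* Illustrative reconstruction of the irdma_add_arp() flow suggested by the
 * matches above; example_add_arp() is not a driver symbol. Assumes a
 * negative index from IRDMA_ARP_RESOLVE means "no entry" and that
 * IRDMA_ARP_DELETE is the removal action for irdma_manage_arp_cache().
 */
static int
example_add_arp(struct irdma_pci_f *rf, u32 *ip, const u8 *mac)
{
	int arpidx;

	/* Resolve the IP to an existing ARP-table index, if any. */
	arpidx = irdma_arp_table(rf, &ip[0], NULL, IRDMA_ARP_RESOLVE);
	if (arpidx >= 0) {
		/* Entry already current: return its index unchanged. */
		if (ether_addr_equal(rf->arp_table[arpidx].mac_addr, mac))
			return arpidx;

		/* MAC changed: purge the stale entry before re-adding. */
		irdma_manage_arp_cache(rf, rf->arp_table[arpidx].mac_addr, ip,
				       IRDMA_ARP_DELETE);
	}

	/* Install the new IP/MAC mapping, then resolve its new index. */
	irdma_manage_arp_cache(rf, mac, ip, IRDMA_ARP_ADD);
	return irdma_arp_table(rf, ip, NULL, IRDMA_ARP_RESOLVE);
}
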
454 * @rf: RDMA PCI function
457 irdma_cleanup_pending_cqp_op(struct irdma_pci_f *rf) in irdma_cleanup_pending_cqp_op() argument
459 struct irdma_sc_dev *dev = &rf->sc_dev; in irdma_cleanup_pending_cqp_op()
460 struct irdma_cqp *cqp = &rf->cqp; in irdma_cleanup_pending_cqp_op()
486 * @rf: RDMA PCI function
490 irdma_wait_event(struct irdma_pci_f *rf, in irdma_wait_event() argument
497 cqp_timeout.compl_cqp_cmds = atomic64_read(&rf->sc_dev.cqp->completed_ops); in irdma_wait_event()
499 int wait_time_ms = rf->sc_dev.hw_attrs.max_cqp_compl_wait_time_ms; in irdma_wait_event()
501 irdma_cqp_ce_handler(rf, &rf->ccq.sc_cq); in irdma_wait_event()
507 irdma_check_cqp_progress(&cqp_timeout, &rf->sc_dev); in irdma_wait_event()
512 if (!rf->reset) { in irdma_wait_event()
513 rf->reset = true; in irdma_wait_event()
514 rf->gen_ops.request_reset(rf); in irdma_wait_event()
526 if (!rf->reset) { in irdma_wait_event()
527 rf->reset = true; in irdma_wait_event()
528 rf->gen_ops.request_reset(rf); in irdma_wait_event()
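
The irdma_wait_event() matches above show how a stalled CQP command is detected and escalated: the caller snapshots sc_dev.cqp->completed_ops, waits up to hw_attrs.max_cqp_compl_wait_time_ms while polling the control CQ with irdma_cqp_ce_handler(), asks irdma_check_cqp_progress() whether anything completed, and requests a function reset exactly once if the CQP is considered hung. The sketch below is a heavily reduced version of that escalation path; the surrounding wait loop, the hang threshold, and the irdma_cqp_timeout type name are not visible in the matches and are assumed here.

/* Simplified stall handling modeled on irdma_wait_event() (source lines
 * 497-528 above). example_cqp_timeout_escalate() is illustrative, and
 * struct irdma_cqp_timeout is assumed to be the bookkeeping type behind
 * the cqp_timeout.compl_cqp_cmds field seen in the matches. It would be
 * called after a wait bounded by hw_attrs.max_cqp_compl_wait_time_ms
 * expired without the command completing.
 */
static int
example_cqp_timeout_escalate(struct irdma_pci_f *rf,
			     struct irdma_cqp_timeout *cqp_timeout)
{
	/* Service the control CQ in case a completion arrived without an
	 * interrupt reaching this thread. */
	irdma_cqp_ce_handler(rf, &rf->ccq.sc_cq);

	/* Compare sc_dev.cqp->completed_ops against the snapshot stored in
	 * cqp_timeout->compl_cqp_cmds before the wait began, so repeated
	 * no-progress timeouts can be distinguished from a slow CQP. */
	irdma_check_cqp_progress(cqp_timeout, &rf->sc_dev);

	/* The driver gates this on a hang threshold; here we escalate
	 * directly. The rf->reset guard ensures the function-level reset is
	 * requested only once, by the first failing command. */
	if (!rf->reset) {
		rf->reset = true;
		rf->gen_ops.request_reset(rf);
	}

	return -ETIMEDOUT;
}
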
625 * @rf: RDMA PCI function
629 irdma_handle_cqp_op(struct irdma_pci_f *rf, in irdma_handle_cqp_op() argument
632 struct irdma_sc_dev *dev = &rf->sc_dev; in irdma_handle_cqp_op()
637 if (rf->reset) in irdma_handle_cqp_op()
647 status = irdma_wait_event(rf, cqp_request); in irdma_handle_cqp_op()
658 irdma_dev_err(&rf->iwdev->ibdev, in irdma_handle_cqp_op()
666 irdma_put_cqp_request(&rf->cqp, cqp_request); in irdma_handle_cqp_op()
686 spin_lock_irqsave(&iwdev->rf->qptable_lock, flags); in irdma_qp_rem_ref()
688 spin_unlock_irqrestore(&iwdev->rf->qptable_lock, flags); in irdma_qp_rem_ref()
692 iwdev->rf->qp_table[iwqp->ibqp.qp_num] = NULL; in irdma_qp_rem_ref()
693 spin_unlock_irqrestore(&iwdev->rf->qptable_lock, flags); in irdma_qp_rem_ref()
709 struct irdma_pci_f *rf = container_of(iwcq->sc_cq.dev, struct irdma_pci_f, sc_dev); in irdma_cq_rem_ref() local
712 spin_lock_irqsave(&rf->cqtable_lock, flags); in irdma_cq_rem_ref()
714 spin_unlock_irqrestore(&rf->cqtable_lock, flags); in irdma_cq_rem_ref()
718 rf->cq_table[iwcq->cq_num] = NULL; in irdma_cq_rem_ref()
719 spin_unlock_irqrestore(&rf->cqtable_lock, flags); in irdma_cq_rem_ref()
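
The irdma_qp_rem_ref() and irdma_cq_rem_ref() matches above show the teardown half of the QP/CQ lookup tables: once the last reference is gone (the reference counting itself is not visible in these matches), the table slot is cleared under qptable_lock/cqtable_lock so handlers that look entries up by number cannot race with the free. A minimal sketch of the CQ side follows; the helper name and the struct irdma_cq type are assumptions.

/* Final CQ table removal, modeled on the irdma_cq_rem_ref() matches
 * (source lines 712-719). The refcount check that decides whether this
 * runs is omitted because it does not appear in the matches.
 */
static void
example_cq_table_remove(struct irdma_pci_f *rf, struct irdma_cq *iwcq)
{
	unsigned long flags;

	/* Clear the slot under the table lock so a concurrent cq_table[]
	 * lookup by cq_num sees either a valid CQ or NULL, never a
	 * half-freed one. */
	spin_lock_irqsave(&rf->cqtable_lock, flags);
	rf->cq_table[iwcq->cq_num] = NULL;
	spin_unlock_irqrestore(&rf->cqtable_lock, flags);
}
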
739 if (qpn < IW_FIRST_QPN || qpn >= iwdev->rf->max_qp) in irdma_get_qp()
742 return &iwdev->rf->qp_table[qpn]->ibqp; in irdma_get_qp()
776 struct irdma_pci_f *rf = dev_to_rf(dev); in irdma_cqp_sds_cmd() local
779 cqp_request = irdma_alloc_and_get_cqp_request(&rf->cqp, true); in irdma_cqp_sds_cmd()
791 status = irdma_handle_cqp_op(rf, cqp_request); in irdma_cqp_sds_cmd()
792 irdma_put_cqp_request(&rf->cqp, cqp_request); in irdma_cqp_sds_cmd()
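
The irdma_cqp_sds_cmd() matches just above show the request lifecycle that nearly every CQP helper in this listing repeats (irdma_cqp_qp_suspend_resume(), irdma_cq_wq_destroy(), irdma_cqp_gather_stats_cmd(), irdma_ah_cqp_op(), and so on): allocate and take a reference on a request, fill the op-specific part of its info union, hand it to irdma_handle_cqp_op(), then drop the reference. The sketch below shows that generic shape; the request/info type names and field layout beyond the calls visible in the matches are assumptions, as is the helper name.

/* Generic CQP command wrapper illustrating the alloc/fill/handle/put
 * pattern shared by the helpers matched in this listing. The opcode and
 * the in.u.<op> member are placeholders; each real helper fills its own.
 */
static int
example_cqp_cmd(struct irdma_sc_dev *dev)
{
	struct irdma_pci_f *rf = dev_to_rf(dev);
	struct irdma_cqp_request *cqp_request;
	struct cqp_cmds_info *cqp_info;
	int status;

	/* "true" selects a sleeping wait for the CQP completion; callers in
	 * atomic context pass false and rely on polling. */
	cqp_request = irdma_alloc_and_get_cqp_request(&rf->cqp, true);
	if (!cqp_request)
		return -ENOMEM;

	cqp_info = &cqp_request->info;
	/* ... set cqp_info->cqp_cmd and the op-specific fields, e.g.
	 * cqp_info->in.u.<op>.cqp = &rf->cqp.sc_cqp; ... */

	/* Post the command, wait (or poll) for completion, drop the ref. */
	status = irdma_handle_cqp_op(rf, cqp_request);
	irdma_put_cqp_request(&rf->cqp, cqp_request);

	return status;
}
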
809 struct irdma_pci_f *rf = dev_to_rf(dev); in irdma_cqp_qp_suspend_resume() local
812 cqp_request = irdma_alloc_and_get_cqp_request(&rf->cqp, false); in irdma_cqp_qp_suspend_resume()
822 status = irdma_handle_cqp_op(rf, cqp_request); in irdma_cqp_qp_suspend_resume()
823 irdma_put_cqp_request(&rf->cqp, cqp_request); in irdma_cqp_qp_suspend_resume()
931 struct irdma_pci_f *rf = dev_to_rf(dev); in irdma_cqp_query_fpm_val_cmd() local
934 cqp_request = irdma_alloc_and_get_cqp_request(&rf->cqp, true); in irdma_cqp_query_fpm_val_cmd()
948 status = irdma_handle_cqp_op(rf, cqp_request); in irdma_cqp_query_fpm_val_cmd()
949 irdma_put_cqp_request(&rf->cqp, cqp_request); in irdma_cqp_query_fpm_val_cmd()
966 struct irdma_pci_f *rf = dev_to_rf(dev); in irdma_cqp_commit_fpm_val_cmd() local
969 cqp_request = irdma_alloc_and_get_cqp_request(&rf->cqp, true); in irdma_cqp_commit_fpm_val_cmd()
983 status = irdma_handle_cqp_op(rf, cqp_request); in irdma_cqp_commit_fpm_val_cmd()
984 irdma_put_cqp_request(&rf->cqp, cqp_request); in irdma_cqp_commit_fpm_val_cmd()
997 struct irdma_pci_f *rf = dev_to_rf(dev); in irdma_cqp_cq_create_cmd() local
998 struct irdma_cqp *iwcqp = &rf->cqp; in irdma_cqp_cq_create_cmd()
1013 status = irdma_handle_cqp_op(rf, cqp_request); in irdma_cqp_cq_create_cmd()
1027 struct irdma_pci_f *rf = dev_to_rf(dev); in irdma_cqp_qp_create_cmd() local
1028 struct irdma_cqp *iwcqp = &rf->cqp; in irdma_cqp_qp_create_cmd()
1048 status = irdma_handle_cqp_op(rf, cqp_request); in irdma_cqp_qp_create_cmd()
1056 * @rf: RDMA PCI function
1060 irdma_dealloc_push_page(struct irdma_pci_f *rf, in irdma_dealloc_push_page() argument
1071 cqp_request = irdma_alloc_and_get_cqp_request(&rf->cqp, false); in irdma_dealloc_push_page()
1082 cqp_info->in.u.manage_push_page.cqp = &rf->cqp.sc_cqp; in irdma_dealloc_push_page()
1084 status = irdma_handle_cqp_op(rf, cqp_request); in irdma_dealloc_push_page()
1087 irdma_put_cqp_request(&rf->cqp, cqp_request); in irdma_dealloc_push_page()
1092 * @rf: RDMA PCI function
1096 irdma_cq_wq_destroy(struct irdma_pci_f *rf, struct irdma_sc_cq *cq) in irdma_cq_wq_destroy() argument
1101 cqp_request = irdma_alloc_and_get_cqp_request(&rf->cqp, true); in irdma_cq_wq_destroy()
1111 irdma_handle_cqp_op(rf, cqp_request); in irdma_cq_wq_destroy()
1112 irdma_put_cqp_request(&rf->cqp, cqp_request); in irdma_cq_wq_destroy()
1143 struct irdma_pci_f *rf = iwdev->rf; in irdma_hw_modify_qp() local
1148 cqp_request = irdma_alloc_and_get_cqp_request(&rf->cqp, wait); in irdma_hw_modify_qp()
1163 status = irdma_handle_cqp_op(rf, cqp_request); in irdma_hw_modify_qp()
1164 irdma_put_cqp_request(&rf->cqp, cqp_request); in irdma_hw_modify_qp()
1183 irdma_gen_ae(rf, &iwqp->sc_qp, &ae_info, false); in irdma_hw_modify_qp()
1185 cqp_request = irdma_alloc_and_get_cqp_request(&rf->cqp, in irdma_hw_modify_qp()
1199 irdma_handle_cqp_op(rf, cqp_request); in irdma_hw_modify_qp()
1200 irdma_put_cqp_request(&rf->cqp, cqp_request); in irdma_hw_modify_qp()
1220 struct irdma_pci_f *rf = dev_to_rf(dev); in irdma_cqp_cq_destroy_cmd() local
1222 irdma_cq_wq_destroy(rf, cq); in irdma_cqp_cq_destroy_cmd()
1233 struct irdma_pci_f *rf = dev_to_rf(dev); in irdma_cqp_qp_destroy_cmd() local
1234 struct irdma_cqp *iwcqp = &rf->cqp; in irdma_cqp_qp_destroy_cmd()
1251 status = irdma_handle_cqp_op(rf, cqp_request); in irdma_cqp_qp_destroy_cmd()
1252 irdma_put_cqp_request(&rf->cqp, cqp_request); in irdma_cqp_qp_destroy_cmd()
1266 struct irdma_pci_f *rf = dev_to_rf(dev); in irdma_ieq_mpa_crc_ae() local
1268 irdma_debug(&rf->sc_dev, IRDMA_DEBUG_AEQ, "Generate MPA CRC AE\n"); in irdma_ieq_mpa_crc_ae()
1271 irdma_gen_ae(rf, qp, &info, false); in irdma_ieq_mpa_crc_ae()
1590 struct irdma_pci_f *rf = dev_to_rf(dev); in irdma_cqp_gather_stats_cmd() local
1591 struct irdma_cqp *iwcqp = &rf->cqp; in irdma_cqp_gather_stats_cmd()
1606 cqp_info->in.u.stats_gather.cqp = &rf->cqp.sc_cqp; in irdma_cqp_gather_stats_cmd()
1610 status = irdma_handle_cqp_op(rf, cqp_request); in irdma_cqp_gather_stats_cmd()
1613 irdma_put_cqp_request(&rf->cqp, cqp_request); in irdma_cqp_gather_stats_cmd()
1628 struct irdma_pci_f *rf = dev_to_rf(vsi->dev); in irdma_cqp_stats_inst_cmd() local
1629 struct irdma_cqp *iwcqp = &rf->cqp; in irdma_cqp_stats_inst_cmd()
1647 cqp_info->in.u.stats_manage.cqp = &rf->cqp.sc_cqp; in irdma_cqp_stats_inst_cmd()
1648 status = irdma_handle_cqp_op(rf, cqp_request); in irdma_cqp_stats_inst_cmd()
1668 struct irdma_pci_f *rf = dev_to_rf(dev); in irdma_cqp_ceq_cmd() local
1671 cqp_request = irdma_alloc_and_get_cqp_request(&rf->cqp, true); in irdma_cqp_ceq_cmd()
1681 status = irdma_handle_cqp_op(rf, cqp_request); in irdma_cqp_ceq_cmd()
1682 irdma_put_cqp_request(&rf->cqp, cqp_request); in irdma_cqp_ceq_cmd()
1699 struct irdma_pci_f *rf = dev_to_rf(dev); in irdma_cqp_aeq_cmd() local
1702 cqp_request = irdma_alloc_and_get_cqp_request(&rf->cqp, true); in irdma_cqp_aeq_cmd()
1712 status = irdma_handle_cqp_op(rf, cqp_request); in irdma_cqp_aeq_cmd()
1713 irdma_put_cqp_request(&rf->cqp, cqp_request); in irdma_cqp_aeq_cmd()
1728 struct irdma_pci_f *rf = dev_to_rf(dev); in irdma_cqp_ws_node_cmd() local
1729 struct irdma_cqp *iwcqp = &rf->cqp; in irdma_cqp_ws_node_cmd()
1736 if (!rf->sc_dev.ceq_valid) in irdma_cqp_ws_node_cmd()
1752 status = irdma_handle_cqp_op(rf, cqp_request); in irdma_cqp_ws_node_cmd()
1762 irdma_debug(&rf->sc_dev, IRDMA_DEBUG_DCB, in irdma_cqp_ws_node_cmd()
1770 irdma_put_cqp_request(&rf->cqp, cqp_request); in irdma_cqp_ws_node_cmd()
1777 * @rf: RDMA PCI function
1787 irdma_ah_cqp_op(struct irdma_pci_f *rf, struct irdma_sc_ah *sc_ah, u8 cmd, in irdma_ah_cqp_op() argument
1799 cqp_request = irdma_alloc_and_get_cqp_request(&rf->cqp, wait); in irdma_ah_cqp_op()
1813 cqp_info->in.u.ah_create.cqp = &rf->cqp.sc_cqp; in irdma_ah_cqp_op()
1817 cqp_info->in.u.ah_destroy.cqp = &rf->cqp.sc_cqp; in irdma_ah_cqp_op()
1824 status = irdma_handle_cqp_op(rf, cqp_request); in irdma_ah_cqp_op()
1825 irdma_put_cqp_request(&rf->cqp, cqp_request); in irdma_ah_cqp_op()
1889 struct irdma_pci_f *rf = dev_to_rf(dev); in irdma_puda_create_ah() local
1897 err = irdma_alloc_rsrc(rf, rf->allocated_ahs, rf->max_ah, in irdma_puda_create_ah()
1898 &ah_info->ah_idx, &rf->next_ah); in irdma_puda_create_ah()
1906 err = irdma_ah_cqp_op(rf, ah, IRDMA_OP_AH_CREATE, wait, in irdma_puda_create_ah()
1909 err = irdma_ah_cqp_op(rf, ah, IRDMA_OP_AH_CREATE, wait, in irdma_puda_create_ah()
1917 irdma_free_rsrc(rf, rf->allocated_ahs, ah->ah_info.ah_idx); in irdma_puda_create_ah()
1932 struct irdma_pci_f *rf = dev_to_rf(dev); in irdma_puda_free_ah() local
1938 irdma_ah_cqp_op(rf, ah, IRDMA_OP_AH_DESTROY, false, NULL, NULL); in irdma_puda_free_ah()
1939 irdma_free_rsrc(rf, rf->allocated_ahs, ah->ah_info.ah_idx); in irdma_puda_free_ah()
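
The irdma_puda_create_ah() and irdma_puda_free_ah() matches above pair the per-function AH index bitmap with the CQP AH commands: an index is reserved with irdma_alloc_rsrc(), the create is pushed through irdma_ah_cqp_op() with IRDMA_OP_AH_CREATE, and on failure (or on destroy, via IRDMA_OP_AH_DESTROY) the index is returned with irdma_free_rsrc(). A condensed sketch of the create side follows; the completion callback and its argument are passed as NULL here, as the free path above does, and the helper name is illustrative.

/* Condensed AH-create path based on the irdma_puda_create_ah() matches.
 * example_create_ah() is not a driver symbol; no completion callback is
 * registered in this sketch.
 */
static int
example_create_ah(struct irdma_pci_f *rf, struct irdma_sc_ah *ah, bool wait)
{
	int err;

	/* Reserve an AH index from the function-wide bitmap. */
	err = irdma_alloc_rsrc(rf, rf->allocated_ahs, rf->max_ah,
			       &ah->ah_info.ah_idx, &rf->next_ah);
	if (err)
		return err;

	/* Issue the create through the CQP. */
	err = irdma_ah_cqp_op(rf, ah, IRDMA_OP_AH_CREATE, wait, NULL, NULL);
	if (err)
		/* Roll back the index reservation if the command failed. */
		irdma_free_rsrc(rf, rf->allocated_ahs, ah->ah_info.ah_idx);

	return err;
}
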
2152 struct irdma_pci_f *rf = dev_to_rf(dev); in irdma_alloc_ws_node_id() local
2156 if (irdma_alloc_rsrc(rf, rf->allocated_ws_nodes, rf->max_ws_node_id, in irdma_alloc_ws_node_id()
2171 struct irdma_pci_f *rf = dev_to_rf(dev); in irdma_free_ws_node_id() local
2173 irdma_free_rsrc(rf, rf->allocated_ws_nodes, (u32)node_id); in irdma_free_ws_node_id()
2186 if (qp->iwdev->rf->reset) in irdma_modify_qp_to_err()
2256 struct irdma_pci_f *rf; in irdma_upload_qp_context() local
2260 rf = iwqp->iwdev->rf; in irdma_upload_qp_context()
2261 if (!rf) in irdma_upload_qp_context()
2265 dev = &rf->sc_dev; in irdma_upload_qp_context()
2266 iwcqp = &rf->cqp; in irdma_upload_qp_context()
2283 irdma_put_cqp_request(&rf->cqp, cqp_request); in irdma_upload_qp_context()
2293 ret = irdma_handle_cqp_op(rf, cqp_request); in irdma_upload_qp_context()