Lines Matching refs:vsi

123 static void ice_init_tx_tracking(struct ice_vsi *vsi);
897 struct ice_vsi *vsi = &sc->pf_vsi; in ice_free_irqvs() local
910 for (i = 0; i < vsi->num_rx_queues; i++) in ice_free_irqvs()
911 vsi->rx_queues[i].irqv = NULL; in ice_free_irqvs()
913 for (i = 0; i < vsi->num_tx_queues; i++) in ice_free_irqvs()
914 vsi->tx_queues[i].irqv = NULL; in ice_free_irqvs()
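
The ice_free_irqvs() hits above (910-914) show a small defensive idiom: after the IRQ vectors themselves are released, every RX and TX queue's irqv back-pointer is set to NULL so later teardown paths cannot dereference a freed vector. A minimal compilable sketch of the idiom, using simplified stand-in types rather than the driver's own:

    #include <stddef.h>

    struct irq_vector { int me; };              /* stand-in vector type */
    struct queue { struct irq_vector *irqv; };  /* queue caches its vector */

    /* Clear stale back-pointers once the vectors themselves are freed. */
    static void
    clear_queue_vectors(struct queue *rxq, int nrx, struct queue *txq, int ntx)
    {
        for (int i = 0; i < nrx; i++)
            rxq[i].irqv = NULL;
        for (int i = 0; i < ntx; i++)
            txq[i].irqv = NULL;
    }
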
936 struct ice_vsi *vsi = &sc->pf_vsi; in ice_if_detach() local
964 ice_vsi_del_txqs_ctx(vsi); in ice_if_detach()
965 ice_vsi_del_rxqs_ctx(vsi); in ice_if_detach()
1029 struct ice_vsi *vsi = &sc->pf_vsi; in ice_if_tx_queues_alloc() local
1042 if (!(vsi->tx_queues = in ice_if_tx_queues_alloc()
1049 for (i = 0, txq = vsi->tx_queues; i < ntxqsets; i++, txq++) { in ice_if_tx_queues_alloc()
1062 err = ice_resmgr_assign_contiguous(&sc->tx_qmgr, vsi->tx_qmap, ntxqsets); in ice_if_tx_queues_alloc()
1068 vsi->qmap_type = ICE_RESMGR_ALLOC_CONTIGUOUS; in ice_if_tx_queues_alloc()
1071 ice_vsi_add_txqs_ctx(vsi); in ice_if_tx_queues_alloc()
1073 for (i = 0, txq = vsi->tx_queues; i < ntxqsets; i++, txq++) { in ice_if_tx_queues_alloc()
1076 txq->vsi = vsi; in ice_if_tx_queues_alloc()
1082 txq->tail = QTX_COMM_DBELL(vsi->tx_qmap[i]); in ice_if_tx_queues_alloc()
1089 vsi->num_tx_queues = ntxqsets; in ice_if_tx_queues_alloc()
1094 for (i = 0, txq = vsi->tx_queues; i < ntxqsets; i++, txq++) { in ice_if_tx_queues_alloc()
1100 free(vsi->tx_queues, M_ICE); in ice_if_tx_queues_alloc()
1101 vsi->tx_queues = NULL; in ice_if_tx_queues_alloc()
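
The ice_if_tx_queues_alloc() lines (1029-1101) walk a three-step pattern: allocate the tx_queues array, reserve a contiguous block of hardware queue indices via ice_resmgr_assign_contiguous(), then point each queue at its VSI and cache its doorbell register from the qmap, unwinding everything (1094-1101) if a later step fails. Below is a compilable approximation of that pattern; the register macro and the resource-manager stand-in are hypothetical, not the driver's APIs:

    #include <stdlib.h>

    struct tx_queue { int me; unsigned int tail; };

    /* Illustrative doorbell layout; the driver's QTX_COMM_DBELL() differs. */
    #define QTX_COMM_DBELL_SKETCH(q)    (0x2C000u + 4u * (unsigned int)(q))

    /* Stand-in: hand out hardware queues 0..n-1 as one contiguous block. */
    static int
    assign_contiguous_sketch(unsigned int *qmap, int n)
    {
        for (int i = 0; i < n; i++)
            qmap[i] = (unsigned int)i;
        return (0);
    }

    static struct tx_queue *
    tx_queues_alloc_sketch(unsigned int *qmap, int ntxqsets)
    {
        struct tx_queue *txqs;

        txqs = calloc(ntxqsets, sizeof(*txqs));
        if (txqs == NULL)
            return (NULL);
        if (assign_contiguous_sketch(qmap, ntxqsets) != 0) {
            free(txqs);                 /* unwind: release the array */
            return (NULL);
        }
        for (int i = 0; i < ntxqsets; i++) {
            txqs[i].me = i;             /* the queue's own index */
            txqs[i].tail = QTX_COMM_DBELL_SKETCH(qmap[i]);
        }
        return (txqs);
    }

The caching step is why the assignment has to precede the per-queue loop: each software queue bakes its hardware queue index into the stored tail value.
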
1121 struct ice_vsi *vsi = &sc->pf_vsi; in ice_if_rx_queues_alloc() local
1134 if (!(vsi->rx_queues = in ice_if_rx_queues_alloc()
1141 err = ice_resmgr_assign_contiguous(&sc->rx_qmgr, vsi->rx_qmap, nrxqsets); in ice_if_rx_queues_alloc()
1147 vsi->qmap_type = ICE_RESMGR_ALLOC_CONTIGUOUS; in ice_if_rx_queues_alloc()
1150 ice_vsi_add_rxqs_ctx(vsi); in ice_if_rx_queues_alloc()
1152 for (i = 0, rxq = vsi->rx_queues; i < nrxqsets; i++, rxq++) { in ice_if_rx_queues_alloc()
1154 rxq->vsi = vsi; in ice_if_rx_queues_alloc()
1160 rxq->tail = QRX_TAIL(vsi->rx_qmap[i]); in ice_if_rx_queues_alloc()
1167 vsi->num_rx_queues = nrxqsets; in ice_if_rx_queues_alloc()
1172 free(vsi->rx_queues, M_ICE); in ice_if_rx_queues_alloc()
1173 vsi->rx_queues = NULL; in ice_if_rx_queues_alloc()
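
The ice_if_rx_queues_alloc() path (1121-1173) mirrors the TX sketch above almost line for line; the only structural differences visible in this listing are the tail register, QRX_TAIL(vsi->rx_qmap[i]) instead of QTX_COMM_DBELL(...), and the rx_qmgr/rx_qmap pair, so a separate example would be redundant.
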
1198 struct ice_vsi *vsi = &sc->pf_vsi; in ice_if_queues_free() local
1207 ice_vsi_del_txqs_ctx(vsi); in ice_if_queues_free()
1208 ice_vsi_del_rxqs_ctx(vsi); in ice_if_queues_free()
1213 if (vsi->tx_queues != NULL) { in ice_if_queues_free()
1215 for (i = 0, txq = vsi->tx_queues; i < vsi->num_tx_queues; i++, txq++) { in ice_if_queues_free()
1221 free(vsi->tx_queues, M_ICE); in ice_if_queues_free()
1222 vsi->tx_queues = NULL; in ice_if_queues_free()
1223 vsi->num_tx_queues = 0; in ice_if_queues_free()
1225 if (vsi->rx_queues != NULL) { in ice_if_queues_free()
1226 free(vsi->rx_queues, M_ICE); in ice_if_queues_free()
1227 vsi->rx_queues = NULL; in ice_if_queues_free()
1228 vsi->num_rx_queues = 0; in ice_if_queues_free()
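
ice_if_queues_free() (1198-1228) is the allocators' inverse: the software queue contexts are removed first (ice_vsi_del_txqs_ctx()/ice_vsi_del_rxqs_ctx()), then each array is freed, its pointer NULLed, and its count zeroed so any later pass over the VSI sees a consistent empty state. A simplified sketch with stand-in types:

    #include <stdlib.h>

    struct tx_queue { int me; };
    struct rx_queue { int me; };

    struct vsi_sketch {
        struct tx_queue *tx_queues;
        struct rx_queue *rx_queues;
        int num_tx_queues;
        int num_rx_queues;
    };

    /* Free both queue arrays and leave the VSI in an empty state. */
    static void
    queues_free_sketch(struct vsi_sketch *vsi)
    {
        if (vsi->tx_queues != NULL) {
            /* the real code walks the array first (1215) to free per-queue state */
            free(vsi->tx_queues);
            vsi->tx_queues = NULL;
            vsi->num_tx_queues = 0;
        }
        if (vsi->rx_queues != NULL) {
            free(vsi->rx_queues);
            vsi->rx_queues = NULL;
            vsi->num_rx_queues = 0;
        }
    }
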
1599 struct ice_vsi *vsi = &sc->pf_vsi; in ice_if_msix_intr_assign() local
1604 if (vsi->num_rx_queues != vsi->num_tx_queues) { in ice_if_msix_intr_assign()
1607 vsi->num_tx_queues, vsi->num_rx_queues); in ice_if_msix_intr_assign()
1611 if (msix < (vsi->num_rx_queues + 1)) { in ice_if_msix_intr_assign()
1618 sc->num_irq_vectors = vsi->num_rx_queues + 1; in ice_if_msix_intr_assign()
1645 for (i = 0, vector = 1; i < vsi->num_rx_queues; i++, vector++) { in ice_if_msix_intr_assign()
1646 struct ice_rx_queue *rxq = &vsi->rx_queues[i]; in ice_if_msix_intr_assign()
1647 struct ice_tx_queue *txq = &vsi->tx_queues[i]; in ice_if_msix_intr_assign()
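
ice_if_msix_intr_assign() (1599-1647) enforces two invariants before wiring vectors: the RX and TX queue counts must match, because each index-i RX/TX pair shares one vector (1645-1647), and the device must offer at least num_rx_queues + 1 vectors, the extra one presumably being a non-queue vector since the per-queue loop starts at vector 1. A standalone sketch of the budget check; the names are illustrative:

    #include <stdio.h>

    /* Return the vector count to use, or -1 if the budget doesn't fit. */
    static int
    msix_budget_sketch(int msix, int num_tx, int num_rx)
    {
        if (num_rx != num_tx) {
            fprintf(stderr, "queue mismatch: %d TX vs %d RX\n", num_tx, num_rx);
            return (-1);
        }
        if (msix < num_rx + 1) {    /* one per RX/TX pair, plus vector 0 */
            fprintf(stderr, "need %d vectors, have %d\n", num_rx + 1, msix);
            return (-1);
        }
        return (num_rx + 1);
    }
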
1731 struct ice_vsi *vsi = &sc->pf_vsi; in ice_if_intr_enable() local
1744 for (int i = 0; i < vsi->num_rx_queues; i++) in ice_if_intr_enable()
1745 ice_enable_intr(hw, vsi->rx_queues[i].irqv->me); in ice_if_intr_enable()
1789 struct ice_vsi *vsi = &sc->pf_vsi; in ice_if_rx_queue_intr_enable() local
1796 ice_enable_intr(hw, vsi->rx_queues[rxqid].irqv->me); in ice_if_rx_queue_intr_enable()
1813 struct ice_vsi *vsi = &sc->pf_vsi; in ice_if_tx_queue_intr_enable() local
1820 ice_enable_intr(hw, vsi->tx_queues[txqid].irqv->me); in ice_if_tx_queue_intr_enable()
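
The three enable paths (1731-1745, 1789-1796, 1813-1820) reduce to one operation: follow a queue's irqv back-pointer to its MSI-X vector and enable that vector's index (->me) in hardware via ice_enable_intr(). A sketch with a stand-in for the hardware write:

    struct irq_vector { int me; };              /* ->me is the vector's index */
    struct queue { struct irq_vector *irqv; };

    /* Stand-in for the register write that ice_enable_intr() performs. */
    static void
    enable_intr_sketch(int vector)
    {
        (void)vector;   /* would set the enable bit for this vector */
    }

    /* Enable every RX queue's vector, as the loop at 1744-1745 does. */
    static void
    enable_all_rx_sketch(struct queue *rxqs, int nrx)
    {
        for (int i = 0; i < nrx; i++)
            enable_intr_sketch(rxqs[i].irqv->me);
    }
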
1962 ice_init_tx_tracking(struct ice_vsi *vsi) in ice_init_tx_tracking() argument
1968 for (i = 0, txq = vsi->tx_queues; i < vsi->num_tx_queues; i++, txq++) { in ice_init_tx_tracking()
1995 struct ice_vsi *vsi = &sc->pf_vsi; in ice_update_rx_mbuf_sz() local
1998 vsi->mbuf_sz = mbuf_sz; in ice_update_rx_mbuf_sz()
2216 ice_update_vsi_hw_stats(sc->mirr_if->vsi); in ice_if_timer()
2266 struct ice_vsi *vsi = &sc->pf_vsi; in ice_transition_recovery_mode() local
2283 ice_vsi_del_txqs_ctx(vsi); in ice_transition_recovery_mode()
2284 ice_vsi_del_rxqs_ctx(vsi); in ice_transition_recovery_mode()
2461 ice_resmgr_release_map(&sc->tx_qmgr, sc->mirr_if->vsi->tx_qmap, in ice_prepare_for_reset()
2463 ice_resmgr_release_map(&sc->rx_qmgr, sc->mirr_if->vsi->rx_qmap, in ice_prepare_for_reset()
2485 struct ice_vsi *vsi = &sc->pf_vsi; in ice_rebuild_pf_vsi_qmap() local
2491 err = ice_resmgr_assign_contiguous(&sc->tx_qmgr, vsi->tx_qmap, in ice_rebuild_pf_vsi_qmap()
2492 vsi->num_tx_queues); in ice_rebuild_pf_vsi_qmap()
2500 err = ice_resmgr_assign_contiguous(&sc->rx_qmgr, vsi->rx_qmap, in ice_rebuild_pf_vsi_qmap()
2501 vsi->num_rx_queues); in ice_rebuild_pf_vsi_qmap()
2508 vsi->qmap_type = ICE_RESMGR_ALLOC_CONTIGUOUS; in ice_rebuild_pf_vsi_qmap()
2511 for (i = 0, txq = vsi->tx_queues; i < vsi->num_tx_queues; i++, txq++) in ice_rebuild_pf_vsi_qmap()
2512 txq->tail = QTX_COMM_DBELL(vsi->tx_qmap[i]); in ice_rebuild_pf_vsi_qmap()
2515 for (i = 0, rxq = vsi->rx_queues; i < vsi->num_rx_queues; i++, rxq++) in ice_rebuild_pf_vsi_qmap()
2516 rxq->tail = QRX_TAIL(vsi->rx_qmap[i]); in ice_rebuild_pf_vsi_qmap()
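
After a reset, ice_rebuild_pf_vsi_qmap() (2485-2516) reacquires the contiguous qmap blocks that ice_prepare_for_reset() released (2461-2463 show the mirror interface's release) and then recomputes every cached tail register, since the hardware queue index behind each software queue may have moved. The recomputation step, sketched with hypothetical register macros:

    struct tx_queue { unsigned int tail; };
    struct rx_queue { unsigned int tail; };

    /* Illustrative register layouts; the real macros live in the driver. */
    #define QTX_COMM_DBELL_SKETCH(q)    (0x2C000u + 4u * (unsigned int)(q))
    #define QRX_TAIL_SKETCH(q)          (0x12000u + 4u * (unsigned int)(q))

    /* Re-derive each cached tail register from the (possibly new) qmap. */
    static void
    refresh_tails_sketch(struct tx_queue *txqs, struct rx_queue *rxqs,
        const unsigned int *tx_qmap, const unsigned int *rx_qmap, int nqueues)
    {
        for (int i = 0; i < nqueues; i++) {
            txqs[i].tail = QTX_COMM_DBELL_SKETCH(tx_qmap[i]);
            rxqs[i].tail = QRX_TAIL_SKETCH(rx_qmap[i]);
        }
    }
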
3514 struct ice_vsi *vsi = mif->vsi; in ice_destroy_mirror_interface() local
3558 ice_release_vsi(vsi); in ice_destroy_mirror_interface()
3579 struct ice_vsi *vsi; in ice_setup_mirror_vsi() local
3583 vsi = ice_alloc_vsi(sc, ICE_VSI_VMDQ2); in ice_setup_mirror_vsi()
3584 if (!vsi) { in ice_setup_mirror_vsi()
3588 mif->vsi = vsi; in ice_setup_mirror_vsi()
3591 ice_alloc_vsi_qmap(vsi, ICE_DEFAULT_VF_QUEUES, ICE_DEFAULT_VF_QUEUES); in ice_setup_mirror_vsi()
3592 vsi->num_tx_queues = vsi->num_rx_queues = ICE_DEFAULT_VF_QUEUES; in ice_setup_mirror_vsi()
3595 ret = ice_resmgr_assign_scattered(&sc->tx_qmgr, vsi->tx_qmap, in ice_setup_mirror_vsi()
3596 vsi->num_tx_queues); in ice_setup_mirror_vsi()
3603 ret = ice_resmgr_assign_scattered(&sc->rx_qmgr, vsi->rx_qmap, in ice_setup_mirror_vsi()
3604 vsi->num_rx_queues); in ice_setup_mirror_vsi()
3610 vsi->qmap_type = ICE_RESMGR_ALLOC_SCATTERED; in ice_setup_mirror_vsi()
3611 vsi->max_frame_size = ICE_MAX_FRAME_SIZE; in ice_setup_mirror_vsi()
3613 ret = ice_initialize_vsi(vsi); in ice_setup_mirror_vsi()
3621 ret = ice_config_rss(vsi); in ice_setup_mirror_vsi()
3630 vsi->mirror_src_vsi = sc->pf_vsi.idx; in ice_setup_mirror_vsi()
3634 vsi->mirror_src_vsi, vsi->idx); in ice_setup_mirror_vsi()
3636 ice_get_hw_vsi_num(&sc->hw, vsi->mirror_src_vsi), in ice_setup_mirror_vsi()
3637 ice_get_hw_vsi_num(&sc->hw, vsi->idx)); in ice_setup_mirror_vsi()
3639 ret = ice_setup_vsi_mirroring(vsi); in ice_setup_mirror_vsi()
3650 ice_release_vsi(vsi); in ice_setup_mirror_vsi()
3651 mif->vsi = NULL; in ice_setup_mirror_vsi()
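
ice_setup_mirror_vsi() (3579-3651) is the longest flow in this listing: allocate a VMDQ2 VSI, size its qmaps, take scattered (not contiguous) hardware queues, initialize the VSI, configure RSS, record the PF VSI as the mirror source, and finally enable mirroring; any failure funnels to a common unwind that releases the VSI and clears mif->vsi. A condensed control-flow sketch, with trivial stand-ins replacing the ice_* steps:

    #include <stdlib.h>

    struct vsi_sketch { int idx; int mirror_src_vsi; };

    /* Trivial stand-ins for the driver's setup steps; each "succeeds". */
    static int assign_scattered_sketch(struct vsi_sketch *v) { (void)v; return (0); }
    static int initialize_vsi_sketch(struct vsi_sketch *v)   { (void)v; return (0); }
    static int config_rss_sketch(struct vsi_sketch *v)       { (void)v; return (0); }
    static int setup_mirroring_sketch(struct vsi_sketch *v)  { (void)v; return (0); }
    static void release_vsi_sketch(struct vsi_sketch *v)     { free(v); }

    static struct vsi_sketch *
    setup_mirror_vsi_sketch(int pf_vsi_idx)
    {
        struct vsi_sketch *vsi;

        vsi = calloc(1, sizeof(*vsi));
        if (vsi == NULL)
            return (NULL);
        if (assign_scattered_sketch(vsi) != 0 ||
            initialize_vsi_sketch(vsi) != 0 ||
            config_rss_sketch(vsi) != 0)
            goto release;
        vsi->mirror_src_vsi = pf_vsi_idx;   /* PF VSI is the mirror source */
        if (setup_mirroring_sketch(vsi) != 0)
            goto release;
        return (vsi);
    release:
        release_vsi_sketch(vsi);    /* single unwind point, as in the driver */
        return (NULL);
    }
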
3739 ice_add_vsi_sysctls(mif->vsi); in ice_create_mirror_interface()
3777 struct ice_vsi *vsi = mif->vsi; in ice_wire_mirror_intrs() local
3786 for (i = 0; i < vsi->num_rx_queues; i++, rid++) { in ice_wire_mirror_intrs()
3787 struct ice_rx_queue *rxq = &vsi->rx_queues[i]; in ice_wire_mirror_intrs()
3788 struct ice_tx_queue *txq = &vsi->tx_queues[i]; in ice_wire_mirror_intrs()
3840 struct ice_vsi *vsi = sc->mirr_if->vsi; in ice_subif_rebuild() local
3850 err = ice_initialize_vsi(vsi); in ice_subif_rebuild()
3857 err = ice_config_rss(vsi); in ice_subif_rebuild()
3865 vsi->mirror_src_vsi = sc->pf_vsi.idx; in ice_subif_rebuild()
3867 err = ice_setup_vsi_mirroring(vsi); in ice_subif_rebuild()
3880 ice_deinit_vsi(vsi); in ice_subif_rebuild()
3882 ice_resmgr_release_map(&sc->tx_qmgr, vsi->tx_qmap, in ice_subif_rebuild()
3884 ice_resmgr_release_map(&sc->rx_qmgr, vsi->rx_qmap, in ice_subif_rebuild()
3900 struct ice_vsi *vsi = sc->mirr_if->vsi; in ice_subif_rebuild_vsi_qmap() local
3905 err = ice_resmgr_assign_scattered(&sc->tx_qmgr, vsi->tx_qmap, sc->mirr_if->num_irq_vectors); in ice_subif_rebuild_vsi_qmap()
3912 err = ice_resmgr_assign_scattered(&sc->rx_qmgr, vsi->rx_qmap, sc->mirr_if->num_irq_vectors); in ice_subif_rebuild_vsi_qmap()
3919 vsi->qmap_type = ICE_RESMGR_ALLOC_SCATTERED; in ice_subif_rebuild_vsi_qmap()
3922 for (i = 0, txq = vsi->tx_queues; i < vsi->num_tx_queues; i++, txq++) in ice_subif_rebuild_vsi_qmap()
3923 txq->tail = QTX_COMM_DBELL(vsi->tx_qmap[i]); in ice_subif_rebuild_vsi_qmap()
3926 for (i = 0, rxq = vsi->rx_queues; i < vsi->num_rx_queues; i++, rxq++) in ice_subif_rebuild_vsi_qmap()
3927 rxq->tail = QRX_TAIL(vsi->rx_qmap[i]); in ice_subif_rebuild_vsi_qmap()
3932 ice_resmgr_release_map(&sc->tx_qmgr, vsi->tx_qmap, vsi->num_tx_queues); in ice_subif_rebuild_vsi_qmap()
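
ice_subif_rebuild_vsi_qmap() (3900-3932) repeats the rebuild pattern above for the mirror interface, with two differences worth noting: it uses ice_resmgr_assign_scattered(), matching how the mirror VSI's queues were first allocated (3595), and it sizes both requests by sc->mirr_if->num_irq_vectors rather than the queue counts; its error path releases the already-acquired TX map (3932) before returning.
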
3955 struct ice_vsi *vsi; in ice_subif_if_tx_queues_alloc() local
3962 vsi = mif->vsi; in ice_subif_if_tx_queues_alloc()
3964 MPASS(vsi->num_tx_queues == ntxqsets); in ice_subif_if_tx_queues_alloc()
3967 if (!(vsi->tx_queues = in ice_subif_if_tx_queues_alloc()
3975 for (i = 0, txq = vsi->tx_queues; i < ntxqsets; i++, txq++) { in ice_subif_if_tx_queues_alloc()
3989 ice_vsi_add_txqs_ctx(vsi); in ice_subif_if_tx_queues_alloc()
3991 for (i = 0, txq = vsi->tx_queues; i < ntxqsets; i++, txq++) { in ice_subif_if_tx_queues_alloc()
3994 txq->vsi = vsi; in ice_subif_if_tx_queues_alloc()
4000 txq->tail = QTX_COMM_DBELL(vsi->tx_qmap[i]); in ice_subif_if_tx_queues_alloc()
4010 for (i = 0, txq = vsi->tx_queues; i < ntxqsets; i++, txq++) { in ice_subif_if_tx_queues_alloc()
4016 free(vsi->tx_queues, M_ICE); in ice_subif_if_tx_queues_alloc()
4017 vsi->tx_queues = NULL; in ice_subif_if_tx_queues_alloc()
4039 struct ice_vsi *vsi; in ice_subif_if_rx_queues_alloc() local
4046 vsi = mif->vsi; in ice_subif_if_rx_queues_alloc()
4048 MPASS(vsi->num_rx_queues == nrxqsets); in ice_subif_if_rx_queues_alloc()
4051 if (!(vsi->rx_queues = in ice_subif_if_rx_queues_alloc()
4059 ice_vsi_add_rxqs_ctx(vsi); in ice_subif_if_rx_queues_alloc()
4061 for (i = 0, rxq = vsi->rx_queues; i < nrxqsets; i++, rxq++) { in ice_subif_if_rx_queues_alloc()
4063 rxq->vsi = vsi; in ice_subif_if_rx_queues_alloc()
4069 rxq->tail = QRX_TAIL(vsi->rx_qmap[i]); in ice_subif_if_rx_queues_alloc()
4093 struct ice_vsi *vsi = mif->vsi; in ice_subif_if_msix_intr_assign() local
4098 if (vsi->num_rx_queues != vsi->num_tx_queues) { in ice_subif_if_msix_intr_assign()
4101 vsi->num_tx_queues, vsi->num_rx_queues); in ice_subif_if_msix_intr_assign()
4115 mif->num_irq_vectors = vsi->num_rx_queues; in ice_subif_if_msix_intr_assign()
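
Unlike the PF path, which budgets vsi->num_rx_queues + 1 vectors (1618), the subinterface claims exactly one vector per RX queue (4115), presumably because the parent PF already owns the non-queue vector.
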
4178 struct ice_vsi *vsi = mif->vsi; in ice_subif_if_intr_enable() local
4186 for (int i = 0; i < vsi->num_rx_queues; i++) in ice_subif_if_intr_enable()
4187 ice_enable_intr(hw, vsi->rx_queues[i].irqv->me); in ice_subif_if_intr_enable()
4204 struct ice_vsi *vsi = mif->vsi; in ice_subif_if_rx_queue_intr_enable() local
4211 ice_enable_intr(hw, vsi->rx_queues[rxqid].irqv->me); in ice_subif_if_rx_queue_intr_enable()
4229 struct ice_vsi *vsi = mif->vsi; in ice_subif_if_tx_queue_intr_enable() local
4236 ice_enable_intr(hw, vsi->tx_queues[txqid].irqv->me); in ice_subif_if_tx_queue_intr_enable()
4254 struct ice_vsi *vsi = mif->vsi; in ice_subif_if_init() local
4279 vsi->mbuf_sz = iflib_get_rx_mbuf_sz(ctx); in ice_subif_if_init()
4282 ice_init_tx_tracking(vsi); in ice_subif_if_init()
4284 err = ice_cfg_vsi_for_tx(vsi); in ice_subif_if_init()
4292 err = ice_cfg_vsi_for_rx(vsi); in ice_subif_if_init()
4300 err = ice_control_all_rx_queues(vsi, true); in ice_subif_if_init()
4308 ice_configure_all_rxq_interrupts(vsi); in ice_subif_if_init()
4309 ice_configure_rx_itr(vsi); in ice_subif_if_init()
4315 ice_vsi_disable_tx(vsi); in ice_subif_if_init()
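
ice_subif_if_init() (4254-4315) spells out the bring-up order: cache the RX mbuf size, initialize TX tracking, configure the VSI for TX then RX, start the RX queues, and only then wire RX interrupts and ITR; if a step after TX configuration fails, TX is disabled again (4315). A skeletal sketch of that ordering with hypothetical helpers:

    /* Trivial stand-ins for the driver's per-step helpers. */
    static int cfg_vsi_for_tx_sketch(void) { return (0); }
    static int cfg_vsi_for_rx_sketch(void) { return (0); }
    static int start_rx_queues_sketch(void) { return (0); }
    static void disable_tx_sketch(void) { }
    static void configure_rx_interrupts_sketch(void) { }

    static int
    subif_init_sketch(void)
    {
        /* ... cache mbuf size, init TX tracking ... */
        if (cfg_vsi_for_tx_sketch() != 0)
            return (-1);
        if (cfg_vsi_for_rx_sketch() != 0)
            goto err_disable_tx;    /* TX is already configured; undo it */
        if (start_rx_queues_sketch() != 0)
            goto err_disable_tx;
        configure_rx_interrupts_sketch();   /* interrupts last, once queues run */
        return (0);
    err_disable_tx:
        disable_tx_sketch();
        return (-1);
    }
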
4333 struct ice_vsi *vsi = mif->vsi; in ice_subif_if_stop() local
4354 ice_flush_txq_interrupts(vsi); in ice_subif_if_stop()
4355 ice_flush_rxq_interrupts(vsi); in ice_subif_if_stop()
4358 ice_vsi_disable_tx(vsi); in ice_subif_if_stop()
4359 ice_control_all_rx_queues(vsi, false); in ice_subif_if_stop()
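
ice_subif_if_stop() (4333-4359) reverses that order: pending TX and RX queue interrupts are flushed first (4354-4355), and only then are the TX rings disabled and the RX queues stopped, presumably so no interrupt fires against a queue that is mid-teardown.
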
4372 struct ice_vsi *vsi = mif->vsi; in ice_free_irqvs_subif() local
4385 for (i = 0; i < vsi->num_rx_queues; i++) in ice_free_irqvs_subif()
4393 sc->last_rid -= vsi->num_rx_queues; in ice_free_irqvs_subif()
4396 for (i = 0; i < vsi->num_rx_queues; i++) in ice_free_irqvs_subif()
4397 vsi->rx_queues[i].irqv = NULL; in ice_free_irqvs_subif()
4399 for (i = 0; i < vsi->num_tx_queues; i++) in ice_free_irqvs_subif()
4400 vsi->tx_queues[i].irqv = NULL; in ice_free_irqvs_subif()
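
ice_free_irqvs_subif() (4372-4400) repeats the back-pointer-clearing idiom from ice_free_irqvs(), with one extra step: it returns the subinterface's resource IDs by decrementing sc->last_rid by the number of RX queues (4393) before NULLing the irqv pointers.
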
4418 struct ice_vsi *vsi = mif->vsi; in ice_subif_if_queues_free() local
4425 ice_vsi_del_txqs_ctx(vsi); in ice_subif_if_queues_free()
4426 ice_vsi_del_rxqs_ctx(vsi); in ice_subif_if_queues_free()
4431 if (vsi->tx_queues != NULL) { in ice_subif_if_queues_free()
4433 for (i = 0, txq = vsi->tx_queues; i < vsi->num_tx_queues; i++, txq++) { in ice_subif_if_queues_free()
4439 free(vsi->tx_queues, M_ICE); in ice_subif_if_queues_free()
4440 vsi->tx_queues = NULL; in ice_subif_if_queues_free()
4442 if (vsi->rx_queues != NULL) { in ice_subif_if_queues_free()
4443 free(vsi->rx_queues, M_ICE); in ice_subif_if_queues_free()
4444 vsi->rx_queues = NULL; in ice_subif_if_queues_free()