Lines matching refs:que: cross-reference hits for the queue pointer que in the ixgbe driver. Each entry shows the source line number, the matching code, and the enclosing function; "local" marks a local declaration of que, "argument" a function parameter.
458 struct ix_tx_queue *que; in ixgbe_if_tx_queues_alloc() local
475 for (i = 0, que = sc->tx_queues; i < ntxqsets; i++, que++) { in ixgbe_if_tx_queues_alloc()
476 struct tx_ring *txr = &que->txr; in ixgbe_if_tx_queues_alloc()
481 txr->sc = que->sc = sc; in ixgbe_if_tx_queues_alloc()
525 struct ix_rx_queue *que; in ixgbe_if_rx_queues_alloc() local
542 for (i = 0, que = sc->rx_queues; i < nrxqsets; i++, que++) { in ixgbe_if_rx_queues_alloc()
543 struct rx_ring *rxr = &que->rxr; in ixgbe_if_rx_queues_alloc()
548 rxr->sc = que->sc = sc; in ixgbe_if_rx_queues_alloc()
555 rxr->que = que; in ixgbe_if_rx_queues_alloc()
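The two allocation callbacks above, ixgbe_if_tx_queues_alloc() and ixgbe_if_rx_queues_alloc(), follow the same shape: the softc owns a contiguous array of queue structs, each queue embeds its ring, and the loop wires back-pointers so ring, queue, and softc can all reach one another. Below is a minimal user-space sketch of that pattern; the struct layouts and the plain calloc() are simplified stand-ins, not the real iflib allocation path.

/*
 * Minimal user-space sketch (not driver code) of the per-queue
 * allocation pattern above: the softc owns an array of queue structs,
 * each queue embeds its ring, and the loop wires the back-pointers.
 */
#include <stdio.h>
#include <stdlib.h>

struct softc;                              /* forward declaration */

struct tx_ring     { struct softc *sc; int me; };
struct ix_tx_queue { struct softc *sc; struct tx_ring txr; };

struct softc {
    int                 num_tx_queues;
    struct ix_tx_queue *tx_queues;
};

static int
tx_queues_alloc(struct softc *sc, int ntxqsets)
{
    struct ix_tx_queue *que;
    int i;

    sc->tx_queues = calloc(ntxqsets, sizeof(*sc->tx_queues));
    if (sc->tx_queues == NULL)
        return (-1);
    sc->num_tx_queues = ntxqsets;

    for (i = 0, que = sc->tx_queues; i < ntxqsets; i++, que++) {
        struct tx_ring *txr = &que->txr;

        txr->me = i;                       /* the ring knows its own index */
        txr->sc = que->sc = sc;            /* ring and queue point back at softc */
    }
    return (0);
}

int
main(void)
{
    struct softc sc = { 0, NULL };

    if (tx_queues_alloc(&sc, 4) != 0)
        return (1);
    printf("queue 2 ring index: %d\n", sc.tx_queues[2].txr.me);
    free(sc.tx_queues);
    return (0);
}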
718 struct ix_rx_queue *que; in ixgbe_initialize_receive_units() local
750 for (i = 0, que = sc->rx_queues; i < sc->num_rx_queues; i++, que++) { in ixgbe_initialize_receive_units()
751 struct rx_ring *rxr = &que->rxr; in ixgbe_initialize_receive_units()
830 struct ix_tx_queue *que; in ixgbe_initialize_transmit_units() local
834 for (i = 0, que = sc->tx_queues; i < sc->num_tx_queues; in ixgbe_initialize_transmit_units()
835 i++, que++) { in ixgbe_initialize_transmit_units()
836 struct tx_ring *txr = &que->txr; in ixgbe_initialize_transmit_units()
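ixgbe_initialize_receive_units() and ixgbe_initialize_transmit_units() walk the same queue arrays to program each ring's hardware state, indexed by the ring number. A rough standalone sketch of that per-ring register-programming loop follows; reg_write() and the register offsets/stride are entirely hypothetical, standing in for IXGBE_WRITE_REG() and the real register macros.

/*
 * Sketch of the init-loop pattern: walk the rings and program a
 * per-ring register block selected by the ring index. All offsets and
 * addresses below are made up for illustration.
 */
#include <stdint.h>
#include <stdio.h>

#define NQUEUES      4
#define RING_BASE(i) (0x1000u + 0x40u * (uint32_t)(i))    /* made-up stride */

struct rx_ring { int me; uint64_t paddr; uint32_t ndesc; };

static void
reg_write(uint32_t reg, uint32_t val)
{
    printf("reg 0x%04x <- 0x%08x\n", (unsigned)reg, (unsigned)val);
}

int
main(void)
{
    struct rx_ring rings[NQUEUES];
    int i;

    for (i = 0; i < NQUEUES; i++) {
        struct rx_ring *rxr = &rings[i];

        rxr->me    = i;
        rxr->paddr = 0x100000ull + (uint64_t)i * 0x10000u; /* fake DMA address */
        rxr->ndesc = 1024;

        /* each ring owns a register block selected by its index */
        reg_write(RING_BASE(rxr->me) + 0x0, (uint32_t)rxr->paddr);
        reg_write(RING_BASE(rxr->me) + 0x4, (uint32_t)(rxr->paddr >> 32));
        reg_write(RING_BASE(rxr->me) + 0x8, rxr->ndesc * 16);  /* 16-byte descriptors assumed */
    }
    return (0);
}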
2264 ixgbe_perform_aim(struct ixgbe_softc *sc, struct ix_rx_queue *que) in ixgbe_perform_aim() argument
2267 struct rx_ring *rxr = &que->rxr; in ixgbe_perform_aim()
2276 if (que->eitr_setting) { in ixgbe_perform_aim()
2277 IXGBE_WRITE_REG(&sc->hw, IXGBE_EITR(que->msix), in ixgbe_perform_aim()
2278 que->eitr_setting); in ixgbe_perform_aim()
2281 que->eitr_setting = 0; in ixgbe_perform_aim()
2315 que->eitr_setting = newitr; in ixgbe_perform_aim()
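ixgbe_perform_aim() shows the usual adaptive-interrupt-moderation trick: the EITR value computed for a queue is not written immediately but staged in que->eitr_setting and flushed at the start of the next interrupt, after which a fresh value is derived from the traffic seen since. A small self-contained model of that flow; the rate constants and packet-size threshold are illustrative guesses, not the driver's real AIM table.

/*
 * Model of the deferred-write AIM pattern: apply last interval's value,
 * then stage a new one based on observed traffic.
 */
#include <stdint.h>
#include <stdio.h>

struct rx_queue {
    uint32_t eitr_setting;   /* value staged for the next interrupt */
    uint64_t bytes;          /* traffic seen since the last interrupt */
    uint64_t packets;
};

static void
eitr_write(int qid, uint32_t val)
{
    printf("EITR[%d] <- %u\n", qid, (unsigned)val);
}

static void
perform_aim(int qid, struct rx_queue *que)
{
    uint32_t newitr;

    /* apply the value computed on the previous pass, if any */
    if (que->eitr_setting)
        eitr_write(qid, que->eitr_setting);
    que->eitr_setting = 0;

    if (que->packets == 0)
        return;                       /* idle interval: leave moderation alone */

    /* pick an interrupt rate from the average packet size (made-up bins) */
    if (que->bytes / que->packets > 1024)
        newitr = 200;                 /* bulk traffic: lower interrupt rate */
    else
        newitr = 800;                 /* small packets: higher interrupt rate */

    /* stage it; it is written at the start of the next interrupt */
    que->eitr_setting = newitr;
    que->bytes = que->packets = 0;
}

int
main(void)
{
    struct rx_queue q = { 0, 64 * 1500, 64 };

    perform_aim(0, &q);   /* computes and stages a value */
    perform_aim(0, &q);   /* writes the staged value */
    return (0);
}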
2332 struct ix_rx_queue *que = arg; in ixgbe_msix_que() local
2333 struct ixgbe_softc *sc = que->sc; in ixgbe_msix_que()
2334 if_t ifp = iflib_get_ifp(que->sc->ctx); in ixgbe_msix_que()
2340 ixgbe_disable_queue(sc, que->msix); in ixgbe_msix_que()
2341 ++que->irqs; in ixgbe_msix_que()
2345 ixgbe_perform_aim(sc, que); in ixgbe_msix_que()
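ixgbe_msix_que() is the per-queue MSI-X handler: it masks the queue's own vector, counts the interrupt, optionally runs AIM, and returns so the deferred RX/TX cleanup can run. A sketch of that flow, with placeholder helpers in place of the real register write and iflib's task scheduling.

/*
 * Sketch of the per-queue MSI-X handler flow. disable_queue() and
 * schedule_rxtx() are placeholders, not the driver's functions.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct rx_queue {
    int      msix;     /* vector owned by this queue */
    uint64_t irqs;     /* interrupt counter */
};

static bool enable_aim = true;   /* mirrors the driver's AIM on/off knob */

static void disable_queue(int vector) { printf("mask vector %d\n", vector); }
static void perform_aim(struct rx_queue *que) { (void)que; /* see sketch above */ }
static int  schedule_rxtx(void) { return (1); /* "run the cleanup task" */ }

static int
msix_que(void *arg)
{
    struct rx_queue *que = arg;

    disable_queue(que->msix);    /* keep the vector quiet until cleanup ends */
    que->irqs++;

    if (enable_aim)
        perform_aim(que);

    return (schedule_rxtx());
}

int
main(void)
{
    struct rx_queue q = { .msix = 3 };

    msix_que(&q);
    printf("irqs on queue: %llu\n", (unsigned long long)q.irqs);
    return (0);
}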
2785 struct ix_rx_queue *que = ((struct ix_rx_queue *)oidp->oid_arg1); in ixgbe_sysctl_interrupt_rate_handler() local
2789 if (atomic_load_acq_int(&que->sc->recovery_mode)) in ixgbe_sysctl_interrupt_rate_handler()
2792 reg = IXGBE_READ_REG(&que->sc->hw, IXGBE_EITR(que->msix)); in ixgbe_sysctl_interrupt_rate_handler()
2809 IXGBE_WRITE_REG(&que->sc->hw, IXGBE_EITR(que->msix), reg); in ixgbe_sysctl_interrupt_rate_handler()
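ixgbe_sysctl_interrupt_rate_handler() translates between a human-readable interrupts-per-second value and the EITR interval field it reads and writes. The sketch below assumes the interval field occupies bits [11:3] of EITR and counts 2-microsecond units, so rate is roughly 500000 / field; treat that encoding as an assumption to check against the datasheet, not a statement of the register layout.

/*
 * Rate <-> EITR conversion sketch under the assumed field layout above.
 */
#include <stdint.h>
#include <stdio.h>

#define EITR_INTERVAL_MASK 0x0FF8u   /* bits [11:3], assumed layout */

/* register value -> interrupts per second */
static unsigned
eitr_to_rate(uint32_t reg)
{
    uint32_t field = (reg & EITR_INTERVAL_MASK) >> 3;

    return (field ? 500000u / field : 0);   /* 0 means "unthrottled" */
}

/* interrupts per second -> register value (other bits preserved) */
static uint32_t
rate_to_eitr(uint32_t reg, unsigned rate)
{
    reg &= ~EITR_INTERVAL_MASK;
    if (rate > 0 && rate < 500000)
        reg |= ((500000u / rate) << 3) & EITR_INTERVAL_MASK;
    return (reg);
}

int
main(void)
{
    uint32_t reg = rate_to_eitr(0, 8000);

    printf("reg=0x%04x rate=%u irq/s\n", (unsigned)reg, eitr_to_rate(reg));
    return (0);
}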
3951 struct ix_rx_queue *que = sc->rx_queues; in ixgbe_if_enable_intr() local
4021 for (int i = 0; i < sc->num_rx_queues; i++, que++) in ixgbe_if_enable_intr()
4022 ixgbe_enable_queue(sc, que->msix); in ixgbe_if_enable_intr()
4069 struct ix_rx_queue *que = &sc->rx_queues[rxqid]; in ixgbe_if_rx_queue_intr_enable() local
4071 ixgbe_enable_queue(sc, que->msix); in ixgbe_if_rx_queue_intr_enable()
4129 struct ix_rx_queue *que = sc->rx_queues; in ixgbe_intr() local
4136 ++que->irqs; in ixgbe_intr()
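The next group of hits covers the interrupt-enable paths: ixgbe_if_enable_intr() unmasks every queue vector when the interface comes up, ixgbe_if_rx_queue_intr_enable() re-arms a single queue after its cleanup pass, and ixgbe_intr() is the shared legacy/MSI handler in which one queue structure absorbs all interrupts. A sketch of the first two, with enable_queue() standing in for the real interrupt-mask register write.

/*
 * Sketch of "enable all queues" vs. "enable one queue by index".
 */
#include <stdio.h>

#define NRXQUEUES 4

struct rx_queue { int msix; };

static struct rx_queue rx_queues[NRXQUEUES];

static void
enable_queue(int vector)
{
    printf("unmask vector %d\n", vector);
}

/* unmask every queue vector (used when bringing the interface up) */
static void
enable_intr(void)
{
    int i;

    for (i = 0; i < NRXQUEUES; i++)
        enable_queue(rx_queues[i].msix);
}

/* unmask a single queue, e.g. after its cleanup task has finished */
static int
rx_queue_intr_enable(int rxqid)
{
    enable_queue(rx_queues[rxqid].msix);
    return (0);
}

int
main(void)
{
    int i;

    for (i = 0; i < NRXQUEUES; i++)
        rx_queues[i].msix = i + 1;   /* vector numbering chosen arbitrarily here */

    enable_intr();
    rx_queue_intr_enable(2);
    return (0);
}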
4192 struct ix_rx_queue *que = sc->rx_queues; in ixgbe_free_pci_resources() local
4199 if (que != NULL) { in ixgbe_free_pci_resources()
4200 for (int i = 0; i < sc->num_rx_queues; i++, que++) { in ixgbe_free_pci_resources()
4201 iflib_irq_free(ctx, &que->que_irq); in ixgbe_free_pci_resources()
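ixgbe_free_pci_resources() undoes the per-queue IRQ setup, checking that the queue array exists before walking it and releasing each queue's interrupt. A self-contained sketch of that teardown loop, with irq_free() as a stand-in for iflib_irq_free(); freeing the array itself at the end is part of this user-space model, not something the listing above shows the driver doing.

/*
 * Teardown sketch: release the per-queue IRQ of every RX queue that was
 * set up, guarding the case where the array was never allocated.
 */
#include <stdio.h>
#include <stdlib.h>

struct irq      { int vector; };
struct rx_queue { struct irq que_irq; };

struct softc {
    int              num_rx_queues;
    struct rx_queue *rx_queues;
};

static void
irq_free(struct irq *irq)
{
    printf("free irq for vector %d\n", irq->vector);
    irq->vector = -1;
}

static void
free_pci_resources(struct softc *sc)
{
    struct rx_queue *que = sc->rx_queues;
    int i;

    if (que != NULL) {               /* array may never have been allocated */
        for (i = 0; i < sc->num_rx_queues; i++, que++)
            irq_free(&que->que_irq);
        free(sc->rx_queues);
        sc->rx_queues = NULL;
    }
}

int
main(void)
{
    struct softc sc;
    int i;

    sc.num_rx_queues = 2;
    sc.rx_queues = calloc(sc.num_rx_queues, sizeof(*sc.rx_queues));
    for (i = 0; i < sc.num_rx_queues; i++)
        sc.rx_queues[i].que_irq.vector = i + 1;

    free_pci_resources(&sc);
    return (0);
}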