Searched refs:max_rx_queues (Results 1 – 10 of 10) sorted by relevance
  85  hw->mac.max_rx_queues = 1;                            in ixgbe_init_ops_vf()
 273  for (i = 0; i < hw->mac.max_rx_queues; i++) {         in ixgbe_stop_adapter_vf()
 800  hw->mac.max_rx_queues = msg[IXGBE_VF_RX_QUEUES];      in ixgbevf_get_queues()
 801  if (hw->mac.max_rx_queues == 0 ||                     in ixgbevf_get_queues()
 802      hw->mac.max_rx_queues > IXGBE_VF_MAX_RX_QUEUES)   in ixgbevf_get_queues()
 803      hw->mac.max_rx_queues = IXGBE_VF_MAX_RX_QUEUES;   in ixgbevf_get_queues()
 807  if (*num_tcs > hw->mac.max_rx_queues)                 in ixgbevf_get_queues()
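The ixgbevf_get_queues() hits above show the VF-side sanity check on the RX queue count reported by the PF over the mailbox. Below is a minimal userspace sketch of that clamping pattern, not the driver code itself; the struct layout and the value of IXGBE_VF_MAX_RX_QUEUES here are assumptions for illustration.

/*
 * Sketch of the clamping pattern: take the PF-supplied count and fall back
 * to the hardware maximum if it is zero or out of range.
 */
#include <stdint.h>
#include <stdio.h>

#define IXGBE_VF_MAX_RX_QUEUES 8   /* assumed value for illustration */

struct vf_mac_info {
	uint32_t max_rx_queues;
};

static void vf_apply_rx_queue_msg(struct vf_mac_info *mac, uint32_t msg_rx_queues)
{
	/* Trust the PF-supplied count only if it is sane. */
	mac->max_rx_queues = msg_rx_queues;
	if (mac->max_rx_queues == 0 ||
	    mac->max_rx_queues > IXGBE_VF_MAX_RX_QUEUES)
		mac->max_rx_queues = IXGBE_VF_MAX_RX_QUEUES;
}

int main(void)
{
	struct vf_mac_info mac;

	vf_apply_rx_queue_msg(&mac, 0);    /* bogus value -> clamped to the maximum */
	printf("clamped: %u\n", mac.max_rx_queues);

	vf_apply_rx_queue_msg(&mac, 4);    /* valid value -> kept as-is */
	printf("kept:    %u\n", mac.max_rx_queues);
	return 0;
}

The same bound is then reused to reject a TC count larger than the negotiated queue count (the *num_tcs check at line 807 above).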
 159  mac->max_rx_queues = IXGBE_82598_MAX_RX_QUEUES;       in ixgbe_init_ops_82598()
 274  for (i = 0; ((i < hw->mac.max_rx_queues) &&           in ixgbe_start_hw_82598()
1377  for (i = 0; ((i < hw->mac.max_rx_queues) &&           in ixgbe_enable_relaxed_ordering_82598()
 137  mac->max_rx_queues = IXGBE_X540_MAX_RX_QUEUES;        in ixgbe_init_ops_X540()
 470  return hw->mac.max_rx_queues;                         in ixgbe_get_num_of_rx_queues()
 482  for (i = 0; i < hw->mac.max_rx_queues; i++) {         in ixgbe_start_hw_gen2()
1147  for (i = 0; i < hw->mac.max_rx_queues; i++) {         in ixgbe_stop_adapter_generic()
4546  for (i = 0; i < hw->mac.max_rx_queues; i++) {         in ixgbe_enable_relaxed_ordering_gen2()
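The start/stop and relaxed-ordering hits all use mac.max_rx_queues as the loop bound when touching every RX queue. The sketch below illustrates that pattern with a stand-in register array and made-up bit names; the real driver reads and writes per-queue control registers (e.g. RXDCTL) through MMIO accessors.

/*
 * Sketch of the "walk every RX queue" pattern: loop up to max_rx_queues and
 * rewrite one per-queue control word each iteration.
 */
#include <stdint.h>

#define MAX_QUEUES      128
#define RXCTL_ENABLE    (1u << 25)  /* placeholder bit positions */
#define RXCTL_SW_FLUSH  (1u << 26)

struct hw {
	uint32_t max_rx_queues;
	uint32_t rx_queue_ctl[MAX_QUEUES];  /* stand-in for MMIO registers */
};

/* Disable every RX queue the MAC reports, as a stop-adapter path would. */
static void stop_all_rx_queues(struct hw *hw)
{
	uint32_t i, reg;

	for (i = 0; i < hw->max_rx_queues; i++) {
		reg = hw->rx_queue_ctl[i];
		reg &= ~RXCTL_ENABLE;    /* stop the queue */
		reg |= RXCTL_SW_FLUSH;   /* request a descriptor flush */
		hw->rx_queue_ctl[i] = reg;
	}
}

int main(void)
{
	struct hw hw = { .max_rx_queues = 4 };

	hw.rx_queue_ctl[0] = RXCTL_ENABLE;
	stop_all_rx_queues(&hw);
	return 0;
}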
 382  mac->max_rx_queues = IXGBE_82599_MAX_RX_QUEUES;       in ixgbe_init_ops_82599()
4226  u32 max_rx_queues;                                    member
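The ixgbe hits above fit together: each generation's ixgbe_init_ops_*() fills in the mac.max_rx_queues member shown in the last hit, the VF starts at 1 until the PF reports more, and ixgbe_get_num_of_rx_queues() simply returns the stored value. Here is a self-contained sketch of that split; the numeric maxima are assumptions standing in for the IXGBE_*_MAX_RX_QUEUES constants.

/*
 * Sketch: per-generation init code populates a shared mac.max_rx_queues
 * field, generic code only ever reads it back.
 */
#include <stdint.h>
#include <stdio.h>

struct mac_info {
	uint32_t max_rx_queues;     /* the "u32 max_rx_queues; member" hit above */
};

struct hw {
	struct mac_info mac;
};

enum mac_type { MAC_82598, MAC_82599, MAC_X540, MAC_VF };

static void init_ops(struct hw *hw, enum mac_type type)
{
	switch (type) {
	case MAC_82598:
		hw->mac.max_rx_queues = 64;   /* assumed 82598 maximum */
		break;
	case MAC_82599:
	case MAC_X540:
		hw->mac.max_rx_queues = 128;  /* assumed 82599/X540 maximum */
		break;
	case MAC_VF:
		hw->mac.max_rx_queues = 1;    /* until the PF reports more */
		break;
	}
}

/* Mirrors the trivial accessor in the ixgbe_get_num_of_rx_queues() hit. */
static uint32_t get_num_of_rx_queues(const struct hw *hw)
{
	return hw->mac.max_rx_queues;
}

int main(void)
{
	struct hw hw;

	init_ops(&hw, MAC_82599);
	printf("rx queues: %u\n", get_num_of_rx_queues(&hw));
	return 0;
}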
938 const int max_rx_queues);
 456  const int max_rx_queues)                                              in ice_alloc_vsi_qmap() argument
 461  MPASS(max_rx_queues > 0);                                             in ice_alloc_vsi_qmap()
 467  vsi->rx_qmap = malloc(sizeof(u16) * max_rx_queues, M_ICE, M_WAITOK);  in ice_alloc_vsi_qmap()
 473  for (i = 0; i < max_rx_queues; i++) {                                 in ice_alloc_vsi_qmap()
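ice_alloc_vsi_qmap() (declared in the prototype hit just above) sizes the VSI's RX queue map from max_rx_queues, asserts the count is positive with MPASS(), and allocates the u16 array with M_WAITOK. A userspace sketch of the same allocation pattern follows, using plain malloc()/assert() in place of the kernel allocator and an assumed sentinel value for unmapped entries.

/*
 * Sketch: allocate a queue-mapping array sized by max_rx_queues and fill
 * every slot with an "unmapped" marker.
 */
#include <assert.h>
#include <stdint.h>
#include <stdlib.h>

#define INVALID_RES_IDX 0xFFFF  /* assumed "unmapped" marker */

struct vsi {
	uint16_t *rx_qmap;
	int num_rx_queues;
};

static int alloc_vsi_qmap(struct vsi *vsi, const int max_rx_queues)
{
	int i;

	assert(max_rx_queues > 0);

	vsi->rx_qmap = malloc(sizeof(uint16_t) * max_rx_queues);
	if (vsi->rx_qmap == NULL)
		return -1;

	/* Mark every entry as not yet mapped to a hardware queue. */
	for (i = 0; i < max_rx_queues; i++)
		vsi->rx_qmap[i] = INVALID_RES_IDX;

	vsi->num_rx_queues = max_rx_queues;
	return 0;
}

int main(void)
{
	struct vsi vsi;

	if (alloc_vsi_qmap(&vsi, 16) == 0)
		free(vsi.rx_qmap);
	return 0;
}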
1686  u32 max_rx_queues = IWL_MAX_RX_HW_QUEUES;                       in iwl_pcie_set_interrupt_capa()
1692  max_rx_queues = IWL_9000_MAX_RX_HW_QUEUES;                      in iwl_pcie_set_interrupt_capa()
1694  max_irqs = min_t(u32, num_online_cpus() + 2, max_rx_queues);    in iwl_pcie_set_interrupt_capa()
1676  u32 max_rx_queues = IWL_MAX_RX_HW_QUEUES;                       iwl_pcie_set_interrupt_capa() local
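The iwlwifi hits use max_rx_queues differently: iwl_pcie_set_interrupt_capa() starts from a per-family RX hardware queue ceiling, lowers it for the 9000 family, and then caps the interrupt vector count at num_online_cpus() + 2, never exceeding the queue ceiling. A sketch of that sizing logic, with assumed values standing in for IWL_MAX_RX_HW_QUEUES, IWL_9000_MAX_RX_HW_QUEUES, and the kernel's num_online_cpus():

/*
 * Sketch: derive the interrupt vector budget from the smaller of
 * "online CPUs + 2" and the device family's RX hardware queue ceiling.
 */
#include <stdint.h>
#include <stdio.h>

#define MAX_RX_HW_QUEUES          16  /* assumed default family ceiling */
#define FAM_9000_MAX_RX_HW_QUEUES  6  /* assumed 9000-family ceiling */

static uint32_t min_u32(uint32_t a, uint32_t b)
{
	return a < b ? a : b;
}

static uint32_t compute_max_irqs(unsigned int online_cpus, int is_9000_family)
{
	uint32_t max_rx_queues = MAX_RX_HW_QUEUES;

	if (is_9000_family)
		max_rx_queues = FAM_9000_MAX_RX_HW_QUEUES;

	/* One vector per CPU plus two, but never more than the HW RX queues. */
	return min_u32(online_cpus + 2, max_rx_queues);
}

int main(void)
{
	printf("max irqs: %u\n", compute_max_irqs(8, 1));
	return 0;
}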