Searched refs: max_tx_queues (Results 1 – 9 of 9) sorted by relevance
84   hw->mac.max_tx_queues = 1;                                 in ixgbe_init_ops_vf()
269  for (i = 0; i < hw->mac.max_tx_queues; i++)                in ixgbe_stop_adapter_vf()
795  hw->mac.max_tx_queues = msg[IXGBE_VF_TX_QUEUES];           in ixgbevf_get_queues()
796  if (hw->mac.max_tx_queues == 0 ||                          in ixgbevf_get_queues()
797      hw->mac.max_tx_queues > IXGBE_VF_MAX_TX_QUEUES)        in ixgbevf_get_queues()
798      hw->mac.max_tx_queues = IXGBE_VF_MAX_TX_QUEUES;        in ixgbevf_get_queues()
812  if (*default_tc >= hw->mac.max_tx_queues)                  in ixgbevf_get_queues()
160  mac->max_tx_queues = IXGBE_82598_MAX_TX_QUEUES;            in ixgbe_init_ops_82598()
267  for (i = 0; ((i < hw->mac.max_tx_queues) &&                in ixgbe_start_hw_82598()
1370 for (i = 0; ((i < hw->mac.max_tx_queues) &&                in ixgbe_enable_relaxed_ordering_82598()
138 mac->max_tx_queues = IXGBE_X540_MAX_TX_QUEUES; in ixgbe_init_ops_X540()
469  for (i = 0; i < hw->mac.max_tx_queues; i++) {              in ixgbe_start_hw_gen2()
476  for (i = 0; i < hw->mac.max_tx_queues; i++) {              in ixgbe_start_hw_gen2()
1143 for (i = 0; i < hw->mac.max_tx_queues; i++)                in ixgbe_stop_adapter_generic()
4540 for (i = 0; i < hw->mac.max_tx_queues; i++) {              in ixgbe_enable_relaxed_ordering_gen2()
459 return hw->mac.max_tx_queues; in ixgbe_get_num_of_tx_queues()
383 mac->max_tx_queues = IXGBE_82599_MAX_TX_QUEUES; in ixgbe_init_ops_82599()
4225 u32 max_tx_queues; member
937 void ice_alloc_vsi_qmap(struct ice_vsi *vsi, const int max_tx_queues,
455  ice_alloc_vsi_qmap(struct ice_vsi *vsi, const int max_tx_queues,         in ice_alloc_vsi_qmap() argument
460  MPASS(max_tx_queues > 0);                                                in ice_alloc_vsi_qmap()
464  vsi->tx_qmap = malloc(sizeof(u16) * max_tx_queues, M_ICE, M_WAITOK);     in ice_alloc_vsi_qmap()
470  for (i = 0; i < max_tx_queues; i++) {                                    in ice_alloc_vsi_qmap()