Lines matching refs:rxq (cross-reference hits for the rxq identifier across the qlnx driver's receive path)
135 static int qlnx_alloc_rx_buffer(qlnx_host_t *ha, struct qlnx_rx_queue *rxq);
136 static void qlnx_reuse_rx_data(struct qlnx_rx_queue *rxq);
138 struct qlnx_rx_queue *rxq);
1464 if (fp->rxq->handle != NULL) { in qlnx_set_rx_coalesce()
1466 0, fp->rxq->handle); in qlnx_set_rx_coalesce()
3852 struct qlnx_rx_queue *rxq; in qlnx_rx_jumbo_chain() local
3855 rxq = fp->rxq; in qlnx_rx_jumbo_chain()
3859 rxq->sw_rx_cons = (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1); in qlnx_rx_jumbo_chain()
3861 sw_rx_data = &rxq->sw_rx_ring[rxq->sw_rx_cons]; in qlnx_rx_jumbo_chain()
3867 rxq->sw_rx_cons = in qlnx_rx_jumbo_chain()
3868 (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1); in qlnx_rx_jumbo_chain()
3878 if (qlnx_alloc_rx_buffer(ha, rxq) != 0) { in qlnx_rx_jumbo_chain()
3882 qlnx_reuse_rx_data(rxq); in qlnx_rx_jumbo_chain()
3890 ecore_chain_consume(&rxq->rx_bd_ring); in qlnx_rx_jumbo_chain()
3892 if (len > rxq->rx_buf_size) in qlnx_rx_jumbo_chain()
3893 len_in_buffer = rxq->rx_buf_size; in qlnx_rx_jumbo_chain()
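
Every index advance in these hits uses the same mask-based wrap, which is only equivalent to a modulo when RX_RING_SIZE is a power of two. A minimal standalone sketch of the idiom (the 8192 value is an assumption for illustration; the driver takes the real constant from its own headers):

	#include <stdint.h>
	#include <stdio.h>

	#define RX_RING_SIZE 8192		/* assumed power of two */

	int
	main(void)
	{
		uint16_t sw_rx_cons = RX_RING_SIZE - 1;

		/* Same advance the driver uses: wraps to 0 without a
		 * divide, valid only because RX_RING_SIZE is 2^n. */
		sw_rx_cons = (sw_rx_cons + 1) & (RX_RING_SIZE - 1);

		printf("wrapped to %u\n", sw_rx_cons);
		return (0);
	}
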
3920 struct qlnx_rx_queue *rxq, in qlnx_tpa_start() argument
3965 sw_rx_data = &rxq->sw_rx_ring[rxq->sw_rx_cons]; in qlnx_tpa_start()
3974 rxq->sw_rx_cons = (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1); in qlnx_tpa_start()
3982 rxq->sw_rx_cons, le16toh(cqe->pars_flags.flags)); in qlnx_tpa_start()
3986 qlnx_reuse_rx_data(rxq); in qlnx_tpa_start()
3993 if (qlnx_alloc_rx_buffer(ha, rxq) != 0) { in qlnx_tpa_start()
4009 sw_rx_data = &rxq->sw_rx_ring[rxq->sw_rx_prod]; in qlnx_tpa_start()
4011 sw_rx_data->data = rxq->tpa_info[agg_index].rx_buf.data; in qlnx_tpa_start()
4012 sw_rx_data->dma_addr = rxq->tpa_info[agg_index].rx_buf.dma_addr; in qlnx_tpa_start()
4013 sw_rx_data->map = rxq->tpa_info[agg_index].rx_buf.map; in qlnx_tpa_start()
4015 rxq->tpa_info[agg_index].rx_buf.data = mp; in qlnx_tpa_start()
4016 rxq->tpa_info[agg_index].rx_buf.dma_addr = addr; in qlnx_tpa_start()
4017 rxq->tpa_info[agg_index].rx_buf.map = map; in qlnx_tpa_start()
4020 ecore_chain_produce(&rxq->rx_bd_ring); in qlnx_tpa_start()
4028 rxq->sw_rx_prod = (rxq->sw_rx_prod + 1) & (RX_RING_SIZE - 1); in qlnx_tpa_start()
4029 rxq->sw_rx_cons = (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1); in qlnx_tpa_start()
4031 ecore_chain_consume(&rxq->rx_bd_ring); in qlnx_tpa_start()
4038 qlnx_reuse_rx_data(rxq); in qlnx_tpa_start()
4041 rxq->tpa_info[agg_index].agg_state = QLNX_AGG_STATE_ERROR; in qlnx_tpa_start()
4045 if (rxq->tpa_info[agg_index].agg_state != QLNX_AGG_STATE_NONE) { in qlnx_tpa_start()
4053 if (rxq->tpa_info[agg_index].mpf) { in qlnx_tpa_start()
4054 m_freem(rxq->tpa_info[agg_index].mpf); in qlnx_tpa_start()
4055 rxq->tpa_info[agg_index].mpl = NULL; in qlnx_tpa_start()
4057 rxq->tpa_info[agg_index].mpf = mp; in qlnx_tpa_start()
4058 rxq->tpa_info[agg_index].mpl = NULL; in qlnx_tpa_start()
4060 rxq->sw_rx_cons = (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1); in qlnx_tpa_start()
4061 ecore_chain_consume(&rxq->rx_bd_ring); in qlnx_tpa_start()
4068 qlnx_reuse_rx_data(rxq); in qlnx_tpa_start()
4070 rxq->tpa_info[agg_index].agg_state = QLNX_AGG_STATE_ERROR; in qlnx_tpa_start()
4079 ecore_chain_consume(&rxq->rx_bd_ring); in qlnx_tpa_start()
4080 rxq->sw_rx_cons = (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1); in qlnx_tpa_start()
4088 sw_rx_data = &rxq->sw_rx_ring[rxq->sw_rx_cons]; in qlnx_tpa_start()
4100 rxq->tpa_info[agg_index].agg_state = in qlnx_tpa_start()
4102 ecore_chain_consume(&rxq->rx_bd_ring); in qlnx_tpa_start()
4103 rxq->sw_rx_cons = in qlnx_tpa_start()
4104 (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1); in qlnx_tpa_start()
4108 if (qlnx_alloc_rx_buffer(ha, rxq) != 0) { in qlnx_tpa_start()
4113 qlnx_reuse_rx_data(rxq); in qlnx_tpa_start()
4119 rxq->tpa_info[agg_index].agg_state = in qlnx_tpa_start()
4122 ecore_chain_consume(&rxq->rx_bd_ring); in qlnx_tpa_start()
4123 rxq->sw_rx_cons = in qlnx_tpa_start()
4124 (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1); in qlnx_tpa_start()
4141 ecore_chain_consume(&rxq->rx_bd_ring); in qlnx_tpa_start()
4142 rxq->sw_rx_cons = in qlnx_tpa_start()
4143 (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1); in qlnx_tpa_start()
4146 if (rxq->tpa_info[agg_index].agg_state != QLNX_AGG_STATE_NONE) { in qlnx_tpa_start()
4153 rxq->tpa_info[agg_index].mpf = mp; in qlnx_tpa_start()
4154 rxq->tpa_info[agg_index].mpl = NULL; in qlnx_tpa_start()
4159 rxq->tpa_info[agg_index].placement_offset = cqe->placement_offset; in qlnx_tpa_start()
4164 rxq->tpa_info[agg_index].mpf = mp; in qlnx_tpa_start()
4165 rxq->tpa_info[agg_index].mpl = mpl; in qlnx_tpa_start()
4168 rxq->tpa_info[agg_index].mpf = mp; in qlnx_tpa_start()
4169 rxq->tpa_info[agg_index].mpl = mp; in qlnx_tpa_start()
4220 rxq->tpa_info[agg_index].agg_state = QLNX_AGG_STATE_START; in qlnx_tpa_start()
4223 fp->rss_id, rxq->tpa_info[agg_index].agg_state, in qlnx_tpa_start()
4224 rxq->tpa_info[agg_index].mpf, rxq->tpa_info[agg_index].mpl); in qlnx_tpa_start()
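
qlnx_tpa_start() parks the first buffer of a hardware LRO aggregation in the per-index tpa_info slot, tracking the mbuf chain with a head pointer (mpf) and a cached tail (mpl) and recording the placement offset for later trimming. A schematic of that bookkeeping with stand-in types (the real definitions live in the driver's headers):

	#include <stdint.h>

	struct mbuf;			/* stub; the real type is in sys/mbuf.h */

	enum qlnx_agg_state {		/* mirrors QLNX_AGG_STATE_* above */
		AGG_STATE_NONE,		/* slot idle */
		AGG_STATE_START,	/* start CQE seen, chain open */
		AGG_STATE_ERROR		/* drop everything at tpa_end */
	};

	struct tpa_slot {
		enum qlnx_agg_state	 agg_state;
		struct mbuf		*mpf;	/* first mbuf of the aggregation */
		struct mbuf		*mpl;	/* cached tail: O(1) appends in tpa_cont */
		uint8_t			 placement_offset; /* trimmed in tpa_end */
	};

	int
	main(void)
	{
		struct tpa_slot slot = { AGG_STATE_NONE, 0, 0, 0 };

		return (slot.agg_state == AGG_STATE_NONE ? 0 : 1);
	}
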
4231 struct qlnx_rx_queue *rxq, in qlnx_tpa_cont() argument
4267 if (rxq->tpa_info[agg_index].agg_state != in qlnx_tpa_cont()
4269 qlnx_reuse_rx_data(rxq); in qlnx_tpa_cont()
4273 sw_rx_data = &rxq->sw_rx_ring[rxq->sw_rx_cons]; in qlnx_tpa_cont()
4286 rxq->tpa_info[agg_index].agg_state = in qlnx_tpa_cont()
4288 ecore_chain_consume(&rxq->rx_bd_ring); in qlnx_tpa_cont()
4289 rxq->sw_rx_cons = in qlnx_tpa_cont()
4290 (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1); in qlnx_tpa_cont()
4294 if (qlnx_alloc_rx_buffer(ha, rxq) != 0) { in qlnx_tpa_cont()
4299 qlnx_reuse_rx_data(rxq); in qlnx_tpa_cont()
4305 rxq->tpa_info[agg_index].agg_state = in qlnx_tpa_cont()
4308 ecore_chain_consume(&rxq->rx_bd_ring); in qlnx_tpa_cont()
4309 rxq->sw_rx_cons = in qlnx_tpa_cont()
4310 (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1); in qlnx_tpa_cont()
4327 ecore_chain_consume(&rxq->rx_bd_ring); in qlnx_tpa_cont()
4328 rxq->sw_rx_cons = in qlnx_tpa_cont()
4329 (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1); in qlnx_tpa_cont()
4336 mp = rxq->tpa_info[agg_index].mpl; in qlnx_tpa_cont()
4339 rxq->tpa_info[agg_index].mpl = mpl; in qlnx_tpa_cont()
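
The continuation path never walks the chain: it links the new segment after the remembered tail (mpl) and stores the new tail back, an O(1) append. A toy, self-contained version of the pattern (names here are illustrative, not the driver's API):

	#include <stdio.h>

	/* Toy stand-in for an mbuf; only the linkage matters here. */
	struct toy_mbuf {
		struct toy_mbuf	*m_next;
		int		 m_len;
	};

	/* The O(1) append behind the mpf/mpl pair: link after the
	 * cached tail, then advance the cached tail. */
	static void
	toy_tpa_append(struct toy_mbuf **mpl, struct toy_mbuf *m)
	{
		m->m_next = NULL;
		(*mpl)->m_next = m;
		*mpl = m;
	}

	int
	main(void)
	{
		struct toy_mbuf a = { NULL, 64 }, b = { NULL, 128 };
		struct toy_mbuf *mpf = &a, *mpl = &a;

		toy_tpa_append(&mpl, &b);
		printf("chain: %d -> %d\n", mpf->m_len, mpf->m_next->m_len);
		return (0);
	}
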
4347 struct qlnx_rx_queue *rxq, in qlnx_tpa_end() argument
4391 if (rxq->tpa_info[agg_index].agg_state != in qlnx_tpa_end()
4395 qlnx_reuse_rx_data(rxq); in qlnx_tpa_end()
4399 sw_rx_data = &rxq->sw_rx_ring[rxq->sw_rx_cons]; in qlnx_tpa_end()
4412 rxq->tpa_info[agg_index].agg_state = in qlnx_tpa_end()
4414 ecore_chain_consume(&rxq->rx_bd_ring); in qlnx_tpa_end()
4415 rxq->sw_rx_cons = in qlnx_tpa_end()
4416 (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1); in qlnx_tpa_end()
4420 if (qlnx_alloc_rx_buffer(ha, rxq) != 0) { in qlnx_tpa_end()
4425 qlnx_reuse_rx_data(rxq); in qlnx_tpa_end()
4431 rxq->tpa_info[agg_index].agg_state = in qlnx_tpa_end()
4434 ecore_chain_consume(&rxq->rx_bd_ring); in qlnx_tpa_end()
4435 rxq->sw_rx_cons = in qlnx_tpa_end()
4436 (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1); in qlnx_tpa_end()
4453 ecore_chain_consume(&rxq->rx_bd_ring); in qlnx_tpa_end()
4454 rxq->sw_rx_cons = in qlnx_tpa_end()
4455 (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1); in qlnx_tpa_end()
4463 mp = rxq->tpa_info[agg_index].mpl; in qlnx_tpa_end()
4468 if (rxq->tpa_info[agg_index].agg_state != QLNX_AGG_STATE_START) { in qlnx_tpa_end()
4471 if (rxq->tpa_info[agg_index].mpf != NULL) in qlnx_tpa_end()
4472 m_freem(rxq->tpa_info[agg_index].mpf); in qlnx_tpa_end()
4473 rxq->tpa_info[agg_index].mpf = NULL; in qlnx_tpa_end()
4474 rxq->tpa_info[agg_index].mpl = NULL; in qlnx_tpa_end()
4475 rxq->tpa_info[agg_index].agg_state = QLNX_AGG_STATE_NONE; in qlnx_tpa_end()
4479 mp = rxq->tpa_info[agg_index].mpf; in qlnx_tpa_end()
4480 m_adj(mp, rxq->tpa_info[agg_index].placement_offset); in qlnx_tpa_end()
4494 mpl = rxq->tpa_info[agg_index].mpl; in qlnx_tpa_end()
4509 rxq->tpa_info[agg_index].mpf = NULL; in qlnx_tpa_end()
4510 rxq->tpa_info[agg_index].mpl = NULL; in qlnx_tpa_end()
4511 rxq->tpa_info[agg_index].agg_state = QLNX_AGG_STATE_NONE; in qlnx_tpa_end()
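
On a clean end, the head mbuf still carries the controller's placement offset in front of the frame, so the driver trims it with m_adj() before the chain goes up the stack (line 4480 above), then resets the slot to QLNX_AGG_STATE_NONE. A userland model of what that front-trim does, under the assumption of a single contiguous buffer:

	#include <stdio.h>

	struct toy_buf {
		unsigned char	*data;
		int		 len;
	};

	/* Model of m_adj(mp, placement_offset) on the head buffer:
	 * drop 'off' bytes from the front. */
	static void
	toy_adj(struct toy_buf *b, int off)
	{
		b->data += off;
		b->len  -= off;
	}

	int
	main(void)
	{
		unsigned char frame[256];
		struct toy_buf b = { frame, 256 };

		toy_adj(&b, 2);		/* a placement_offset of 2, say */
		printf("len now %d\n", b.len);
		return (0);
	}
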
4522 struct qlnx_rx_queue *rxq = fp->rxq; in qlnx_rx_int() local
4530 lro = &rxq->lro; in qlnx_rx_int()
4533 hw_comp_cons = le16toh(*rxq->hw_cons_ptr); in qlnx_rx_int()
4534 sw_comp_cons = ecore_chain_get_cons_idx(&rxq->rx_comp_ring); in qlnx_rx_int()
4557 ecore_chain_consume(&rxq->rx_comp_ring); in qlnx_rx_int()
4571 qlnx_tpa_start(ha, fp, rxq, in qlnx_rx_int()
4577 qlnx_tpa_cont(ha, fp, rxq, in qlnx_rx_int()
4583 rx_pkt += qlnx_tpa_end(ha, fp, rxq, in qlnx_rx_int()
4596 sw_rx_data = &rxq->sw_rx_ring[rxq->sw_rx_cons]; in qlnx_rx_int()
4602 rxq->sw_rx_cons = in qlnx_rx_int()
4603 (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1); in qlnx_rx_int()
4640 qlnx_reuse_rx_data(rxq); in qlnx_rx_int()
4647 if (qlnx_alloc_rx_buffer(ha, rxq) != 0) { in qlnx_rx_int()
4650 qlnx_reuse_rx_data(rxq); in qlnx_rx_int()
4659 ecore_chain_consume(&rxq->rx_bd_ring); in qlnx_rx_int()
4754 rxq->sw_rx_cons = (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1); in qlnx_rx_int()
4757 ecore_chain_recycle_consumed(&rxq->rx_comp_ring); in qlnx_rx_int()
4758 sw_comp_cons = ecore_chain_get_cons_idx(&rxq->rx_comp_ring); in qlnx_rx_int()
4767 qlnx_update_rx_prod(p_hwfn, rxq); in qlnx_rx_int()
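
Taken together, the qlnx_rx_int() hits trace the classic completion-queue poll: snapshot the hardware consumer, drain CQEs until the software consumer catches up (dispatching to the regular, tpa_start, tpa_cont, and tpa_end paths), refill or recycle buffers as you go, then publish the new producers. A condensed, self-contained skeleton of that control flow; every name here is illustrative rather than the driver's API:

	#include <stdint.h>

	struct toy_rxq {
		volatile uint16_t	*hw_cons_ptr;	/* written by hardware */
		uint16_t		 sw_comp_cons;	/* our consumer index */
	};

	/* Returns the number of completions drained. */
	static int
	toy_rx_poll(struct toy_rxq *rxq)
	{
		uint16_t hw_comp_cons;
		int rx_pkt = 0;

		hw_comp_cons = *rxq->hw_cons_ptr;	/* le16toh() in the driver */

		while (rxq->sw_comp_cons != hw_comp_cons) {
			/* classify CQE: regular RX vs. tpa start/cont/end */
			/* consume BD(s); refill via alloc or recycle via reuse */
			rxq->sw_comp_cons++;	/* free-running u16, wraps naturally */
			rx_pkt++;
		}
		/* then publish the new producers, cf. qlnx_update_rx_prod() */
		return (rx_pkt);
	}

	int
	main(void)
	{
		volatile uint16_t hw = 4;
		struct toy_rxq rxq = { &hw, 0 };

		return (toy_rx_poll(&rxq) == 4 ? 0 : 1);
	}
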
4862 lro = &fp->rxq->lro; in qlnx_fp_isr()
5835 fp->rxq = &ha->rxq_array[rss_id]; in qlnx_init_fp()
5836 fp->rxq->rxq_id = rss_id; in qlnx_init_fp()
5951 qlnx_free_rx_buffers(qlnx_host_t *ha, struct qlnx_rx_queue *rxq) in qlnx_free_rx_buffers() argument
5956 for (i = 0; i < rxq->num_rx_buffers; i++) { in qlnx_free_rx_buffers()
5957 rx_buf = &rxq->sw_rx_ring[i]; in qlnx_free_rx_buffers()
5973 qlnx_free_mem_rxq(qlnx_host_t *ha, struct qlnx_rx_queue *rxq) in qlnx_free_mem_rxq() argument
5980 qlnx_free_rx_buffers(ha, rxq); in qlnx_free_mem_rxq()
5983 qlnx_free_tpa_mbuf(ha, &rxq->tpa_info[i]); in qlnx_free_mem_rxq()
5984 if (rxq->tpa_info[i].mpf != NULL) in qlnx_free_mem_rxq()
5985 m_freem(rxq->tpa_info[i].mpf); in qlnx_free_mem_rxq()
5988 bzero((void *)&rxq->sw_rx_ring[0], in qlnx_free_mem_rxq()
5992 if (rxq->rx_bd_ring.p_virt_addr) { in qlnx_free_mem_rxq()
5993 ecore_chain_free(cdev, &rxq->rx_bd_ring); in qlnx_free_mem_rxq()
5994 rxq->rx_bd_ring.p_virt_addr = NULL; in qlnx_free_mem_rxq()
5998 if (rxq->rx_comp_ring.p_virt_addr && in qlnx_free_mem_rxq()
5999 rxq->rx_comp_ring.pbl_sp.p_virt_table) { in qlnx_free_mem_rxq()
6000 ecore_chain_free(cdev, &rxq->rx_comp_ring); in qlnx_free_mem_rxq()
6001 rxq->rx_comp_ring.p_virt_addr = NULL; in qlnx_free_mem_rxq()
6002 rxq->rx_comp_ring.pbl_sp.p_virt_table = NULL; in qlnx_free_mem_rxq()
6009 lro = &rxq->lro; in qlnx_free_mem_rxq()
6018 qlnx_alloc_rx_buffer(qlnx_host_t *ha, struct qlnx_rx_queue *rxq) in qlnx_alloc_rx_buffer() argument
6030 rx_buf_size = rxq->rx_buf_size; in qlnx_alloc_rx_buffer()
6054 sw_rx_data = &rxq->sw_rx_ring[rxq->sw_rx_prod]; in qlnx_alloc_rx_buffer()
6060 rx_bd = (struct eth_rx_bd *)ecore_chain_produce(&rxq->rx_bd_ring); in qlnx_alloc_rx_buffer()
6065 rxq->sw_rx_prod = (rxq->sw_rx_prod + 1) & (RX_RING_SIZE - 1); in qlnx_alloc_rx_buffer()
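
The allocation path moves in lockstep: stash the new buffer in the software ring at sw_rx_prod, produce a hardware BD for the same buffer, then advance the producer with the usual mask. A toy model of that lockstep (stand-in types, malloc() standing in for m_getjcl() plus the DMA map):

	#include <stdint.h>
	#include <stdlib.h>

	#define RING_SIZE 8192

	struct toy_slot {
		void	*data;
	};

	struct toy_rxq {
		struct toy_slot	 sw_rx_ring[RING_SIZE];
		uint16_t	 sw_rx_prod;
	};

	static int
	toy_alloc_rx_buffer(struct toy_rxq *rxq, size_t size)
	{
		void *buf = malloc(size);	/* mbuf alloc + DMA map in the driver */

		if (buf == NULL)
			return (-1);
		rxq->sw_rx_ring[rxq->sw_rx_prod].data = buf;
		/* the driver also ecore_chain_produce()s a BD for it here */
		rxq->sw_rx_prod = (rxq->sw_rx_prod + 1) & (RING_SIZE - 1);
		return (0);
	}

	int
	main(void)
	{
		static struct toy_rxq rxq;

		return (toy_alloc_rx_buffer(&rxq, 2048) == 0 ? 0 : 1);
	}
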
6138 qlnx_alloc_mem_rxq(qlnx_host_t *ha, struct qlnx_rx_queue *rxq) in qlnx_alloc_mem_rxq() argument
6145 rxq->num_rx_buffers = RX_RING_SIZE; in qlnx_alloc_mem_rxq()
6147 rxq->rx_buf_size = ha->rx_buf_size; in qlnx_alloc_mem_rxq()
6150 bzero((void *)&rxq->sw_rx_ring[0], in qlnx_alloc_mem_rxq()
6161 &rxq->rx_bd_ring, NULL); in qlnx_alloc_mem_rxq()
6173 &rxq->rx_comp_ring, NULL); in qlnx_alloc_mem_rxq()
6181 rc = qlnx_alloc_tpa_mbuf(ha, rxq->rx_buf_size, in qlnx_alloc_mem_rxq()
6182 &rxq->tpa_info[i]); in qlnx_alloc_mem_rxq()
6187 for (i = 0; i < rxq->num_rx_buffers; i++) { in qlnx_alloc_mem_rxq()
6188 rc = qlnx_alloc_rx_buffer(ha, rxq); in qlnx_alloc_mem_rxq()
6196 } else if (num_allocated < rxq->num_rx_buffers) { in qlnx_alloc_mem_rxq()
6206 lro = &rxq->lro; in qlnx_alloc_mem_rxq()
6208 if (tcp_lro_init_args(lro, ifp, 0, rxq->num_rx_buffers)) { in qlnx_alloc_mem_rxq()
6210 rxq->rxq_id); in qlnx_alloc_mem_rxq()
6220 qlnx_free_mem_rxq(ha, rxq); in qlnx_alloc_mem_rxq()
6311 qlnx_free_mem_rxq(ha, fp->rxq); in qlnx_free_mem_fp()
6362 rc = qlnx_alloc_mem_rxq(ha, fp->rxq); in qlnx_alloc_mem_fp()
6509 rss->rss_ind_table[j] = fp->rxq->handle; in qlnx_update_vport()
6549 qlnx_reuse_rx_data(struct qlnx_rx_queue *rxq) in qlnx_reuse_rx_data() argument
6552 ecore_chain_consume(&rxq->rx_bd_ring); in qlnx_reuse_rx_data()
6554 ecore_chain_produce(&rxq->rx_bd_ring); in qlnx_reuse_rx_data()
6556 &rxq->sw_rx_ring[rxq->sw_rx_cons]; in qlnx_reuse_rx_data()
6558 &rxq->sw_rx_ring[rxq->sw_rx_prod]; in qlnx_reuse_rx_data()
6563 rxq->sw_rx_cons = (rxq->sw_rx_cons + 1) & (RX_RING_SIZE - 1); in qlnx_reuse_rx_data()
6564 rxq->sw_rx_prod = (rxq->sw_rx_prod + 1) & (RX_RING_SIZE - 1); in qlnx_reuse_rx_data()
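
qlnx_reuse_rx_data() is the no-allocation fallback used throughout the TPA and error paths above: rather than getting a fresh mbuf, it copies the consumer's sw-ring entry into the producer slot so the same buffer is re-posted, then bumps both indices. A compact model (the real code also moves the BD via ecore_chain_consume()/ecore_chain_produce()):

	#include <stdint.h>

	#define RING_SIZE 8192

	struct toy_slot {
		void		*data;
		uint64_t	 dma_addr;
	};

	struct toy_rxq {
		struct toy_slot	 sw_rx_ring[RING_SIZE];
		uint16_t	 sw_rx_cons, sw_rx_prod;
	};

	/* Recycle: the buffer just consumed goes straight back on the ring. */
	static void
	toy_reuse_rx_data(struct toy_rxq *rxq)
	{
		rxq->sw_rx_ring[rxq->sw_rx_prod] = rxq->sw_rx_ring[rxq->sw_rx_cons];

		rxq->sw_rx_cons = (rxq->sw_rx_cons + 1) & (RING_SIZE - 1);
		rxq->sw_rx_prod = (rxq->sw_rx_prod + 1) & (RING_SIZE - 1);
	}

	int
	main(void)
	{
		static struct toy_rxq rxq;

		toy_reuse_rx_data(&rxq);
		return (rxq.sw_rx_cons == 1 && rxq.sw_rx_prod == 1 ? 0 : 1);
	}
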
6570 qlnx_update_rx_prod(struct ecore_hwfn *p_hwfn, struct qlnx_rx_queue *rxq) in qlnx_update_rx_prod() argument
6580 bd_prod = ecore_chain_get_prod_idx(&rxq->rx_bd_ring); in qlnx_update_rx_prod()
6581 cqe_prod = ecore_chain_get_prod_idx(&rxq->rx_comp_ring); in qlnx_update_rx_prod()
6594 internal_ram_wr(p_hwfn, rxq->hw_rxq_prod_addr, in qlnx_update_rx_prod()
6597 internal_ram_wr(rxq->hw_rxq_prod_addr, in qlnx_update_rx_prod()
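
qlnx_update_rx_prod() publishes the BD-ring and completion-ring producers to the adapter's internal RAM. A hedged sketch of the shape of that doorbell; the two-field layout and the release fence are assumptions for illustration (the driver uses its own barrier and internal_ram_wr()):

	#include <stdint.h>

	struct toy_prod {		/* layout is an assumption for the sketch */
		uint16_t	bd_prod;
		uint16_t	cqe_prod;
	};

	static void
	toy_update_rx_prod(volatile struct toy_prod *hw_prod,
	    uint16_t bd_prod, uint16_t cqe_prod)
	{
		/* BD/ring stores must be visible before the producers
		 * are published; the driver issues its own barrier. */
		__atomic_thread_fence(__ATOMIC_RELEASE);

		hw_prod->bd_prod = bd_prod;	/* internal_ram_wr() in the driver */
		hw_prod->cqe_prod = cqe_prod;
	}

	int
	main(void)
	{
		struct toy_prod prod = { 0, 0 };

		toy_update_rx_prod(&prod, 1, 1);
		return (prod.bd_prod == 1 ? 0 : 1);
	}
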
6686 fp->rxq->rx_buf_size, /* bd_max_bytes */ in qlnx_start_queues()
6688 fp->rxq->rx_bd_ring.p_phys_addr, in qlnx_start_queues()
6690 ecore_chain_get_pbl_phys(&fp->rxq->rx_comp_ring), in qlnx_start_queues()
6692 ecore_chain_get_page_cnt(&fp->rxq->rx_comp_ring), in qlnx_start_queues()
6700 fp->rxq->hw_rxq_prod_addr = rx_ret_params.p_prod; in qlnx_start_queues()
6701 fp->rxq->handle = rx_ret_params.p_handle; in qlnx_start_queues()
6702 fp->rxq->hw_cons_ptr = in qlnx_start_queues()
6705 qlnx_update_rx_prod(p_hwfn, fp->rxq); in qlnx_start_queues()
6765 rss_params->rss_ind_table[i] = fp->rxq->handle; in qlnx_start_queues()
6913 rc = ecore_eth_rx_queue_stop(p_hwfn, fp->rxq->handle, false, in qlnx_stop_queues()
7309 lro = &fp->rxq->lro; in qlnx_drain_soft_lro()