Lines Matching refs:cqe
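The references below all fall in the qede Rx completion path: the LRO/TPA aggregation handlers (qede_lro_start/cont/end), the regular and jumbo CQE handlers (qede_reg_cqe, qede_reg_jumbo_cqe), and the ring-processing loop (qede_process_rx_ring) that dispatches on the CQE type.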
291 struct eth_fast_path_rx_tpa_start_cqe *cqe) in qede_lro_start() argument
296 lro_info = &rx_ring->lro_info[cqe->tpa_agg_index]; in qede_lro_start()
302 qede_dump_start_lro_cqe(cqe); in qede_lro_start()
314 lro_info->pars_flags = LE_16(cqe->pars_flags.flags); in qede_lro_start()
315 lro_info->pad = LE_16(cqe->placement_offset); in qede_lro_start()
316 lro_info->header_len = (uint32_t)cqe->header_len; in qede_lro_start()
317 lro_info->vlan_tag = LE_16(cqe->vlan_tag); in qede_lro_start()
318 lro_info->rss_hash = LE_32(cqe->rss_hash); in qede_lro_start()
320 seg_len = (int)LE_16(cqe->seg_len); in qede_lro_start()
321 len_on_first_bd = (int)LE_16(cqe->len_on_first_bd); in qede_lro_start()
334 if (cqe->ext_bd_len_list[i] == 0) { in qede_lro_start()
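Taken together, the qede_lro_start() references show the shape of the TPA-start step: the per-aggregation state is indexed by cqe->tpa_agg_index, the parsing metadata (pars_flags, placement_offset, header_len, vlan_tag, rss_hash) is latched into lro_info, and the lengths already covered by this CQE (len_on_first_bd plus a zero-terminated ext_bd_len_list) are accounted against seg_len. Below is a minimal sketch of that accounting; the struct layouts, array size, and the lro_info fields are simplified assumptions, not the driver's actual definitions.

	#include <stdint.h>

	#define EXT_BD_LEN_LIST_SIZE	2	/* assumed list depth */

	struct tpa_start_cqe_sketch {		/* simplified stand-in for */
		uint8_t		tpa_agg_index;	/* eth_fast_path_rx_tpa_start_cqe */
		uint16_t	pars_flags;
		uint8_t		placement_offset;
		uint8_t		header_len;
		uint16_t	vlan_tag;
		uint32_t	rss_hash;
		uint16_t	seg_len;
		uint16_t	len_on_first_bd;
		uint16_t	ext_bd_len_list[EXT_BD_LEN_LIST_SIZE];
	};

	struct lro_info_sketch {		/* assumed per-aggregation state */
		uint16_t	pars_flags;
		uint16_t	pad;
		uint32_t	header_len;
		uint16_t	vlan_tag;
		uint32_t	rss_hash;
		int		bytes_pending;	/* of the first segment */
	};

	static void
	lro_start_sketch(struct lro_info_sketch *lro_info,
	    const struct tpa_start_cqe_sketch *cqe)
	{
		int seg_len = cqe->seg_len;
		int consumed = cqe->len_on_first_bd;
		int i;

		/* Latch the parsing metadata carried by the start CQE. */
		lro_info->pars_flags = cqe->pars_flags;
		lro_info->pad = cqe->placement_offset;
		lro_info->header_len = cqe->header_len;
		lro_info->vlan_tag = cqe->vlan_tag;
		lro_info->rss_hash = cqe->rss_hash;

		/* Count BDs already described here; a zero ends the list. */
		for (i = 0; i < EXT_BD_LEN_LIST_SIZE; i++) {
			if (cqe->ext_bd_len_list[i] == 0)
				break;
			consumed += cqe->ext_bd_len_list[i];
		}

		lro_info->bytes_pending = seg_len - consumed;
	}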
344 struct eth_fast_path_rx_tpa_cont_cqe *cqe) in qede_lro_cont() argument
349 lro_info = &rx_ring->lro_info[cqe->tpa_agg_index]; in qede_lro_cont()
354 qede_dump_cont_lro_cqe(cqe); in qede_lro_cont()
359 if (cqe->len_list[i] == 0) { in qede_lro_cont()
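The qede_lro_cont() references are simpler: the same tpa_agg_index lookup, followed by a walk of len_list that stops at the first zero entry. A sketch of that walk, with an assumed list size:

	#include <stdint.h>

	#define CONT_LEN_LIST_SIZE	6	/* assumed */

	struct tpa_cont_cqe_sketch {		/* simplified stand-in for */
		uint8_t		tpa_agg_index;	/* eth_fast_path_rx_tpa_cont_cqe */
		uint16_t	len_list[CONT_LEN_LIST_SIZE];
	};

	/* Returns the bytes this continuation CQE adds to the aggregation. */
	static uint32_t
	lro_cont_sketch(const struct tpa_cont_cqe_sketch *cqe)
	{
		uint32_t added = 0;
		int i;

		for (i = 0; i < CONT_LEN_LIST_SIZE; i++) {
			if (cqe->len_list[i] == 0)
				break;		/* zero length terminates the list */
			added += cqe->len_list[i];
		}
		return (added);
	}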
368 struct eth_fast_path_rx_tpa_end_cqe *cqe, in qede_lro_end() argument
379 lro_info = &rx_ring->lro_info[cqe->tpa_agg_index]; in qede_lro_end()
385 qede_dump_end_lro_cqe(cqe); in qede_lro_end()
389 work_length = total_packet_length = LE_16(cqe->total_packet_len); in qede_lro_end()
395 if (cqe->len_list[i] == 0) { in qede_lro_end()
434 (cqe->num_of_bds > 1)) { in qede_lro_end()
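qede_lro_end() closes the aggregation: total_packet_len gives the final length to hand up, the trailing len_list is again zero-terminated, and num_of_bds > 1 marks aggregations that span more than one buffer descriptor. A hedged sketch of that bookkeeping follows; field names mirror the references above, but the layout and list size are assumptions.

	#include <stdint.h>

	#define END_LEN_LIST_SIZE	4	/* assumed */

	struct tpa_end_cqe_sketch {		/* simplified stand-in for */
		uint8_t		tpa_agg_index;	/* eth_fast_path_rx_tpa_end_cqe */
		uint8_t		num_of_bds;
		uint16_t	total_packet_len;
		uint16_t	len_list[END_LEN_LIST_SIZE];
	};

	/* Returns the total packet length; *tail_bytes gets the bytes that
	 * arrived in the trailing BDs described by this end CQE. */
	static uint32_t
	lro_end_sketch(const struct tpa_end_cqe_sketch *cqe, uint32_t *tail_bytes)
	{
		uint32_t total = cqe->total_packet_len;
		int i;

		*tail_bytes = 0;
		for (i = 0; i < END_LEN_LIST_SIZE; i++) {
			if (cqe->len_list[i] == 0)
				break;		/* zero entry ends the list */
			*tail_bytes += cqe->len_list[i];
		}

		/*
		 * Aggregations with num_of_bds > 1 span several buffer
		 * descriptors and take the chained-buffer path.
		 */
		return (total);
	}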
482 struct eth_fast_path_rx_reg_cqe *cqe) in qede_reg_jumbo_cqe() argument
491 u8 pad = cqe->placement_offset; in qede_reg_jumbo_cqe()
495 qede_dump_reg_cqe(cqe); in qede_reg_jumbo_cqe()
501 work_length = HOST_TO_LE_16(cqe->pkt_len); in qede_reg_jumbo_cqe()
506 for (i = 0; i < cqe->bd_num; i++) { in qede_reg_jumbo_cqe()
517 for (i = 0; i < cqe->bd_num; i++) { in qede_reg_jumbo_cqe()
524 for (i = 0; i < cqe->bd_num; i++) { in qede_reg_jumbo_cqe()
567 HOST_TO_LE_16(cqe->pars_flags.flags)); in qede_reg_jumbo_cqe()
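For regular CQEs that span several buffer descriptors, qede_reg_jumbo_cqe() reads pkt_len and placement_offset and then iterates over bd_num descriptors in several passes, consulting pars_flags at the end for the checksum/offload state. The sketch below shows one plausible way the packet length splits across the BDs, assuming a fixed per-BD buffer size; both the struct layout and the buffer size are illustrative.

	#include <stdint.h>

	#define RX_BUF_SIZE	2048		/* assumed per-BD buffer size */

	struct reg_cqe_sketch {			/* simplified stand-in for */
		uint8_t		bd_num;		/* eth_fast_path_rx_reg_cqe */
		uint8_t		placement_offset;
		uint16_t	pkt_len;
		uint16_t	pars_flags;
	};

	/*
	 * Split pkt_len across bd_num receive buffers: the first buffer
	 * starts at the placement offset, every buffer but the last is
	 * consumed in full, and the last carries whatever remains.
	 * seg_len must have at least bd_num entries.
	 */
	static void
	jumbo_cqe_sketch(const struct reg_cqe_sketch *cqe, uint16_t *seg_len)
	{
		uint32_t work_length = cqe->pkt_len;
		uint8_t pad = cqe->placement_offset;
		int i;

		for (i = 0; i < cqe->bd_num; i++) {
			uint32_t avail = RX_BUF_SIZE - (i == 0 ? pad : 0);
			uint32_t take = (work_length < avail) ? work_length : avail;

			seg_len[i] = (uint16_t)take;
			work_length -= take;
		}
		/* pars_flags would then drive checksum/VLAN handling. */
	}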
579 struct eth_fast_path_rx_reg_cqe *cqe, in qede_reg_cqe() argument
586 uint16_t pkt_len = HOST_TO_LE_16(cqe->pkt_len); in qede_reg_cqe()
587 u8 pad = cqe->placement_offset; in qede_reg_cqe()
598 if (cqe->bd_num > 1) { in qede_reg_cqe()
606 return (qede_reg_jumbo_cqe(rx_ring, cqe)); in qede_reg_cqe()
668 HOST_TO_LE_16(cqe->pars_flags.flags)); in qede_reg_cqe()
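qede_reg_cqe() is the single-packet fast path: it reads pkt_len and the placement offset, hands anything with bd_num > 1 off to qede_reg_jumbo_cqe(), and otherwise processes the one buffer, using pars_flags for the offload bits. A self-contained sketch of that dispatch decision (layout assumed):

	#include <stdint.h>

	struct reg_cqe_hdr_sketch {		/* assumed minimal layout */
		uint8_t		bd_num;
		uint8_t		placement_offset;
		uint16_t	pkt_len;
		uint16_t	pars_flags;
	};

	/*
	 * A regular CQE describing more than one buffer descriptor goes
	 * down the jumbo path; otherwise the whole frame sits in one
	 * buffer, pkt_len bytes past the placement offset.
	 */
	static int
	reg_cqe_is_jumbo_sketch(const struct reg_cqe_hdr_sketch *cqe,
	    uint16_t *frame_len, uint8_t *frame_off)
	{
		*frame_len = cqe->pkt_len;
		*frame_off = cqe->placement_offset;

		return (cqe->bd_num > 1);
	}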
691 union eth_rx_cqe *cqe; in qede_process_rx_ring() local
709 cqe = (union eth_rx_cqe *) in qede_process_rx_ring()
714 last_cqe_consumer, sizeof (*cqe), in qede_process_rx_ring()
717 cqe_type = cqe->fast_path_regular.type; in qede_process_rx_ring()
722 (struct eth_slow_path_rx_cqe *)cqe); in qede_process_rx_ring()
726 &cqe->fast_path_regular, in qede_process_rx_ring()
731 &cqe->fast_path_tpa_start); in qede_process_rx_ring()
735 &cqe->fast_path_tpa_cont); in qede_process_rx_ring()
739 &cqe->fast_path_tpa_end, in qede_process_rx_ring()
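Finally, qede_process_rx_ring() takes the CQE at the current consumer index (after a DMA sync of sizeof (*cqe) bytes), reads the type field shared by every CQE layout, and dispatches through the eth_rx_cqe union to the slow-path, regular, or TPA start/continue/end handlers. A sketch of that dispatch, with illustrative type names and a cut-down union standing in for the real one:

	#include <stdint.h>

	enum rx_cqe_type_sketch {		/* illustrative values only */
		CQE_TYPE_SLOW_PATH,
		CQE_TYPE_REGULAR,
		CQE_TYPE_TPA_START,
		CQE_TYPE_TPA_CONT,
		CQE_TYPE_TPA_END
	};

	union rx_cqe_sketch {			/* stand-in for eth_rx_cqe */
		struct { uint8_t type; }	fast_path_regular;
		struct { uint8_t type; }	fast_path_tpa_start;
		struct { uint8_t type; }	fast_path_tpa_cont;
		struct { uint8_t type; }	fast_path_tpa_end;
		struct { uint8_t type; }	slow_path;
	};

	static void
	process_one_cqe_sketch(const union rx_cqe_sketch *cqe)
	{
		/* Every layout in the union starts with the same type field. */
		switch (cqe->fast_path_regular.type) {
		case CQE_TYPE_SLOW_PATH:
			/* ramrod/async completion: slow-path handler */
			break;
		case CQE_TYPE_REGULAR:
			/* normal frame: qede_reg_cqe(..., &cqe->fast_path_regular, ...) */
			break;
		case CQE_TYPE_TPA_START:
			/* qede_lro_start(..., &cqe->fast_path_tpa_start) */
			break;
		case CQE_TYPE_TPA_CONT:
			/* qede_lro_cont(..., &cqe->fast_path_tpa_cont) */
			break;
		case CQE_TYPE_TPA_END:
			/* qede_lro_end(..., &cqe->fast_path_tpa_end, ...) */
			break;
		default:
			break;
		}
	}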