Lines matching refs:txq (Tx queue path of the ena driver)

In ena_free_tx_dma(ena_txq_t *txq):

    if (txq->et_tcbs != NULL) {
        for (uint_t i = 0; i < txq->et_sq_num_descs; i++) {
            ena_tx_control_block_t *tcb = &txq->et_tcbs[i];
            ...
        }

        kmem_free(txq->et_tcbs,
            sizeof (*txq->et_tcbs) * txq->et_sq_num_descs);
        kmem_free(txq->et_tcbs_freelist,
            sizeof (ena_tx_control_block_t *) * txq->et_sq_num_descs);

        txq->et_tcbs = NULL;
        txq->et_tcbs_freelist = NULL;
        txq->et_tcbs_freelist_size = 0;
    }

    ena_dma_free(&txq->et_cq_dma);
    txq->et_cq_descs = NULL;

    ena_dma_free(&txq->et_sq_dma);
    txq->et_sq_descs = NULL;

    txq->et_state &= ~ENA_TXQ_STATE_HOST_ALLOC;

In ena_alloc_tx_dma(ena_txq_t *txq):

    ena_t *ena = txq->et_ena;
    ...
    ASSERT0(txq->et_state & ENA_TXQ_STATE_HOST_ALLOC);
    ...
    cq_descs_sz = txq->et_cq_num_descs * sizeof (*txq->et_cq_descs);
    sq_descs_sz = txq->et_sq_num_descs * sizeof (*txq->et_sq_descs);
    ...
    if (!ena_dma_alloc(ena, &txq->et_sq_dma, &sq_conf, sq_descs_sz)) {
        ...
    }

    txq->et_sq_descs = (void *)txq->et_sq_dma.edb_va;
    txq->et_tcbs = kmem_zalloc(sizeof (*txq->et_tcbs) *
        txq->et_sq_num_descs, KM_SLEEP);
    txq->et_tcbs_freelist = kmem_zalloc(sizeof (ena_tx_control_block_t *) *
        txq->et_sq_num_descs, KM_SLEEP);

    for (uint_t i = 0; i < txq->et_sq_num_descs; i++) {
        ena_tx_control_block_t *tcb = &txq->et_tcbs[i];
        ...
        txq->et_tcbs_freelist[i] = tcb;
    }
    txq->et_tcbs_freelist_size = txq->et_sq_num_descs;
    ...
    if (!ena_dma_alloc(ena, &txq->et_cq_dma, &cq_conf, cq_descs_sz)) {
        ...
    }

    txq->et_cq_descs = (void *)txq->et_cq_dma.edb_va;
    txq->et_state |= ENA_TXQ_STATE_HOST_ALLOC;
    ...
    ena_free_tx_dma(txq);

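The Tx control blocks (tcbs) and their freelist are sized together from et_sq_num_descs, and every tcb starts out on the freelist, so an empty freelist later means every SQ descriptor is in flight. The trailing ena_free_tx_dma(txq) call above appears to sit on the function's failure path, which is why that routine tolerates partially initialized state. A minimal userland sketch of the same freelist construction, using hypothetical names rather than the driver's types:

    #include <stdlib.h>

    /* Hypothetical stand-ins for the driver's types. */
    typedef struct tcb {
        unsigned tcb_id;
    } tcb_t;

    typedef struct txq {
        tcb_t    *tq_tcbs;          /* one control block per SQ descriptor */
        tcb_t    **tq_freelist;     /* LIFO stack of free control blocks */
        unsigned tq_freelist_size;
        unsigned tq_num_descs;
    } txq_t;

    static int
    txq_tcbs_init(txq_t *tq, unsigned num_descs)
    {
        tq->tq_num_descs = num_descs;
        tq->tq_tcbs = calloc(num_descs, sizeof (tcb_t));
        tq->tq_freelist = calloc(num_descs, sizeof (tcb_t *));
        if (tq->tq_tcbs == NULL || tq->tq_freelist == NULL) {
            free(tq->tq_tcbs);
            free(tq->tq_freelist);
            return (-1);
        }

        /* Start with every control block on the freelist. */
        for (unsigned i = 0; i < num_descs; i++) {
            tq->tq_tcbs[i].tcb_id = i;
            tq->tq_freelist[i] = &tq->tq_tcbs[i];
        }
        tq->tq_freelist_size = num_descs;
        return (0);
    }
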
In ena_alloc_txq(ena_txq_t *txq):

    ena_t *ena = txq->et_ena;
    ...
    ASSERT3U(txq->et_cq_num_descs, >, 0);
    ...
    if ((ret = ena_alloc_tx_dma(txq)) != 0) {
        ena_err(ena, ...,
            txq->et_txqs_idx, ret);
        ...
    }

    ASSERT(txq->et_state & ENA_TXQ_STATE_HOST_ALLOC);
    ...
    ret = ena_create_cq(ena, txq->et_cq_num_descs,
        txq->et_cq_dma.edb_cookie->dmac_laddress, true,
        txq->et_intr_vector, &cq_hw_idx, &cq_unmask_addr, &cq_numanode);

    if (ret != 0) {
        ena_err(ena, "failed to create Tx CQ %u: %d", txq->et_txqs_idx,
            ret);
        ...
    }

    txq->et_cq_hw_idx = cq_hw_idx;
    txq->et_cq_phase = 1;
    txq->et_cq_unmask_addr = cq_unmask_addr;
    txq->et_cq_numa_addr = cq_numanode;
    txq->et_state |= ENA_TXQ_STATE_CQ_CREATED;
    ...
    ASSERT3U(txq->et_sq_num_descs, ==, txq->et_cq_num_descs);

    ret = ena_create_sq(ena, txq->et_sq_num_descs,
        txq->et_sq_dma.edb_cookie->dmac_laddress, true, cq_hw_idx,
        ...);

    if (ret != 0) {
        ena_err(ena, "failed to create Tx SQ %u: %d", txq->et_txqs_idx,
            ret);
        ...
    }

    txq->et_sq_hw_idx = sq_hw_idx;
    txq->et_sq_db_addr = sq_db_addr;

    txq->et_sq_phase = 1;
    txq->et_sq_avail_descs = txq->et_sq_num_descs;
    txq->et_blocked = false;
    txq->et_stall_watchdog = 0;
    txq->et_state |= ENA_TXQ_STATE_SQ_CREATED;

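The CQ is created before the SQ because ena_create_sq() takes the completion queue's hardware index (cq_hw_idx), binding the two rings at creation time; the ASSERT3U above reflects that each submitted descriptor yields exactly one completion, so the rings must be the same depth. Both et_cq_phase and et_sq_phase start at 1, consistent with the ENA convention that the first lap around a ring uses phase 1. A sketch of the create/unwind ordering under those assumptions, with stub helpers standing in for the real admin-queue calls:

    /* Hypothetical stubs standing in for the device admin calls. */
    static int create_cq(unsigned *cq_hw_idx) { *cq_hw_idx = 7; return (0); }
    static int create_sq(unsigned cq_hw_idx, unsigned *sq_hw_idx)
    {
        (void) cq_hw_idx;
        *sq_hw_idx = 3;
        return (0);
    }
    static void destroy_cq(unsigned cq_hw_idx) { (void) cq_hw_idx; }

    /*
     * CQ first, then the SQ bound to it by hardware index; on SQ
     * failure, unwind in reverse order so no half-created pair is
     * left behind.
     */
    static int
    txq_hw_create(unsigned *cq_idx, unsigned *sq_idx)
    {
        int ret;

        if ((ret = create_cq(cq_idx)) != 0)
            return (ret);
        if ((ret = create_sq(*cq_idx, sq_idx)) != 0) {
            destroy_cq(*cq_idx);
            return (ret);
        }
        return (0);
    }
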
In ena_cleanup_txq(ena_txq_t *txq, bool resetting):

    ena_t *ena = txq->et_ena;

    if ((txq->et_state & ENA_TXQ_STATE_SQ_CREATED) != 0) {
        ...
        ret = ena_destroy_sq(ena, txq->et_sq_hw_idx, true);

        if (ret != 0) {
            ena_err(ena, ...,
                txq->et_txqs_idx, ret);
        }
        ...
        txq->et_sq_hw_idx = 0;
        txq->et_sq_db_addr = NULL;
        txq->et_sq_tail_idx = 0;
        txq->et_sq_phase = 0;
        txq->et_state &= ~ENA_TXQ_STATE_SQ_CREATED;
    }

    if ((txq->et_state & ENA_TXQ_STATE_CQ_CREATED) != 0) {
        ...
        ret = ena_destroy_cq(ena, txq->et_cq_hw_idx);

        if (ret != 0) {
            ena_err(ena, ...,
                txq->et_txqs_idx, ret);
        }
        ...
        txq->et_cq_hw_idx = 0;
        txq->et_cq_head_idx = 0;
        txq->et_cq_phase = 0;
        txq->et_cq_unmask_addr = NULL;
        txq->et_cq_numa_addr = NULL;
        txq->et_state &= ~ENA_TXQ_STATE_CQ_CREATED;
    }

    ena_free_tx_dma(txq);
    VERIFY3S(txq->et_state, ==, ENA_TXQ_STATE_NONE);

In ena_ring_tx_stop():

    ena_txq_t *txq = (ena_txq_t *)rh;
    ...
    intr_ctrl = ena_hw_abs_read32(txq->et_ena, txq->et_cq_unmask_addr);
    ...
    ena_hw_abs_write32(txq->et_ena, txq->et_cq_unmask_addr, intr_ctrl);

    txq->et_state &= ~ENA_TXQ_STATE_RUNNING;
    txq->et_state &= ~ENA_TXQ_STATE_READY;

In ena_ring_tx_start():

    ena_txq_t *txq = (ena_txq_t *)rh;
    ena_t *ena = txq->et_ena;
    ...
    ena_dbg(ena, "ring_tx_start %p: state 0x%x", txq, txq->et_state);

    mutex_enter(&txq->et_lock);
    txq->et_m_gen_num = gen_num;
    mutex_exit(&txq->et_lock);

    txq->et_state |= ENA_TXQ_STATE_READY;

    intr_ctrl = ena_hw_abs_read32(ena, txq->et_cq_unmask_addr);
    ...
    ena_hw_abs_write32(ena, txq->et_cq_unmask_addr, intr_ctrl);
    txq->et_state |= ENA_TXQ_STATE_RUNNING;

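Both the stop and start paths do a read-modify-write of the CQ's interrupt control register through et_cq_unmask_addr: stop masks the interrupt before clearing the RUNNING and READY bits, while start sets READY, unmasks, and only then sets RUNNING. A sketch of that read-modify-write pattern; the register layout and bit name here are illustrative, not the ENA hardware definitions:

    #include <stdint.h>

    #define INTR_MASK_BIT   (1u << 30)      /* hypothetical mask bit */

    static uint32_t mmio_read32(volatile uint32_t *r) { return (*r); }
    static void mmio_write32(volatile uint32_t *r, uint32_t v) { *r = v; }

    static void
    cq_intr_enable(volatile uint32_t *unmask_reg, int enable)
    {
        uint32_t v = mmio_read32(unmask_reg);

        if (enable)
            v &= ~INTR_MASK_BIT;    /* clear mask: interrupts flow */
        else
            v |= INTR_MASK_BIT;     /* set mask: interrupts held off */
        mmio_write32(unmask_reg, v);
    }
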
In ena_tcb_alloc(ena_txq_t *txq):

    ASSERT(MUTEX_HELD(&txq->et_lock));

    if (txq->et_tcbs_freelist_size == 0)
        return (NULL);
    txq->et_tcbs_freelist_size--;
    tcb = txq->et_tcbs_freelist[txq->et_tcbs_freelist_size];
    txq->et_tcbs_freelist[txq->et_tcbs_freelist_size] = NULL;
    ...

In ena_tcb_free(ena_txq_t *txq, ena_tx_control_block_t *tcb):

    ASSERT(MUTEX_HELD(&txq->et_lock));
    ASSERT3U(txq->et_tcbs_freelist_size, <, txq->et_sq_num_descs);
    txq->et_tcbs_freelist[txq->et_tcbs_freelist_size++] = tcb;

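ena_tcb_alloc() and ena_tcb_free() treat et_tcbs_freelist as a simple LIFO stack guarded by et_lock, so both are O(1), and the ASSERT3U guards against pushing more tcbs than there are descriptors. Continuing the userland sketch from above (same hypothetical txq_t):

    /*
     * Alloc pops from the top of the stack, free pushes back on.
     * Both are O(1); in the driver both run under et_lock.
     */
    static tcb_t *
    txq_tcb_alloc(txq_t *tq)
    {
        if (tq->tq_freelist_size == 0)
            return (NULL);          /* every descriptor is in flight */
        tq->tq_freelist_size--;
        tcb_t *tcb = tq->tq_freelist[tq->tq_freelist_size];
        tq->tq_freelist[tq->tq_freelist_size] = NULL;
        return (tcb);
    }

    static void
    txq_tcb_free(txq_t *tq, tcb_t *tcb)
    {
        /* Mirrors the driver's ASSERT3U: never push past the array. */
        if (tq->tq_freelist_size < tq->tq_num_descs)
            tq->tq_freelist[tq->tq_freelist_size++] = tcb;
    }
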
In ena_tcb_pull(const ena_txq_t *txq, ena_tx_control_block_t *tcb, mblk_t *mp):

    ena_t *ena = txq->et_ena;

    ASSERT(MUTEX_HELD(&txq->et_lock));
    ...

In ena_fill_tx_data_desc(ena_txq_t *txq, ena_tx_control_block_t *tcb, ...):

    ...
    ASSERT3U(hdr_len, <=, txq->et_ena->ena_tx_max_hdr_len);
    ...

In ena_submit_tx(ena_txq_t *txq, uint16_t desc_idx):

    ena_hw_abs_write32(txq->et_ena, txq->et_sq_db_addr, desc_idx);

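The doorbell write in ena_submit_tx() passes the free-running tail index, not the masked ring slot; only slot selection in ena_ring_tx() applies modulo_mask, which assumes et_sq_num_descs is a power of two. A small standalone demonstration of the raw-index/masked-slot split:

    #include <stdio.h>

    int
    main(void)
    {
        const unsigned num_descs = 8;   /* power of two, like the SQ */
        const unsigned mask = num_descs - 1;

        /* The tail index is free-running; only slot selection is masked. */
        for (unsigned tail = 5; tail < 12; tail++)
            printf("tail %2u -> slot %u\n", tail, tail & mask);
        return (0);
    }
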
In ena_ring_tx():

    ena_txq_t *txq = arg;
    ena_t *ena = txq->et_ena;
    ...
    const uint16_t modulo_mask = txq->et_sq_num_descs - 1;
    ...
    if (... ||
        !(txq->et_state & ENA_TXQ_STATE_RUNNING)) {
        ...
    }
    ...
        mutex_enter(&txq->et_stat_lock);
        txq->et_stat.ets_hck_meoifail.value.ui64++;
        mutex_exit(&txq->et_stat_lock);
        ...

    mutex_enter(&txq->et_lock);
    ...
    if (txq->et_blocked || txq->et_sq_avail_descs == 0) {
        txq->et_blocked = true;
        mutex_enter(&txq->et_stat_lock);
        txq->et_stat.ets_blocked.value.ui64++;
        mutex_exit(&txq->et_stat_lock);
        mutex_exit(&txq->et_lock);
        ...
    }
    ...
    tcb = ena_tcb_alloc(txq);
    ...
    ena_tcb_pull(txq, tcb, mp);
    ...
    tail_mod = txq->et_sq_tail_idx & modulo_mask;
    desc = &txq->et_sq_descs[tail_mod].etd_data;
    ena_fill_tx_data_desc(txq, tcb, tcb->etcb_id, txq->et_sq_phase, desc,
        ...);
    ...
    txq->et_sq_avail_descs--;
    ...
    txq->et_sq_tail_idx++;
    ena_submit_tx(txq, txq->et_sq_tail_idx);

    mutex_enter(&txq->et_stat_lock);
    txq->et_stat.ets_packets.value.ui64++;
    txq->et_stat.ets_bytes.value.ui64 += meo.meoi_len;
    mutex_exit(&txq->et_stat_lock);

    if ((txq->et_sq_tail_idx & modulo_mask) == 0)
        txq->et_sq_phase ^= 1;

    mutex_exit(&txq->et_lock);
    ...

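ena_ring_tx() stamps each data descriptor with the current et_sq_phase and flips the phase every time the tail wraps (tail & modulo_mask == 0); that is how the device distinguishes freshly written descriptors from stale ones left over from the previous lap. A tiny standalone illustration of that invariant:

    #include <stdio.h>

    int
    main(void)
    {
        const unsigned num_descs = 4;   /* tiny ring for illustration */
        const unsigned mask = num_descs - 1;
        unsigned tail = 0, phase = 1;   /* first lap uses phase 1 */

        for (int i = 0; i < 10; i++) {
            printf("slot %u <- phase %u\n", tail & mask, phase);
            tail++;
            if ((tail & mask) == 0)
                phase ^= 1;             /* flip on every wraparound */
        }
        return (0);
    }
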
In ena_tx_intr_work(ena_txq_t *txq):

    ...
    const uint16_t modulo_mask = txq->et_cq_num_descs - 1;
    ena_t *ena = txq->et_ena;

    mutex_enter(&txq->et_lock);
    head_mod = txq->et_cq_head_idx & modulo_mask;
    ENA_DMA_SYNC(txq->et_cq_dma, DDI_DMA_SYNC_FORKERNEL);
    cdesc = &txq->et_cq_descs[head_mod];

    while (ENAHW_TX_CDESC_GET_PHASE(cdesc) == txq->et_cq_phase) {
        ...
        if (req_id >= txq->et_sq_num_descs) {
            ...
        }
        ...
        tcb = &txq->et_tcbs[req_id];
        ...
        ena_tcb_free(txq, tcb);
        txq->et_sq_avail_descs++;

        txq->et_cq_head_idx++;
        head_mod = txq->et_cq_head_idx & modulo_mask;
        if (head_mod == 0)
            txq->et_cq_phase ^= 1;

        if (txq->et_blocked) {
            txq->et_blocked = false;
            txq->et_stall_watchdog = 0;

            mac_tx_ring_update(ena->ena_mh, txq->et_mrh);
        }
        ...
        cdesc = &txq->et_cq_descs[head_mod];
    }

    mutex_exit(&txq->et_lock);
    ...
    mutex_enter(&txq->et_stat_lock);
    txq->et_stat.ets_recycled.value.ui64 += recycled;
    ...
    txq->et_stat.ets_unblocked.value.ui64++;
    ...
    mutex_exit(&txq->et_stat_lock);

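ena_tx_intr_work() is the consumer side of the same phase protocol: it recycles completions while the completion descriptor's phase matches et_cq_phase, flips the expected phase on wraparound, and stops at the first stale entry; no head pointer is written back to the device. (The bounds check above is shown as >=, since valid req_id values index et_tcbs[0 .. et_sq_num_descs - 1].) A hedged sketch of that consumption loop, with a hypothetical descriptor layout in place of the real ENAHW accessors:

    #include <stdint.h>

    /*
     * Hypothetical completion descriptor; the real one packs the phase
     * into a flags word read via ENAHW_TX_CDESC_GET_PHASE().
     */
    typedef struct cdesc {
        uint16_t cd_req_id;
        uint8_t  cd_phase;
    } cdesc_t;

    /*
     * Consume completions while the descriptor phase matches the
     * expected phase; the first mismatch marks an entry the device has
     * not yet written on this lap, so the scan stops there.
     */
    static unsigned
    consume_completions(const cdesc_t *ring, uint16_t num_descs,
        uint16_t *head, uint8_t *phase)
    {
        const uint16_t mask = num_descs - 1;    /* power-of-two ring */
        unsigned recycled = 0;

        while (ring[*head & mask].cd_phase == *phase) {
            /* recycle ring[*head & mask].cd_req_id here ... */
            (*head)++;
            if ((*head & mask) == 0)
                *phase ^= 1;    /* expect flipped phase next lap */
            recycled++;
        }
        return (recycled);
    }
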