Lines Matching refs:tx_q
394 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_tx_avail() local
397 if (tx_q->dirty_tx > tx_q->cur_tx) in stmmac_tx_avail()
398 avail = tx_q->dirty_tx - tx_q->cur_tx - 1; in stmmac_tx_avail()
400 avail = priv->dma_conf.dma_tx_size - tx_q->cur_tx + tx_q->dirty_tx - 1; in stmmac_tx_avail()
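
In stmmac_tx_avail(), dirty_tx and cur_tx act as the consumer and producer indices of a circular descriptor ring; the two branches compute the number of free slots while always keeping one slot unused so a full ring stays distinguishable from an empty one. A minimal user-space sketch of the same arithmetic (function name and test values are illustrative, not taken from the driver):

    #include <assert.h>
    #include <stdio.h>

    /* Free slots in a ring of 'size' entries given the consumer (dirty) and
     * producer (cur) indices; one slot stays unused, hence the "- 1". */
    unsigned int ring_avail(unsigned int dirty, unsigned int cur, unsigned int size)
    {
            if (dirty > cur)
                    return dirty - cur - 1;
            return size - cur + dirty - 1;
    }

    int main(void)
    {
            assert(ring_avail(0, 0, 512) == 511);   /* empty ring */
            assert(ring_avail(10, 9, 512) == 0);    /* completely full */
            printf("free slots: %u\n", ring_avail(100, 350, 512));
            return 0;
    }
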
430 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_eee_tx_busy() local
432 if (tx_q->dirty_tx != tx_q->cur_tx) in stmmac_eee_tx_busy()
1365 struct stmmac_tx_queue *tx_q = &dma_conf->tx_queue[queue]; in stmmac_display_tx_rings() local
1370 head_tx = (void *)tx_q->dma_etx; in stmmac_display_tx_rings()
1372 } else if (tx_q->tbs & STMMAC_TBS_AVAIL) { in stmmac_display_tx_rings()
1373 head_tx = (void *)tx_q->dma_entx; in stmmac_display_tx_rings()
1376 head_tx = (void *)tx_q->dma_tx; in stmmac_display_tx_rings()
1381 tx_q->dma_tx_phy, desc_size); in stmmac_display_tx_rings()
1462 struct stmmac_tx_queue *tx_q = &dma_conf->tx_queue[queue]; in stmmac_clear_tx_descriptors() local
1471 p = &tx_q->dma_etx[i].basic; in stmmac_clear_tx_descriptors()
1472 else if (tx_q->tbs & STMMAC_TBS_AVAIL) in stmmac_clear_tx_descriptors()
1473 p = &tx_q->dma_entx[i].basic; in stmmac_clear_tx_descriptors()
1475 p = &tx_q->dma_tx[i]; in stmmac_clear_tx_descriptors()
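
stmmac_display_tx_rings() and stmmac_clear_tx_descriptors() both use the same three-way choice that recurs throughout this file: a queue is backed either by extended descriptors (dma_etx), by enhanced TBS descriptors (dma_entx, when STMMAC_TBS_AVAIL is set), or by plain descriptors (dma_tx). A condensed sketch of that selection, with trimmed struct layouts standing in for the driver's dma_desc/dma_extended_desc/dma_edesc types:

    /* Trimmed stand-ins for the driver's descriptor layouts. */
    struct dma_desc          { unsigned int des0, des1, des2, des3; };
    struct dma_extended_desc { struct dma_desc basic; unsigned int des4, des5, des6, des7; };
    struct dma_edesc         { unsigned int des4, des5, des6, des7; struct dma_desc basic; };

    #define TBS_AVAIL 0x1     /* models STMMAC_TBS_AVAIL */

    /* Return the basic descriptor for slot i from whichever ring layout is in
     * use; extend_desc and tbs mirror priv->extend_desc and tx_q->tbs. */
    struct dma_desc *pick_tx_desc(int extend_desc, unsigned int tbs,
                                  struct dma_extended_desc *etx,
                                  struct dma_edesc *entx,
                                  struct dma_desc *tx, unsigned int i)
    {
            if (extend_desc)
                    return &etx[i].basic;
            else if (tbs & TBS_AVAIL)
                    return &entx[i].basic;
            else
                    return &tx[i];
    }
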
1587 struct stmmac_tx_queue *tx_q = &dma_conf->tx_queue[queue]; in stmmac_free_tx_buffer() local
1589 if (tx_q->tx_skbuff_dma[i].buf && in stmmac_free_tx_buffer()
1590 tx_q->tx_skbuff_dma[i].buf_type != STMMAC_TXBUF_T_XDP_TX) { in stmmac_free_tx_buffer()
1591 if (tx_q->tx_skbuff_dma[i].map_as_page) in stmmac_free_tx_buffer()
1593 tx_q->tx_skbuff_dma[i].buf, in stmmac_free_tx_buffer()
1594 tx_q->tx_skbuff_dma[i].len, in stmmac_free_tx_buffer()
1598 tx_q->tx_skbuff_dma[i].buf, in stmmac_free_tx_buffer()
1599 tx_q->tx_skbuff_dma[i].len, in stmmac_free_tx_buffer()
1603 if (tx_q->xdpf[i] && in stmmac_free_tx_buffer()
1604 (tx_q->tx_skbuff_dma[i].buf_type == STMMAC_TXBUF_T_XDP_TX || in stmmac_free_tx_buffer()
1605 tx_q->tx_skbuff_dma[i].buf_type == STMMAC_TXBUF_T_XDP_NDO)) { in stmmac_free_tx_buffer()
1606 xdp_return_frame(tx_q->xdpf[i]); in stmmac_free_tx_buffer()
1607 tx_q->xdpf[i] = NULL; in stmmac_free_tx_buffer()
1610 if (tx_q->tx_skbuff_dma[i].buf_type == STMMAC_TXBUF_T_XSK_TX) in stmmac_free_tx_buffer()
1611 tx_q->xsk_frames_done++; in stmmac_free_tx_buffer()
1613 if (tx_q->tx_skbuff[i] && in stmmac_free_tx_buffer()
1614 tx_q->tx_skbuff_dma[i].buf_type == STMMAC_TXBUF_T_SKB) { in stmmac_free_tx_buffer()
1615 dev_kfree_skb_any(tx_q->tx_skbuff[i]); in stmmac_free_tx_buffer()
1616 tx_q->tx_skbuff[i] = NULL; in stmmac_free_tx_buffer()
1619 tx_q->tx_skbuff_dma[i].buf = 0; in stmmac_free_tx_buffer()
1620 tx_q->tx_skbuff_dma[i].map_as_page = false; in stmmac_free_tx_buffer()
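
The free path in stmmac_free_tx_buffer() dispatches on the per-slot buf_type: regular DMA mappings are unmapped (page vs. single mapping per map_as_page), XDP frames are handed back with xdp_return_frame(), XSK frames are only counted so they can be completed in bulk later, and plain skbs are freed. A compressed model of that dispatch, with printf() standing in for the unmap/free calls:

    #include <stdbool.h>
    #include <stdio.h>

    enum buf_type { TXBUF_T_SKB, TXBUF_T_XDP_TX, TXBUF_T_XDP_NDO, TXBUF_T_XSK_TX };

    struct tx_buf {                 /* trimmed-down tx_skbuff_dma[i] entry */
            unsigned long buf;      /* DMA address, 0 if nothing mapped   */
            bool map_as_page;
            enum buf_type type;
    };

    /* What the free path would do for one slot; the driver calls
     * dma_unmap_{page,single}(), xdp_return_frame(), dev_kfree_skb_any()
     * or bumps xsk_frames_done where this sketch only prints. */
    void free_tx_buffer(const struct tx_buf *b, bool have_xdpf, bool have_skb)
    {
            if (b->buf && b->type != TXBUF_T_XDP_TX)
                    printf("unmap %s mapping\n", b->map_as_page ? "page" : "single");
            if (have_xdpf && (b->type == TXBUF_T_XDP_TX || b->type == TXBUF_T_XDP_NDO))
                    printf("return XDP frame\n");
            if (b->type == TXBUF_T_XSK_TX)
                    printf("count XSK frame as completed\n");
            if (have_skb && b->type == TXBUF_T_SKB)
                    printf("free skb\n");
    }
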
1858 struct stmmac_tx_queue *tx_q = &dma_conf->tx_queue[queue]; in __init_dma_tx_desc_rings() local
1863 (u32)tx_q->dma_tx_phy); in __init_dma_tx_desc_rings()
1868 stmmac_mode_init(priv, tx_q->dma_etx, in __init_dma_tx_desc_rings()
1869 tx_q->dma_tx_phy, in __init_dma_tx_desc_rings()
1871 else if (!(tx_q->tbs & STMMAC_TBS_AVAIL)) in __init_dma_tx_desc_rings()
1872 stmmac_mode_init(priv, tx_q->dma_tx, in __init_dma_tx_desc_rings()
1873 tx_q->dma_tx_phy, in __init_dma_tx_desc_rings()
1877 tx_q->xsk_pool = stmmac_get_xsk_pool(priv, queue); in __init_dma_tx_desc_rings()
1883 p = &((tx_q->dma_etx + i)->basic); in __init_dma_tx_desc_rings()
1884 else if (tx_q->tbs & STMMAC_TBS_AVAIL) in __init_dma_tx_desc_rings()
1885 p = &((tx_q->dma_entx + i)->basic); in __init_dma_tx_desc_rings()
1887 p = tx_q->dma_tx + i; in __init_dma_tx_desc_rings()
1891 tx_q->tx_skbuff_dma[i].buf = 0; in __init_dma_tx_desc_rings()
1892 tx_q->tx_skbuff_dma[i].map_as_page = false; in __init_dma_tx_desc_rings()
1893 tx_q->tx_skbuff_dma[i].len = 0; in __init_dma_tx_desc_rings()
1894 tx_q->tx_skbuff_dma[i].last_segment = false; in __init_dma_tx_desc_rings()
1895 tx_q->tx_skbuff[i] = NULL; in __init_dma_tx_desc_rings()
1956 struct stmmac_tx_queue *tx_q = &dma_conf->tx_queue[queue]; in dma_free_tx_skbufs() local
1959 tx_q->xsk_frames_done = 0; in dma_free_tx_skbufs()
1964 if (tx_q->xsk_pool && tx_q->xsk_frames_done) { in dma_free_tx_skbufs()
1965 xsk_tx_completed(tx_q->xsk_pool, tx_q->xsk_frames_done); in dma_free_tx_skbufs()
1966 tx_q->xsk_frames_done = 0; in dma_free_tx_skbufs()
1967 tx_q->xsk_pool = NULL; in dma_free_tx_skbufs()
2044 struct stmmac_tx_queue *tx_q = &dma_conf->tx_queue[queue]; in __free_dma_tx_desc_resources() local
2053 addr = tx_q->dma_etx; in __free_dma_tx_desc_resources()
2054 } else if (tx_q->tbs & STMMAC_TBS_AVAIL) { in __free_dma_tx_desc_resources()
2056 addr = tx_q->dma_entx; in __free_dma_tx_desc_resources()
2059 addr = tx_q->dma_tx; in __free_dma_tx_desc_resources()
2064 dma_free_coherent(priv->device, size, addr, tx_q->dma_tx_phy); in __free_dma_tx_desc_resources()
2066 kfree(tx_q->tx_skbuff_dma); in __free_dma_tx_desc_resources()
2067 kfree(tx_q->tx_skbuff); in __free_dma_tx_desc_resources()
2210 struct stmmac_tx_queue *tx_q = &dma_conf->tx_queue[queue]; in __alloc_dma_tx_desc_resources() local
2214 tx_q->queue_index = queue; in __alloc_dma_tx_desc_resources()
2215 tx_q->priv_data = priv; in __alloc_dma_tx_desc_resources()
2217 tx_q->tx_skbuff_dma = kcalloc(dma_conf->dma_tx_size, in __alloc_dma_tx_desc_resources()
2218 sizeof(*tx_q->tx_skbuff_dma), in __alloc_dma_tx_desc_resources()
2220 if (!tx_q->tx_skbuff_dma) in __alloc_dma_tx_desc_resources()
2223 tx_q->tx_skbuff = kcalloc(dma_conf->dma_tx_size, in __alloc_dma_tx_desc_resources()
2226 if (!tx_q->tx_skbuff) in __alloc_dma_tx_desc_resources()
2231 else if (tx_q->tbs & STMMAC_TBS_AVAIL) in __alloc_dma_tx_desc_resources()
2239 &tx_q->dma_tx_phy, GFP_KERNEL); in __alloc_dma_tx_desc_resources()
2244 tx_q->dma_etx = addr; in __alloc_dma_tx_desc_resources()
2245 else if (tx_q->tbs & STMMAC_TBS_AVAIL) in __alloc_dma_tx_desc_resources()
2246 tx_q->dma_entx = addr; in __alloc_dma_tx_desc_resources()
2248 tx_q->dma_tx = addr; in __alloc_dma_tx_desc_resources()
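
__alloc_dma_tx_desc_resources() allocates two kcalloc'd bookkeeping arrays plus one coherent DMA buffer per queue; the coherent buffer size is the ring length times the per-entry size of whichever descriptor layout that queue uses. A sketch of that size computation (the trimmed struct contents are assumptions, as above):

    #include <stddef.h>
    #include <stdio.h>

    struct dma_desc          { unsigned int des0, des1, des2, des3; };
    struct dma_extended_desc { struct dma_desc basic; unsigned int des4, des5, des6, des7; };
    struct dma_edesc         { unsigned int des4, des5, des6, des7; struct dma_desc basic; };

    /* Byte count handed to dma_alloc_coherent() for one TX ring: entry count
     * times the per-entry size of the layout that queue uses. */
    size_t tx_ring_bytes(unsigned int dma_tx_size, int extend_desc, int tbs_avail)
    {
            size_t entry = sizeof(struct dma_desc);

            if (extend_desc)
                    entry = sizeof(struct dma_extended_desc);
            else if (tbs_avail)
                    entry = sizeof(struct dma_edesc);
            return (size_t)dma_tx_size * entry;
    }

    int main(void)
    {
            /* e.g. a 512-entry ring of plain 16-byte descriptors: 8192 bytes */
            printf("%zu\n", tx_ring_bytes(512, 0, 0));
            return 0;
    }
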
2569 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_xdp_xmit_zc() local
2571 struct xsk_buff_pool *pool = tx_q->xsk_pool; in stmmac_xdp_xmit_zc()
2572 unsigned int entry = tx_q->cur_tx; in stmmac_xdp_xmit_zc()
2609 tx_desc = (struct dma_desc *)(tx_q->dma_etx + entry); in stmmac_xdp_xmit_zc()
2610 else if (tx_q->tbs & STMMAC_TBS_AVAIL) in stmmac_xdp_xmit_zc()
2611 tx_desc = &tx_q->dma_entx[entry].basic; in stmmac_xdp_xmit_zc()
2613 tx_desc = tx_q->dma_tx + entry; in stmmac_xdp_xmit_zc()
2619 tx_q->tx_skbuff_dma[entry].buf_type = STMMAC_TXBUF_T_XSK_TX; in stmmac_xdp_xmit_zc()
2625 tx_q->tx_skbuff_dma[entry].buf = 0; in stmmac_xdp_xmit_zc()
2626 tx_q->xdpf[entry] = NULL; in stmmac_xdp_xmit_zc()
2628 tx_q->tx_skbuff_dma[entry].map_as_page = false; in stmmac_xdp_xmit_zc()
2629 tx_q->tx_skbuff_dma[entry].len = xdp_desc.len; in stmmac_xdp_xmit_zc()
2630 tx_q->tx_skbuff_dma[entry].last_segment = true; in stmmac_xdp_xmit_zc()
2631 tx_q->tx_skbuff_dma[entry].is_jumbo = false; in stmmac_xdp_xmit_zc()
2635 tx_q->tx_count_frames++; in stmmac_xdp_xmit_zc()
2639 else if (tx_q->tx_count_frames % priv->tx_coal_frames[queue] == 0) in stmmac_xdp_xmit_zc()
2647 meta_req.tbs = tx_q->tbs; in stmmac_xdp_xmit_zc()
2648 meta_req.edesc = &tx_q->dma_entx[entry]; in stmmac_xdp_xmit_zc()
2652 tx_q->tx_count_frames = 0; in stmmac_xdp_xmit_zc()
2664 &tx_q->tx_skbuff_dma[entry].xsk_meta); in stmmac_xdp_xmit_zc()
2666 tx_q->cur_tx = STMMAC_GET_ENTRY(tx_q->cur_tx, priv->dma_conf.dma_tx_size); in stmmac_xdp_xmit_zc()
2667 entry = tx_q->cur_tx; in stmmac_xdp_xmit_zc()
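
Two details of the zero-copy path above recur in every transmit routine here: cur_tx is advanced with STMMAC_GET_ENTRY(), and a completion interrupt is only requested every tx_coal_frames packets. Assuming STMMAC_GET_ENTRY() is the usual power-of-two wrap (its definition is not part of this listing), the two pieces look roughly like:

    /* Ring-index advance; dma_tx_size is assumed to be a power of two,
     * so the wrap reduces to a mask. */
    unsigned int ring_next(unsigned int x, unsigned int size)
    {
            return (x + 1) & (size - 1);
    }

    /* Interrupt-on-completion decision modelled on the tx_count_frames
     * bookkeeping: never when frame coalescing is disabled (count of 0),
     * otherwise on every coal_frames-th packet. */
    int want_tx_completion_irq(unsigned int count_frames, unsigned int coal_frames)
    {
            if (!coal_frames)
                    return 0;
            return (count_frames % coal_frames) == 0;
    }
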
2714 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_tx_clean() local
2722 tx_q->xsk_frames_done = 0; in stmmac_tx_clean()
2724 entry = tx_q->dirty_tx; in stmmac_tx_clean()
2727 while ((entry != tx_q->cur_tx) && count < priv->dma_conf.dma_tx_size) { in stmmac_tx_clean()
2733 if (tx_q->tx_skbuff_dma[entry].buf_type == STMMAC_TXBUF_T_XDP_TX || in stmmac_tx_clean()
2734 tx_q->tx_skbuff_dma[entry].buf_type == STMMAC_TXBUF_T_XDP_NDO) { in stmmac_tx_clean()
2735 xdpf = tx_q->xdpf[entry]; in stmmac_tx_clean()
2737 } else if (tx_q->tx_skbuff_dma[entry].buf_type == STMMAC_TXBUF_T_SKB) { in stmmac_tx_clean()
2739 skb = tx_q->tx_skbuff[entry]; in stmmac_tx_clean()
2746 p = (struct dma_desc *)(tx_q->dma_etx + entry); in stmmac_tx_clean()
2747 else if (tx_q->tbs & STMMAC_TBS_AVAIL) in stmmac_tx_clean()
2748 p = &tx_q->dma_entx[entry].basic; in stmmac_tx_clean()
2750 p = tx_q->dma_tx + entry; in stmmac_tx_clean()
2776 } else if (tx_q->xsk_pool && in stmmac_tx_clean()
2777 xp_tx_metadata_enabled(tx_q->xsk_pool)) { in stmmac_tx_clean()
2783 xsk_tx_metadata_complete(&tx_q->tx_skbuff_dma[entry].xsk_meta, in stmmac_tx_clean()
2789 if (likely(tx_q->tx_skbuff_dma[entry].buf && in stmmac_tx_clean()
2790 tx_q->tx_skbuff_dma[entry].buf_type != STMMAC_TXBUF_T_XDP_TX)) { in stmmac_tx_clean()
2791 if (tx_q->tx_skbuff_dma[entry].map_as_page) in stmmac_tx_clean()
2793 tx_q->tx_skbuff_dma[entry].buf, in stmmac_tx_clean()
2794 tx_q->tx_skbuff_dma[entry].len, in stmmac_tx_clean()
2798 tx_q->tx_skbuff_dma[entry].buf, in stmmac_tx_clean()
2799 tx_q->tx_skbuff_dma[entry].len, in stmmac_tx_clean()
2801 tx_q->tx_skbuff_dma[entry].buf = 0; in stmmac_tx_clean()
2802 tx_q->tx_skbuff_dma[entry].len = 0; in stmmac_tx_clean()
2803 tx_q->tx_skbuff_dma[entry].map_as_page = false; in stmmac_tx_clean()
2806 stmmac_clean_desc3(priv, tx_q, p); in stmmac_tx_clean()
2808 tx_q->tx_skbuff_dma[entry].last_segment = false; in stmmac_tx_clean()
2809 tx_q->tx_skbuff_dma[entry].is_jumbo = false; in stmmac_tx_clean()
2812 tx_q->tx_skbuff_dma[entry].buf_type == STMMAC_TXBUF_T_XDP_TX) { in stmmac_tx_clean()
2814 tx_q->xdpf[entry] = NULL; in stmmac_tx_clean()
2818 tx_q->tx_skbuff_dma[entry].buf_type == STMMAC_TXBUF_T_XDP_NDO) { in stmmac_tx_clean()
2820 tx_q->xdpf[entry] = NULL; in stmmac_tx_clean()
2823 if (tx_q->tx_skbuff_dma[entry].buf_type == STMMAC_TXBUF_T_XSK_TX) in stmmac_tx_clean()
2824 tx_q->xsk_frames_done++; in stmmac_tx_clean()
2826 if (tx_q->tx_skbuff_dma[entry].buf_type == STMMAC_TXBUF_T_SKB) { in stmmac_tx_clean()
2831 tx_q->tx_skbuff[entry] = NULL; in stmmac_tx_clean()
2839 tx_q->dirty_tx = entry; in stmmac_tx_clean()
2853 if (tx_q->xsk_pool) { in stmmac_tx_clean()
2856 if (tx_q->xsk_frames_done) in stmmac_tx_clean()
2857 xsk_tx_completed(tx_q->xsk_pool, tx_q->xsk_frames_done); in stmmac_tx_clean()
2859 if (xsk_uses_need_wakeup(tx_q->xsk_pool)) in stmmac_tx_clean()
2860 xsk_set_tx_need_wakeup(tx_q->xsk_pool); in stmmac_tx_clean()
2879 if (tx_q->dirty_tx != tx_q->cur_tx) in stmmac_tx_clean()
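
stmmac_tx_clean() walks released slots from the consumer index (dirty_tx) toward the producer index (cur_tx), bounded by the ring size, doing the same per-slot unmap/free work sketched earlier; the real loop also inspects each descriptor's status first and stops as soon as it reaches one the hardware has not yet released. A bare-bones model of the walk (release_slot() is a placeholder for that per-entry work):

    /* Completion walk: consume entries from *dirty up to, but not
     * including, cur; returns how many were cleaned and advances *dirty. */
    unsigned int tx_clean(unsigned int *dirty, unsigned int cur, unsigned int size,
                          void (*release_slot)(unsigned int entry))
    {
            unsigned int entry = *dirty, count = 0;

            while (entry != cur && count < size) {
                    release_slot(entry);
                    count++;
                    entry = (entry + 1) & (size - 1);
            }
            *dirty = entry;
            return count;
    }
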
2905 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[chan]; in stmmac_tx_err() local
2914 tx_q->dma_tx_phy, chan); in stmmac_tx_err()
2973 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[chan]; in stmmac_napi_check() local
2980 tx_napi = tx_q->xsk_pool ? &ch->rxtx_napi : &ch->tx_napi; in stmmac_napi_check()
3106 struct stmmac_tx_queue *tx_q; in stmmac_init_dma_engine() local
3152 tx_q = &priv->dma_conf.tx_queue[chan]; in stmmac_init_dma_engine()
3155 tx_q->dma_tx_phy, chan); in stmmac_init_dma_engine()
3157 tx_q->tx_tail_addr = tx_q->dma_tx_phy; in stmmac_init_dma_engine()
3159 tx_q->tx_tail_addr, chan); in stmmac_init_dma_engine()
3167 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_tx_timer_arm() local
3175 ch = &priv->channel[tx_q->queue_index]; in stmmac_tx_timer_arm()
3176 napi = tx_q->xsk_pool ? &ch->rxtx_napi : &ch->tx_napi; in stmmac_tx_timer_arm()
3183 hrtimer_start(&tx_q->txtimer, in stmmac_tx_timer_arm()
3187 hrtimer_try_to_cancel(&tx_q->txtimer); in stmmac_tx_timer_arm()
3198 struct stmmac_tx_queue *tx_q = container_of(t, struct stmmac_tx_queue, txtimer); in stmmac_tx_timer() local
3199 struct stmmac_priv *priv = tx_q->priv_data; in stmmac_tx_timer()
3203 ch = &priv->channel[tx_q->queue_index]; in stmmac_tx_timer()
3204 napi = tx_q->xsk_pool ? &ch->rxtx_napi : &ch->tx_napi; in stmmac_tx_timer()
3233 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[chan]; in stmmac_init_coalesce() local
3238 hrtimer_setup(&tx_q->txtimer, stmmac_tx_timer, CLOCK_MONOTONIC, HRTIMER_MODE_REL); in stmmac_init_coalesce()
3581 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[chan]; in stmmac_hw_setup() local
3584 if (tx_q->tbs & STMMAC_TBS_AVAIL) in stmmac_hw_setup()
3603 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[chan]; in stmmac_hw_setup() local
3604 int enable = tx_q->tbs & STMMAC_TBS_AVAIL; in stmmac_hw_setup()
3977 struct stmmac_tx_queue *tx_q = &dma_conf->tx_queue[chan]; in stmmac_setup_dma_desc() local
3981 tx_q->tbs |= tbs_en ? STMMAC_TBS_AVAIL : 0; in stmmac_setup_dma_desc()
4163 struct stmmac_tx_queue *tx_q) in stmmac_vlan_insert() argument
4180 if (tx_q->tbs & STMMAC_TBS_AVAIL) in stmmac_vlan_insert()
4181 p = &tx_q->dma_entx[tx_q->cur_tx].basic; in stmmac_vlan_insert()
4183 p = &tx_q->dma_tx[tx_q->cur_tx]; in stmmac_vlan_insert()
4189 tx_q->cur_tx = STMMAC_GET_ENTRY(tx_q->cur_tx, priv->dma_conf.dma_tx_size); in stmmac_vlan_insert()
4207 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_tso_allocator() local
4217 tx_q->cur_tx = STMMAC_GET_ENTRY(tx_q->cur_tx, in stmmac_tso_allocator()
4219 WARN_ON(tx_q->tx_skbuff[tx_q->cur_tx]); in stmmac_tso_allocator()
4221 if (tx_q->tbs & STMMAC_TBS_AVAIL) in stmmac_tso_allocator()
4222 desc = &tx_q->dma_entx[tx_q->cur_tx].basic; in stmmac_tso_allocator()
4224 desc = &tx_q->dma_tx[tx_q->cur_tx]; in stmmac_tso_allocator()
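
stmmac_tso_allocator() chops one DMA-mapped region into descriptor-sized chunks, advancing cur_tx once per chunk. Assuming each descriptor carries at most TSO_MAX_BUFF_SIZE bytes, the number of descriptors one mapped region consumes is a ceiling division:

    /* Descriptors needed for 'len' payload bytes when each descriptor can
     * carry at most 'max_buff' bytes (TSO_MAX_BUFF_SIZE in the driver). */
    unsigned int tso_desc_count(unsigned int len, unsigned int max_buff)
    {
            return (len + max_buff - 1) / max_buff;
    }
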
4242 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_flush_tx_descriptors() local
4247 else if (tx_q->tbs & STMMAC_TBS_AVAIL) in stmmac_flush_tx_descriptors()
4258 tx_q->tx_tail_addr = tx_q->dma_tx_phy + (tx_q->cur_tx * desc_size); in stmmac_flush_tx_descriptors()
4259 stmmac_set_tx_tail_ptr(priv, priv->ioaddr, tx_q->tx_tail_addr, queue); in stmmac_flush_tx_descriptors()
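
stmmac_flush_tx_descriptors() then tells the DMA engine how far it may fetch: the tail pointer is the ring's bus address plus the producer index scaled by the per-entry descriptor size (16 bytes for a basic descriptor, 32 for the extended and TBS layouts sketched above). In isolation:

    #include <stddef.h>

    /* Tail pointer written to the hardware after queuing new descriptors. */
    unsigned long long tx_tail_addr(unsigned long long dma_tx_phy,
                                    unsigned int cur_tx, size_t desc_size)
    {
            return dma_tx_phy + (unsigned long long)cur_tx * desc_size;
    }
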
4305 struct stmmac_tx_queue *tx_q; in stmmac_tso_xmit() local
4328 tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_tso_xmit()
4330 first_tx = tx_q->cur_tx; in stmmac_tso_xmit()
4360 if (mss != tx_q->mss) { in stmmac_tso_xmit()
4361 if (tx_q->tbs & STMMAC_TBS_AVAIL) in stmmac_tso_xmit()
4362 mss_desc = &tx_q->dma_entx[tx_q->cur_tx].basic; in stmmac_tso_xmit()
4364 mss_desc = &tx_q->dma_tx[tx_q->cur_tx]; in stmmac_tso_xmit()
4367 tx_q->mss = mss; in stmmac_tso_xmit()
4368 tx_q->cur_tx = STMMAC_GET_ENTRY(tx_q->cur_tx, in stmmac_tso_xmit()
4370 WARN_ON(tx_q->tx_skbuff[tx_q->cur_tx]); in stmmac_tso_xmit()
4380 first_entry = tx_q->cur_tx; in stmmac_tso_xmit()
4381 WARN_ON(tx_q->tx_skbuff[first_entry]); in stmmac_tso_xmit()
4383 if (tx_q->tbs & STMMAC_TBS_AVAIL) in stmmac_tso_xmit()
4384 desc = &tx_q->dma_entx[first_entry].basic; in stmmac_tso_xmit()
4386 desc = &tx_q->dma_tx[first_entry]; in stmmac_tso_xmit()
4411 tx_q->tx_skbuff_dma[tx_q->cur_tx].buf = des; in stmmac_tso_xmit()
4412 tx_q->tx_skbuff_dma[tx_q->cur_tx].len = skb_headlen(skb); in stmmac_tso_xmit()
4413 tx_q->tx_skbuff_dma[tx_q->cur_tx].map_as_page = false; in stmmac_tso_xmit()
4414 tx_q->tx_skbuff_dma[tx_q->cur_tx].buf_type = STMMAC_TXBUF_T_SKB; in stmmac_tso_xmit()
4429 tx_q->tx_skbuff_dma[tx_q->cur_tx].buf = des; in stmmac_tso_xmit()
4430 tx_q->tx_skbuff_dma[tx_q->cur_tx].len = skb_frag_size(frag); in stmmac_tso_xmit()
4431 tx_q->tx_skbuff_dma[tx_q->cur_tx].map_as_page = true; in stmmac_tso_xmit()
4432 tx_q->tx_skbuff_dma[tx_q->cur_tx].buf_type = STMMAC_TXBUF_T_SKB; in stmmac_tso_xmit()
4435 tx_q->tx_skbuff_dma[tx_q->cur_tx].last_segment = true; in stmmac_tso_xmit()
4438 tx_q->tx_skbuff[tx_q->cur_tx] = skb; in stmmac_tso_xmit()
4439 tx_q->tx_skbuff_dma[tx_q->cur_tx].buf_type = STMMAC_TXBUF_T_SKB; in stmmac_tso_xmit()
4442 tx_packets = (tx_q->cur_tx + 1) - first_tx; in stmmac_tso_xmit()
4443 tx_q->tx_count_frames += tx_packets; in stmmac_tso_xmit()
4451 else if ((tx_q->tx_count_frames % in stmmac_tso_xmit()
4458 if (tx_q->tbs & STMMAC_TBS_AVAIL) in stmmac_tso_xmit()
4459 desc = &tx_q->dma_entx[tx_q->cur_tx].basic; in stmmac_tso_xmit()
4461 desc = &tx_q->dma_tx[tx_q->cur_tx]; in stmmac_tso_xmit()
4463 tx_q->tx_count_frames = 0; in stmmac_tso_xmit()
4472 tx_q->cur_tx = STMMAC_GET_ENTRY(tx_q->cur_tx, priv->dma_conf.dma_tx_size); in stmmac_tso_xmit()
4502 tx_q->tx_skbuff_dma[first_entry].last_segment, in stmmac_tso_xmit()
4518 __func__, tx_q->cur_tx, tx_q->dirty_tx, first_entry, in stmmac_tso_xmit()
4519 tx_q->cur_tx, first, nfrags); in stmmac_tso_xmit()
4580 struct stmmac_tx_queue *tx_q; in stmmac_xmit() local
4585 tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_xmit()
4587 first_tx = tx_q->cur_tx; in stmmac_xmit()
4620 has_vlan = stmmac_vlan_insert(priv, skb, tx_q); in stmmac_xmit()
4622 entry = tx_q->cur_tx; in stmmac_xmit()
4624 WARN_ON(tx_q->tx_skbuff[first_entry]); in stmmac_xmit()
4643 desc = (struct dma_desc *)(tx_q->dma_etx + entry); in stmmac_xmit()
4644 else if (tx_q->tbs & STMMAC_TBS_AVAIL) in stmmac_xmit()
4645 desc = &tx_q->dma_entx[entry].basic; in stmmac_xmit()
4647 desc = tx_q->dma_tx + entry; in stmmac_xmit()
4660 entry = stmmac_jumbo_frm(priv, tx_q, skb, csum_insertion); in stmmac_xmit()
4671 WARN_ON(tx_q->tx_skbuff[entry]); in stmmac_xmit()
4674 desc = (struct dma_desc *)(tx_q->dma_etx + entry); in stmmac_xmit()
4675 else if (tx_q->tbs & STMMAC_TBS_AVAIL) in stmmac_xmit()
4676 desc = &tx_q->dma_entx[entry].basic; in stmmac_xmit()
4678 desc = tx_q->dma_tx + entry; in stmmac_xmit()
4685 tx_q->tx_skbuff_dma[entry].buf = des; in stmmac_xmit()
4689 tx_q->tx_skbuff_dma[entry].map_as_page = true; in stmmac_xmit()
4690 tx_q->tx_skbuff_dma[entry].len = len; in stmmac_xmit()
4691 tx_q->tx_skbuff_dma[entry].last_segment = last_segment; in stmmac_xmit()
4692 tx_q->tx_skbuff_dma[entry].buf_type = STMMAC_TXBUF_T_SKB; in stmmac_xmit()
4700 tx_q->tx_skbuff[entry] = skb; in stmmac_xmit()
4701 tx_q->tx_skbuff_dma[entry].buf_type = STMMAC_TXBUF_T_SKB; in stmmac_xmit()
4709 tx_q->tx_count_frames += tx_packets; in stmmac_xmit()
4717 else if ((tx_q->tx_count_frames % in stmmac_xmit()
4725 desc = &tx_q->dma_etx[entry].basic; in stmmac_xmit()
4726 else if (tx_q->tbs & STMMAC_TBS_AVAIL) in stmmac_xmit()
4727 desc = &tx_q->dma_entx[entry].basic; in stmmac_xmit()
4729 desc = &tx_q->dma_tx[entry]; in stmmac_xmit()
4731 tx_q->tx_count_frames = 0; in stmmac_xmit()
4741 tx_q->cur_tx = entry; in stmmac_xmit()
4746 __func__, tx_q->cur_tx, tx_q->dirty_tx, first_entry, in stmmac_xmit()
4782 tx_q->tx_skbuff_dma[first_entry].buf = des; in stmmac_xmit()
4783 tx_q->tx_skbuff_dma[first_entry].buf_type = STMMAC_TXBUF_T_SKB; in stmmac_xmit()
4784 tx_q->tx_skbuff_dma[first_entry].map_as_page = false; in stmmac_xmit()
4788 tx_q->tx_skbuff_dma[first_entry].len = nopaged_len; in stmmac_xmit()
4789 tx_q->tx_skbuff_dma[first_entry].last_segment = last_segment; in stmmac_xmit()
4804 if (tx_q->tbs & STMMAC_TBS_EN) { in stmmac_xmit()
4807 tbs_desc = &tx_q->dma_entx[first_entry]; in stmmac_xmit()
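
For the non-TSO stmmac_xmit() path above, a rough per-skb descriptor budget (ignoring the jumbo-frame special case) is: one optional VLAN context descriptor, since stmmac_vlan_insert() advances cur_tx by one, plus one descriptor for the linear head and one per page fragment. This is an inference from the listing rather than a value the driver computes explicitly:

    /* Rough per-skb descriptor budget on the non-TSO transmit path
     * (hypothetical helper; the driver does not compute this directly). */
    unsigned int xmit_desc_budget(int has_vlan, unsigned int nfrags)
    {
            return (has_vlan ? 1u : 0u) + 1u + nfrags;
    }
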
4972 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_xdp_xmit_xdpf() local
4973 unsigned int entry = tx_q->cur_tx; in stmmac_xdp_xmit_xdpf()
4989 tx_desc = (struct dma_desc *)(tx_q->dma_etx + entry); in stmmac_xdp_xmit_xdpf()
4990 else if (tx_q->tbs & STMMAC_TBS_AVAIL) in stmmac_xdp_xmit_xdpf()
4991 tx_desc = &tx_q->dma_entx[entry].basic; in stmmac_xdp_xmit_xdpf()
4993 tx_desc = tx_q->dma_tx + entry; in stmmac_xdp_xmit_xdpf()
5001 tx_q->tx_skbuff_dma[entry].buf_type = STMMAC_TXBUF_T_XDP_NDO; in stmmac_xdp_xmit_xdpf()
5010 tx_q->tx_skbuff_dma[entry].buf_type = STMMAC_TXBUF_T_XDP_TX; in stmmac_xdp_xmit_xdpf()
5013 tx_q->tx_skbuff_dma[entry].buf = dma_addr; in stmmac_xdp_xmit_xdpf()
5014 tx_q->tx_skbuff_dma[entry].map_as_page = false; in stmmac_xdp_xmit_xdpf()
5015 tx_q->tx_skbuff_dma[entry].len = xdpf->len; in stmmac_xdp_xmit_xdpf()
5016 tx_q->tx_skbuff_dma[entry].last_segment = true; in stmmac_xdp_xmit_xdpf()
5017 tx_q->tx_skbuff_dma[entry].is_jumbo = false; in stmmac_xdp_xmit_xdpf()
5019 tx_q->xdpf[entry] = xdpf; in stmmac_xdp_xmit_xdpf()
5027 tx_q->tx_count_frames++; in stmmac_xdp_xmit_xdpf()
5029 if (tx_q->tx_count_frames % priv->tx_coal_frames[queue] == 0) in stmmac_xdp_xmit_xdpf()
5035 tx_q->tx_count_frames = 0; in stmmac_xdp_xmit_xdpf()
5045 tx_q->cur_tx = entry; in stmmac_xdp_xmit_xdpf()
6155 struct stmmac_tx_queue *tx_q = (struct stmmac_tx_queue *)data; in stmmac_msi_intr_tx() local
6157 int chan = tx_q->queue_index; in stmmac_msi_intr_tx()
6161 dma_conf = container_of(tx_q, struct stmmac_dma_conf, tx_queue[chan]); in stmmac_msi_intr_tx()
6384 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_rings_status_show() local
6390 sysfs_display_ring((void *)tx_q->dma_etx, in stmmac_rings_status_show()
6391 priv->dma_conf.dma_tx_size, 1, seq, tx_q->dma_tx_phy); in stmmac_rings_status_show()
6392 } else if (!(tx_q->tbs & STMMAC_TBS_AVAIL)) { in stmmac_rings_status_show()
6394 sysfs_display_ring((void *)tx_q->dma_tx, in stmmac_rings_status_show()
6395 priv->dma_conf.dma_tx_size, 0, seq, tx_q->dma_tx_phy); in stmmac_rings_status_show()
6879 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_enable_tx_queue() local
6901 tx_q->dma_tx_phy, tx_q->queue_index); in stmmac_enable_tx_queue()
6903 if (tx_q->tbs & STMMAC_TBS_AVAIL) in stmmac_enable_tx_queue()
6904 stmmac_enable_tbs(priv, priv->ioaddr, 1, tx_q->queue_index); in stmmac_enable_tx_queue()
6906 tx_q->tx_tail_addr = tx_q->dma_tx_phy; in stmmac_enable_tx_queue()
6908 tx_q->tx_tail_addr, tx_q->queue_index); in stmmac_enable_tx_queue()
6957 struct stmmac_tx_queue *tx_q; in stmmac_xdp_open() local
7017 tx_q = &priv->dma_conf.tx_queue[chan]; in stmmac_xdp_open()
7020 tx_q->dma_tx_phy, chan); in stmmac_xdp_open()
7022 tx_q->tx_tail_addr = tx_q->dma_tx_phy; in stmmac_xdp_open()
7024 tx_q->tx_tail_addr, chan); in stmmac_xdp_open()
7026 hrtimer_setup(&tx_q->txtimer, stmmac_tx_timer, CLOCK_MONOTONIC, HRTIMER_MODE_REL); in stmmac_xdp_open()
7062 struct stmmac_tx_queue *tx_q; in stmmac_xsk_wakeup() local
7077 tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_xsk_wakeup()
7080 if (!rx_q->xsk_pool && !tx_q->xsk_pool) in stmmac_xsk_wakeup()
7890 struct stmmac_tx_queue *tx_q = &priv->dma_conf.tx_queue[queue]; in stmmac_reset_tx_queue() local
7892 tx_q->cur_tx = 0; in stmmac_reset_tx_queue()
7893 tx_q->dirty_tx = 0; in stmmac_reset_tx_queue()
7894 tx_q->mss = 0; in stmmac_reset_tx_queue()