Lines matching refs: rdata
1131 struct xgbe_ring_data *rdata; in xgbe_free_tx_data() local
1142 rdata = XGBE_GET_DESC_DATA(ring, j); in xgbe_free_tx_data()
1143 desc_if->unmap_rdata(pdata, rdata); in xgbe_free_tx_data()
1154 struct xgbe_ring_data *rdata; in xgbe_free_rx_data() local
1165 rdata = XGBE_GET_DESC_DATA(ring, j); in xgbe_free_rx_data()
1166 desc_if->unmap_rdata(pdata, rdata); in xgbe_free_rx_data()
2325 struct xgbe_ring_data *rdata; in xgbe_rx_refresh() local
2328 rdata = XGBE_GET_DESC_DATA(ring, ring->dirty); in xgbe_rx_refresh()
2331 desc_if->unmap_rdata(pdata, rdata); in xgbe_rx_refresh()
2333 if (desc_if->map_rx_buffer(pdata, ring, rdata)) in xgbe_rx_refresh()
2336 hw_if->rx_desc_reset(pdata, rdata, ring->dirty); in xgbe_rx_refresh()
2346 rdata = XGBE_GET_DESC_DATA(ring, ring->dirty - 1); in xgbe_rx_refresh()
2348 lower_32_bits(rdata->rdesc_dma)); in xgbe_rx_refresh()
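The xgbe_rx_refresh() hits show the RX replenish loop: for each dirty descriptor, fetch its rdata, unmap the consumed buffer, try to map a fresh one, reset the descriptor, and finally program the hardware tail pointer with the low 32 bits of the last refreshed descriptor's DMA address. A simplified, self-contained model of that loop follows; the types, the ring size, and the helper names (get_desc_data, write_rx_tail, and the boolean success convention of map_rx_buffer) are assumptions, not the driver's API:

    #include <stdint.h>
    #include <stdbool.h>

    #define RING_SIZE 256u                       /* hypothetical ring size */

    struct ring_data { uint64_t rdesc_dma; void *page; };
    struct ring {
        struct ring_data rdata[RING_SIZE];
        unsigned int cur, dirty;                 /* free-running indices */
    };

    /* ~XGBE_GET_DESC_DATA(): index modulo the ring size. */
    static struct ring_data *get_desc_data(struct ring *r, unsigned int idx)
    {
        return &r->rdata[idx % RING_SIZE];
    }

    static void unmap_rdata(struct ring_data *rdata) { rdata->page = NULL; }
    static bool map_rx_buffer(struct ring_data *rdata) { rdata->page = &rdata->page; return true; }
    static void rx_desc_reset(struct ring_data *rdata) { (void)rdata; }
    static void write_rx_tail(uint32_t dma_lo) { (void)dma_lo; }

    static void rx_refresh(struct ring *r)
    {
        while (r->dirty != r->cur) {
            struct ring_data *rdata = get_desc_data(r, r->dirty);

            unmap_rdata(rdata);                  /* drop the consumed buffer */

            if (!map_rx_buffer(rdata))           /* stop early if no new buffer */
                break;

            rx_desc_reset(rdata);
            r->dirty++;
        }

        /* Point the hardware tail at the last descriptor made available,
         * like the lower_32_bits(rdata->rdesc_dma) write above. */
        write_rx_tail((uint32_t)get_desc_data(r, r->dirty - 1)->rdesc_dma);
    }

Using free-running cur/dirty indices and reducing modulo the ring size only at lookup time keeps the empty check (dirty == cur) unambiguous, which matches how the listed code indexes the ring.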
2353 struct xgbe_ring_data *rdata, in xgbe_create_skb() argument
2359 skb = napi_alloc_skb(napi, rdata->rx.hdr.dma_len); in xgbe_create_skb()
2366 dma_sync_single_range_for_cpu(pdata->dev, rdata->rx.hdr.dma_base, in xgbe_create_skb()
2367 rdata->rx.hdr.dma_off, in xgbe_create_skb()
2368 rdata->rx.hdr.dma_len, DMA_FROM_DEVICE); in xgbe_create_skb()
2370 packet = page_address(rdata->rx.hdr.pa.pages) + in xgbe_create_skb()
2371 rdata->rx.hdr.pa.pages_offset; in xgbe_create_skb()
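The xgbe_create_skb() hits show the header-copy step: allocate an skb sized for the header buffer, sync the header DMA range for the CPU, and copy the packet start out of the backing page at its offset. A userspace sketch of just that copy, with hypothetical stand-ins (hdr_buf, alloc_skb_buf) for napi_alloc_skb() and the driver's buffer bookkeeping:

    #include <stdlib.h>
    #include <string.h>

    /* Hypothetical stand-in for the header-buffer fields referenced above. */
    struct hdr_buf {
        void *pages;               /* ~page_address(rdata->rx.hdr.pa.pages) */
        size_t pages_offset;       /* ~rdata->rx.hdr.pa.pages_offset */
        size_t dma_len;            /* ~rdata->rx.hdr.dma_len */
    };

    /* ~napi_alloc_skb(): here just a heap buffer. */
    static unsigned char *alloc_skb_buf(size_t len) { return malloc(len); }

    static unsigned char *create_skb(const struct hdr_buf *hdr, size_t copy_len)
    {
        unsigned char *skb = alloc_skb_buf(hdr->dma_len);
        if (!skb)
            return NULL;

        /* The driver syncs the header DMA range for the CPU first
         * (dma_sync_single_range_for_cpu()), then copies out of the page. */
        const unsigned char *packet =
            (const unsigned char *)hdr->pages + hdr->pages_offset;
        memcpy(skb, packet, copy_len);

        return skb;
    }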
2378 static unsigned int xgbe_rx_buf1_len(struct xgbe_ring_data *rdata, in xgbe_rx_buf1_len() argument
2386 if (rdata->rx.hdr_len) in xgbe_rx_buf1_len()
2387 return rdata->rx.hdr_len; in xgbe_rx_buf1_len()
2393 return rdata->rx.hdr.dma_len; in xgbe_rx_buf1_len()
2398 return min_t(unsigned int, rdata->rx.hdr.dma_len, rdata->rx.len); in xgbe_rx_buf1_len()
2401 static unsigned int xgbe_rx_buf2_len(struct xgbe_ring_data *rdata, in xgbe_rx_buf2_len() argument
2407 return rdata->rx.buf.dma_len; in xgbe_rx_buf2_len()
2412 return rdata->rx.len - len; in xgbe_rx_buf2_len()
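xgbe_rx_buf1_len() and xgbe_rx_buf2_len() split a received frame between the header buffer and the data buffer: buf1 is the reported split-header length if the hardware split the header, otherwise the whole header buffer, capped by the total frame length on the last descriptor; buf2 is the whole data buffer except on the last descriptor, where it is whatever remains of the frame. A condensed model of that split, with the driver's first/last-descriptor attribute checks reduced to a simple flag and all names hypothetical:

    /* Hypothetical condensed view of the RX buffer-length split. */
    static unsigned int min_u(unsigned int a, unsigned int b) { return a < b ? a : b; }

    struct rx_info {
        unsigned int hdr_len;      /* header length reported by split-header RX */
        unsigned int hdr_dma_len;  /* size of the header buffer */
        unsigned int buf_dma_len;  /* size of the data buffer */
        unsigned int len;          /* total frame length (valid on last descriptor) */
    };

    static unsigned int rx_buf1_len(const struct rx_info *rx, int last)
    {
        if (rx->hdr_len)                       /* hardware did a header split */
            return rx->hdr_len;
        if (!last)                             /* full header buffer was consumed */
            return rx->hdr_dma_len;
        return min_u(rx->hdr_dma_len, rx->len);
    }

    static unsigned int rx_buf2_len(const struct rx_info *rx, int last,
                                    unsigned int len_so_far)
    {
        if (!last)                             /* full data buffer was consumed */
            return rx->buf_dma_len;
        return rx->len - len_so_far;           /* remainder of the frame */
    }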
2421 struct xgbe_ring_data *rdata; in xgbe_tx_poll() local
2444 rdata = XGBE_GET_DESC_DATA(ring, ring->dirty); in xgbe_tx_poll()
2445 rdesc = rdata->rdesc; in xgbe_tx_poll()
2458 tx_packets += rdata->tx.packets; in xgbe_tx_poll()
2459 tx_bytes += rdata->tx.bytes; in xgbe_tx_poll()
2463 desc_if->unmap_rdata(pdata, rdata); in xgbe_tx_poll()
2464 hw_if->tx_desc_reset(rdata); in xgbe_tx_poll()
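The xgbe_tx_poll() hits show the TX completion walk: advance from ring->dirty while descriptors have been handed back by the hardware, accumulate rdata->tx.packets and rdata->tx.bytes for the completed frames, then unmap and reset each entry. A simplified model of that walk, with the descriptor ownership check reduced to a hypothetical hw_owned flag:

    #include <stdbool.h>

    #define TX_RING_SIZE 256u                    /* hypothetical ring size */

    struct tx_ring_data { unsigned int packets, bytes; bool hw_owned; };
    struct tx_ring { struct tx_ring_data rdata[TX_RING_SIZE]; unsigned int cur, dirty; };

    /* ~desc_if->unmap_rdata() + hw_if->tx_desc_reset() */
    static void unmap_and_reset(struct tx_ring_data *rdata)
    {
        rdata->packets = rdata->bytes = 0;
    }

    static void tx_poll(struct tx_ring *r, unsigned int *tx_packets, unsigned int *tx_bytes)
    {
        while (r->dirty != r->cur) {
            struct tx_ring_data *rdata = &r->rdata[r->dirty % TX_RING_SIZE];

            if (rdata->hw_owned)                 /* descriptor not completed yet */
                break;

            *tx_packets += rdata->packets;       /* stats stashed at transmit time */
            *tx_bytes += rdata->bytes;

            unmap_and_reset(rdata);
            r->dirty++;
        }
    }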
2491 struct xgbe_ring_data *rdata; in xgbe_rx_poll() local
2513 rdata = XGBE_GET_DESC_DATA(ring, ring->cur); in xgbe_rx_poll()
2519 if (!received && rdata->state_saved) { in xgbe_rx_poll()
2520 skb = rdata->state.skb; in xgbe_rx_poll()
2521 error = rdata->state.error; in xgbe_rx_poll()
2522 len = rdata->state.len; in xgbe_rx_poll()
2531 rdata = XGBE_GET_DESC_DATA(ring, ring->cur); in xgbe_rx_poll()
2565 buf1_len = xgbe_rx_buf1_len(rdata, packet); in xgbe_rx_poll()
2567 buf2_len = xgbe_rx_buf2_len(rdata, packet, len); in xgbe_rx_poll()
2570 if (buf2_len > rdata->rx.buf.dma_len) { in xgbe_rx_poll()
2579 skb = xgbe_create_skb(pdata, napi, rdata, in xgbe_rx_poll()
2589 rdata->rx.buf.dma_base, in xgbe_rx_poll()
2590 rdata->rx.buf.dma_off, in xgbe_rx_poll()
2591 rdata->rx.buf.dma_len, in xgbe_rx_poll()
2595 rdata->rx.buf.pa.pages, in xgbe_rx_poll()
2596 rdata->rx.buf.pa.pages_offset, in xgbe_rx_poll()
2598 rdata->rx.buf.dma_len); in xgbe_rx_poll()
2599 rdata->rx.buf.pa.pages = NULL; in xgbe_rx_poll()
2674 rdata = XGBE_GET_DESC_DATA(ring, ring->cur); in xgbe_rx_poll()
2675 rdata->state_saved = 1; in xgbe_rx_poll()
2676 rdata->state.skb = skb; in xgbe_rx_poll()
2677 rdata->state.len = len; in xgbe_rx_poll()
2678 rdata->state.error = error; in xgbe_rx_poll()
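Within xgbe_rx_poll(), the state_saved/state.skb/state.len/state.error hits implement a save-and-restore of partial-packet context: if the NAPI budget runs out in the middle of a multi-descriptor frame, the in-progress skb, accumulated length, and error flag are parked in the current rdata and picked up again on the next poll pass. A minimal sketch of just that save/restore pair, with hypothetical names and the skb reduced to an opaque pointer:

    #include <stddef.h>

    /* ~the state_saved/state.{skb,len,error} fields referenced above. */
    struct saved_state { int saved; void *skb; unsigned int len; int error; };

    /* Restore partial-packet context at the start of a poll pass. */
    static void restore_state(struct saved_state *st, void **skb,
                              unsigned int *len, int *error)
    {
        if (st->saved) {
            *skb = st->skb;
            *len = st->len;
            *error = st->error;
            st->saved = 0;
        }
    }

    /* Stash the in-progress packet when the budget runs out mid-frame. */
    static void save_state(struct saved_state *st, void *skb,
                           unsigned int len, int error)
    {
        st->saved = 1;
        st->skb = skb;
        st->len = len;
        st->error = error;
    }

Keeping the saved state in the ring-data entry at ring->cur means no extra per-channel storage is needed; the next poll resumes at the same descriptor and finds the context right where it left off.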
2758 struct xgbe_ring_data *rdata; in xgbe_dump_tx_desc() local
2762 rdata = XGBE_GET_DESC_DATA(ring, idx); in xgbe_dump_tx_desc()
2763 rdesc = rdata->rdesc; in xgbe_dump_tx_desc()
2778 struct xgbe_ring_data *rdata; in xgbe_dump_rx_desc() local
2781 rdata = XGBE_GET_DESC_DATA(ring, idx); in xgbe_dump_rx_desc()
2782 rdesc = rdata->rdesc; in xgbe_dump_rx_desc()
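The xgbe_dump_tx_desc() and xgbe_dump_rx_desc() hits both resolve an index to its rdata and then dereference rdata->rdesc to print the descriptor words. A tiny sketch of that debug pattern; the four-word descriptor layout and the helper name are assumptions for illustration:

    #include <stdio.h>
    #include <stdint.h>

    /* Hypothetical mirror of a four-word descriptor and its ring-data entry. */
    struct ring_desc { uint32_t desc0, desc1, desc2, desc3; };
    struct ring_data { struct ring_desc *rdesc; };

    static void dump_desc(const char *tag, struct ring_data *rdata, unsigned int idx)
    {
        struct ring_desc *rdesc = rdata->rdesc;

        printf("%s DESC[%u]: %08x %08x %08x %08x\n", tag, idx,
               (unsigned)rdesc->desc0, (unsigned)rdesc->desc1,
               (unsigned)rdesc->desc2, (unsigned)rdesc->desc3);
    }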