Lines matching defs:pool. These matches come from include/net/xdp_sock_drv.h, the Linux kernel's driver-facing AF_XDP (XSk) buffer-pool API; the bare number at the start of each matched line is that header's own line number.

23 void xsk_tx_completed(struct xsk_buff_pool *pool, u32 nb_entries);
24 bool xsk_tx_peek_desc(struct xsk_buff_pool *pool, struct xdp_desc *desc);
25 u32 xsk_tx_peek_release_desc_batch(struct xsk_buff_pool *pool, u32 max);
26 void xsk_tx_release(struct xsk_buff_pool *pool);
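
The four Tx-path declarations above (lines 23-26) are how a zero-copy driver drains the socket's Tx ring: peek descriptors, post them to hardware, release the peeked entries, and later credit completions back. A minimal sketch of that flow, assuming a hypothetical mydrv driver; only the xsk_* calls and struct xdp_desc come from this header, everything named mydrv_* is illustrative:

#include <net/xdp_sock_drv.h>

/* Illustrative ring state; these fields are assumptions, not a real driver. */
struct mydrv_ring {
        struct xsk_buff_pool *xsk_pool;
        u32 free_slots;
};

static void mydrv_xsk_xmit(struct mydrv_ring *ring)
{
        struct xsk_buff_pool *pool = ring->xsk_pool;
        struct xdp_desc desc;
        u32 sent = 0;

        while (sent < ring->free_slots && xsk_tx_peek_desc(pool, &desc)) {
                dma_addr_t dma = xsk_buff_raw_get_dma(pool, desc.addr);

                xsk_buff_raw_dma_sync_for_device(pool, dma, desc.len);
                mydrv_post_tx(ring, dma, desc.len);     /* hypothetical HW write */
                sent++;
        }

        if (sent) {
                xsk_tx_release(pool);           /* commit the peeked entries */
                mydrv_ring_doorbell(ring);      /* hypothetical */
        }
}

/* Tx completion path: credit finished entries back to the completion ring. */
static void mydrv_clean_tx(struct mydrv_ring *ring, u32 done)
{
        xsk_tx_completed(ring->xsk_pool, done);
}

Drivers that can post many descriptors at once use xsk_tx_peek_release_desc_batch() instead, which folds the peek and release steps into a single call.
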
29 void xsk_set_rx_need_wakeup(struct xsk_buff_pool *pool);
30 void xsk_set_tx_need_wakeup(struct xsk_buff_pool *pool);
31 void xsk_clear_rx_need_wakeup(struct xsk_buff_pool *pool);
32 void xsk_clear_tx_need_wakeup(struct xsk_buff_pool *pool);
33 bool xsk_uses_need_wakeup(struct xsk_buff_pool *pool);
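
The need_wakeup helpers (lines 29-33) implement the syscall-avoidance protocol: the driver sets the flag when it has stopped processing, so user space knows it must kick the kernel (sendto() for Tx, the poll() family for Rx), and clears it while actively running so user space can keep producing without syscalls. A hedged sketch of the usual end-of-poll pattern; mydrv_update_tx_wakeup() is illustrative:

static void mydrv_update_tx_wakeup(struct xsk_buff_pool *pool, bool ring_has_work)
{
        /* All of this is a no-op unless the socket was bound with
         * XDP_USE_NEED_WAKEUP, which is what xsk_uses_need_wakeup() reports.
         */
        if (!xsk_uses_need_wakeup(pool))
                return;

        if (ring_has_work)
                xsk_clear_tx_need_wakeup(pool);
        else
                xsk_set_tx_need_wakeup(pool);
}
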
35 static inline u32 xsk_pool_get_headroom(struct xsk_buff_pool *pool)
37 return XDP_PACKET_HEADROOM + pool->headroom;
40 static inline u32 xsk_pool_get_chunk_size(struct xsk_buff_pool *pool)
42 return pool->chunk_size;
45 static inline u32 xsk_pool_get_rx_frame_size(struct xsk_buff_pool *pool)
47 return xsk_pool_get_chunk_size(pool) - xsk_pool_get_headroom(pool);
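
A worked example of the arithmetic above: with the default 4096-byte chunk, the fixed 256-byte XDP_PACKET_HEADROOM, and no extra user-configured pool->headroom, xsk_pool_get_rx_frame_size() returns 4096 - (256 + 0) = 3840 usable bytes per Rx frame. A hedged sketch of the MTU check a driver might derive from it; mydrv_validate_mtu() is illustrative:

#include <linux/if_ether.h>

static int mydrv_validate_mtu(struct xsk_buff_pool *pool, u32 mtu)
{
        /* The full frame, Ethernet header included, must fit in one chunk
         * after both headroom reservations are subtracted.
         */
        if (mtu + ETH_HLEN > xsk_pool_get_rx_frame_size(pool))
                return -EINVAL;

        return 0;
}
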
50 static inline void xsk_pool_set_rxq_info(struct xsk_buff_pool *pool,
53 xp_set_rxq_info(pool, rxq);
56 static inline void xsk_pool_fill_cb(struct xsk_buff_pool *pool,
59 xp_fill_cb(pool, desc);
62 static inline void xsk_pool_dma_unmap(struct xsk_buff_pool *pool,
65 xp_dma_unmap(pool, attrs);
68 static inline int xsk_pool_dma_map(struct xsk_buff_pool *pool,
71 struct xdp_umem *umem = pool->umem;
73 return xp_dma_map(pool, dev, attrs, umem->pgs, umem->npgs);
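
Lines 50-73 are the setup and teardown hooks: xsk_pool_dma_map() DMA-maps every UMEM page for the device once per pool, and xsk_pool_set_rxq_info() tells the pool which xdp_rxq_info to stamp into the buffers it hands out. A hedged sketch of an enable/disable pair; the mydrv_queue layout is an assumption, and real drivers commonly pass DMA attributes such as DMA_ATTR_SKIP_CPU_SYNC | DMA_ATTR_WEAK_ORDERING where this sketch passes 0:

struct mydrv_queue {
        struct device *dev;             /* the netdev's DMA device */
        struct xdp_rxq_info xdp_rxq;    /* registered elsewhere in the driver */
};

static int mydrv_xsk_enable(struct mydrv_queue *q, struct xsk_buff_pool *pool)
{
        int err;

        err = xsk_pool_dma_map(pool, q->dev, 0);
        if (err)
                return err;

        xsk_pool_set_rxq_info(pool, &q->xdp_rxq);
        return 0;
}

static void mydrv_xsk_disable(struct mydrv_queue *q, struct xsk_buff_pool *pool)
{
        xsk_pool_dma_unmap(pool, 0);
}
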
90 static inline struct xdp_buff *xsk_buff_alloc(struct xsk_buff_pool *pool)
92 return xp_alloc(pool);
101 static inline u32 xsk_buff_alloc_batch(struct xsk_buff_pool *pool, struct xdp_buff **xdp, u32 max)
103 return xp_alloc_batch(pool, xdp, max);
106 static inline bool xsk_buff_can_alloc(struct xsk_buff_pool *pool, u32 count)
108 return xp_can_alloc(pool, count);
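
The allocation trio (lines 90-108) is the Rx refill side: xsk_buff_alloc() and xsk_buff_alloc_batch() pull buffers that are already DMA-mapped out of the fill ring, and xsk_buff_can_alloc() lets a driver check availability before committing to a refill. A hedged refill sketch; xsk_buff_xdp_get_dma() is another helper from this same header, and the mydrv_* pieces are assumptions:

struct mydrv_rx_ring {
        struct xsk_buff_pool *xsk_pool;
        struct xdp_buff **xdp_bufs;     /* one slot per HW descriptor */
};

static u32 mydrv_refill_rx(struct mydrv_rx_ring *ring, u32 budget)
{
        u32 i, n;

        /* May return fewer buffers than requested if the fill ring runs dry. */
        n = xsk_buff_alloc_batch(ring->xsk_pool, ring->xdp_bufs, budget);

        for (i = 0; i < n; i++) {
                dma_addr_t dma = xsk_buff_xdp_get_dma(ring->xdp_bufs[i]);

                mydrv_post_rx_desc(ring, i, dma);       /* hypothetical HW write */
        }

        return n;
}
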
114 struct list_head *xskb_list = &xskb->pool->xskb_list;
142 list_add_tail(&frag->list_node, &frag->pool->xskb_list);
153 frag = list_first_entry_or_null(&xskb->pool->xskb_list,
175 frag = list_first_entry(&xskb->pool->xskb_list, struct xdp_buff_xsk,
185 frag = list_last_entry(&xskb->pool->xskb_list, struct xdp_buff_xsk,
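
The xskb_list matches above (lines 114-185) appear to come from the multi-buffer helpers (xsk_buff_add_frag(), xsk_buff_get_frag(), xsk_buff_get_tail() and xsk_buff_del_tail() in current kernels), which chain the fragments of a multi-buffer frame on the owning pool's xskb_list so that tail fragments can be looked up and freed together with the head buffer.
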
198 static inline dma_addr_t xsk_buff_raw_get_dma(struct xsk_buff_pool *pool,
201 return xp_raw_get_dma(pool, addr);
204 static inline void *xsk_buff_raw_get_data(struct xsk_buff_pool *pool, u64 addr)
206 return xp_raw_get_data(pool, addr);
211 * @pool: XSk buff pool desc address belongs to
221 xsk_buff_raw_get_ctx(const struct xsk_buff_pool *pool, u64 addr)
223 return xp_raw_get_ctx(pool, addr);
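
xsk_buff_raw_get_ctx() (line 221) folds the two lookups above, DMA address and Tx metadata, into a single call per descriptor address. A hedged sketch, assuming the struct xdp_desc_ctx return type with .dma and .meta members and reusing the illustrative mydrv_ring from the earlier Tx sketch:

static void mydrv_post_tx_with_meta(struct mydrv_ring *ring,
                                    const struct xdp_desc *desc)
{
        struct xdp_desc_ctx ctx;

        ctx = xsk_buff_raw_get_ctx(ring->xsk_pool, desc->addr);

        xsk_buff_raw_dma_sync_for_device(ring->xsk_pool, ctx.dma, desc->len);
        mydrv_post_tx(ring, ctx.dma, desc->len);        /* hypothetical */

        if (ctx.meta)                                   /* Tx metadata present */
                mydrv_apply_meta(ring, ctx.meta);       /* hypothetical */
}
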
239 __xsk_buff_get_metadata(const struct xsk_buff_pool *pool, void *data)
243 if (!pool->tx_metadata_len)
246 meta = data - pool->tx_metadata_len;
254 xsk_buff_get_metadata(struct xsk_buff_pool *pool, u64 addr)
256 return __xsk_buff_get_metadata(pool, xp_raw_get_data(pool, addr));
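
The metadata getters (lines 239-256) return the struct xsk_tx_metadata region that user space placed immediately before the packet payload, or NULL when the UMEM was registered without a tx_metadata_len. A hedged sketch of a driver consulting it; XDP_TXMD_FLAGS_CHECKSUM and the request fields come from the AF_XDP Tx-metadata UAPI, while struct mydrv_tx_desc is an assumption:

struct mydrv_tx_desc {
        u16 csum_start;         /* illustrative HW descriptor fields */
        u16 csum_offset;
};

static void mydrv_apply_tx_metadata(struct xsk_buff_pool *pool, u64 addr,
                                    struct mydrv_tx_desc *txd)
{
        struct xsk_tx_metadata *meta = xsk_buff_get_metadata(pool, addr);

        if (!meta)      /* no Tx metadata configured on this UMEM */
                return;

        if (meta->flags & XDP_TXMD_FLAGS_CHECKSUM) {
                txd->csum_start = meta->request.csum_start;
                txd->csum_offset = meta->request.csum_offset;
        }
}
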
266 static inline void xsk_buff_raw_dma_sync_for_device(struct xsk_buff_pool *pool,
270 xp_dma_sync_for_device(pool, dma, size);
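
Everything from line 275 onward is the same API again as no-op stubs (empty bodies, or returns of 0/false/NULL), compiled when CONFIG_XDP_SOCKETS is disabled; this is what lets drivers call these helpers unconditionally rather than guarding every call site with #ifdef CONFIG_XDP_SOCKETS.
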
275 static inline void xsk_tx_completed(struct xsk_buff_pool *pool, u32 nb_entries)
279 static inline bool xsk_tx_peek_desc(struct xsk_buff_pool *pool,
285 static inline u32 xsk_tx_peek_release_desc_batch(struct xsk_buff_pool *pool, u32 max)
290 static inline void xsk_tx_release(struct xsk_buff_pool *pool)
300 static inline void xsk_set_rx_need_wakeup(struct xsk_buff_pool *pool)
304 static inline void xsk_set_tx_need_wakeup(struct xsk_buff_pool *pool)
308 static inline void xsk_clear_rx_need_wakeup(struct xsk_buff_pool *pool)
312 static inline void xsk_clear_tx_need_wakeup(struct xsk_buff_pool *pool)
316 static inline bool xsk_uses_need_wakeup(struct xsk_buff_pool *pool)
321 static inline u32 xsk_pool_get_headroom(struct xsk_buff_pool *pool)
326 static inline u32 xsk_pool_get_chunk_size(struct xsk_buff_pool *pool)
331 static inline u32 xsk_pool_get_rx_frame_size(struct xsk_buff_pool *pool)
336 static inline void xsk_pool_set_rxq_info(struct xsk_buff_pool *pool,
341 static inline void xsk_pool_fill_cb(struct xsk_buff_pool *pool,
346 static inline void xsk_pool_dma_unmap(struct xsk_buff_pool *pool,
351 static inline int xsk_pool_dma_map(struct xsk_buff_pool *pool,
367 static inline struct xdp_buff *xsk_buff_alloc(struct xsk_buff_pool *pool)
377 static inline u32 xsk_buff_alloc_batch(struct xsk_buff_pool *pool, struct xdp_buff **xdp, u32 max)
382 static inline bool xsk_buff_can_alloc(struct xsk_buff_pool *pool, u32 count)
420 static inline dma_addr_t xsk_buff_raw_get_dma(struct xsk_buff_pool *pool,
426 static inline void *xsk_buff_raw_get_data(struct xsk_buff_pool *pool, u64 addr)
432 xsk_buff_raw_get_ctx(const struct xsk_buff_pool *pool, u64 addr)
443 __xsk_buff_get_metadata(const struct xsk_buff_pool *pool, void *data)
449 xsk_buff_get_metadata(struct xsk_buff_pool *pool, u64 addr)
458 static inline void xsk_buff_raw_dma_sync_for_device(struct xsk_buff_pool *pool,