Searched refs:cached_prod (Results 1 – 3 of 3) sorted by relevance
Result 1: xsk_queue.h (kernel-side queue, xskq_* helpers)

  line 43:  u32 cached_prod;  (member declaration)
  line 130: if (q->cached_cons != q->cached_prod) {  in xskq_cons_read_addr_unchecked()
  line 219: return q->cached_cons != q->cached_prod;  in xskq_has_descs()
  line 237: if (q->cached_cons != q->cached_prod) {  in xskq_cons_read_desc()
  line 272: while (cached_cons != q->cached_prod && nb_entries < max) {  in xskq_cons_read_desc_batch()
  line 312: q->cached_prod = smp_load_acquire(&q->ring->producer); /* C, matches B */  in __xskq_cons_peek()
  line 323: u32 entries = q->cached_prod - q->cached_cons;  in xskq_cons_nb_entries()
  line 329: entries = q->cached_prod - q->cached_cons;  in xskq_cons_nb_entries()
  line 336: if (q->cached_prod == q->cached_cons)  in xskq_cons_peek_addr_unchecked()
  line 345: if (q->cached_prod == q->cached_cons)  in xskq_cons_peek_desc()
  [all …]
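The consumer-side hits all follow one pattern: compare a locally cached producer index against the cached consumer index, and only when the cache looks empty re-read the shared producer index with acquire ordering (the smp_load_acquire() at line 312, which pairs with the producer's release store). A minimal sketch of that peek path, using C11 atomics in place of the kernel primitives; the names here are illustrative, not the kernel's:

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdint.h>

    struct ring {
        _Atomic uint32_t producer;  /* bumped by the producer side */
        uint32_t cached_prod;       /* consumer's local snapshot */
        uint32_t cached_cons;       /* consumer's local progress */
    };

    /* Sketch of the __xskq_cons_peek()/xskq_cons_nb_entries() idea:
     * the fast path never touches the shared cache line; the slow path
     * refreshes the snapshot with acquire semantics so that entry data
     * written before the producer bump is visible afterwards. */
    static bool cons_peek(struct ring *q)
    {
        if (q->cached_cons != q->cached_prod)
            return true;  /* cached entries still pending */

        q->cached_prod = atomic_load_explicit(&q->producer,
                                              memory_order_acquire);
        return q->cached_cons != q->cached_prod;
    }

The payoff is that a burst of peeks costs one read of the shared producer index rather than one per entry.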
Result 2: xsk.h (userspace AF_XDP ring helpers)

  line 29:  __u32 cached_prod; \  (member declaration)
  line 88:  __u32 free_entries = r->cached_cons - r->cached_prod;  in xsk_prod_nb_free()
  line 98:  * free_entries = r->cached_prod - r->cached_cons + r->size.  in xsk_prod_nb_free()
  line 103: return r->cached_cons - r->cached_prod;  in xsk_prod_nb_free()
  line 108: __u32 entries = r->cached_prod - r->cached_cons;  in xsk_cons_nb_avail()
  line 111: r->cached_prod = __atomic_load_n(r->producer, __ATOMIC_ACQUIRE);  in xsk_cons_nb_avail()
  line 112: entries = r->cached_prod - r->cached_cons;  in xsk_cons_nb_avail()
  line 123: *idx = prod->cached_prod;  in xsk_ring_prod__reserve()
  line 124: prod->cached_prod += nb;  in xsk_ring_prod__reserve()
  line 139: prod->cached_prod  in xsk_ring_prod__cancel()
  [all …]
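On the userspace side the same caching is paired with free-running 32-bit indices: entries = cached_prod - cached_cons stays correct across wraparound because the subtraction is modulo 2^32, and xsk_prod_nb_free() keeps cached_cons biased by the ring size so that free space is also a single subtraction. A hedged sketch of those helpers plus the reserve step, with the struct layout assumed from the snippets above rather than copied from xsk.h:

    #include <stdatomic.h>
    #include <stdint.h>

    struct prod_ring {
        const _Atomic uint32_t *consumer; /* shared index, advanced by the peer */
        uint32_t cached_cons;             /* snapshot, kept size entries ahead */
        uint32_t cached_prod;
        uint32_t size;                    /* ring capacity in entries */
    };

    /* Modeled on xsk_prod_nb_free(): with the size bias folded into
     * cached_cons, free space is (cons + size) - prod in one unsigned
     * subtraction, correct even after the 32-bit indices wrap. */
    static uint32_t prod_nb_free(struct prod_ring *r, uint32_t nb)
    {
        uint32_t free_entries = r->cached_cons - r->cached_prod;

        if (free_entries >= nb)
            return free_entries;

        /* Slow path: refresh the consumer snapshot (acquire) and
         * re-apply the size bias before recomputing. */
        r->cached_cons = atomic_load_explicit(r->consumer,
                                              memory_order_acquire);
        r->cached_cons += r->size;
        return r->cached_cons - r->cached_prod;
    }

    /* Modeled on xsk_ring_prod__reserve(): hand out nb slots starting
     * at the cached producer index, then advance only the cache; the
     * shared producer index is published later, at submit time. */
    static uint32_t prod_reserve(struct prod_ring *r, uint32_t nb, uint32_t *idx)
    {
        if (prod_nb_free(r, nb) < nb)
            return 0;
        *idx = r->cached_prod;
        r->cached_prod += nb;
        return nb;
    }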
Result 3: xsk.c

  line 640: rx->cached_prod = *rx->producer;  in xsk_socket__create_shared()
  line 661: tx->cached_prod = *tx->producer;  in xsk_socket__create_shared()
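The xsk.c hits are the setup half of the scheme: when xsk_socket__create_shared() maps the RX and TX rings, it seeds each ring's cache directly from the shared index so the first peek or reserve starts from a consistent view. A sketch of that seeding, with the struct as a hypothetical stand-in rather than the library's layout (in the library, the TX ring's cached_cons is additionally seeded with the ring size added, matching the bias used by xsk_prod_nb_free()):

    #include <stdint.h>

    /* Illustrative stand-in for the mmap'ed ring bookkeeping. */
    struct ring_cache {
        uint32_t *producer;  /* points into the shared ring metadata */
        uint32_t *consumer;
        uint32_t cached_prod;
        uint32_t cached_cons;
    };

    /* Mirrors lines 640/661 above: plain loads suffice here because
     * the ring was just created and nothing has been produced yet. */
    static void ring_cache_init(struct ring_cache *r)
    {
        r->cached_prod = *r->producer;
        r->cached_cons = *r->consumer;
    }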