Lines matching refs: kring

134 sync_kloop_kring_dump(const char *title, const struct netmap_kring *kring)  in sync_kloop_kring_dump()  argument
138 title, kring->name, kring->nr_hwcur, kring->rhead, in sync_kloop_kring_dump()
139 kring->rcur, kring->rtail, kring->nr_hwtail); in sync_kloop_kring_dump()
146 struct netmap_kring *kring; member
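
The matches that follow all revolve around a handful of struct netmap_kring fields: the kernel's own view of the ring (nr_hwcur, nr_hwtail) and the copies of the application's pointers captured at sync time (rhead, rcur, rtail). A minimal, hypothetical mirror of just those fields, for orientation only (the real struct netmap_kring carries far more state):

#include <stdint.h>

/* Hypothetical, stripped-down mirror of the netmap_kring fields that the
 * sync kloop reads and writes; the roles follow the usual netmap convention. */
struct kring_state {
        uint32_t nkr_num_slots;   /* number of slots in the ring */
        /* kernel's view of the ring */
        uint32_t nr_hwcur;        /* next slot for the kernel to process */
        uint32_t nr_hwtail;       /* first slot owned by the kernel */
        /* application pointers captured at the last sync */
        uint32_t rhead;           /* copy of ring->head taken in the prologue */
        uint32_t rcur;            /* copy of ring->cur taken in the prologue */
        uint32_t rtail;           /* last hwtail value published to the app */
};
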
161 struct netmap_kring *kring = a->kring; in netmap_sync_kloop_tx_ring() local
171 if (unlikely(nm_kr_tryget(kring, 1, NULL))) { in netmap_sync_kloop_tx_ring()
175 num_slots = kring->nkr_num_slots; in netmap_sync_kloop_tx_ring()
185 batch = shadow_ring.head - kring->nr_hwcur; in netmap_sync_kloop_tx_ring()
193 uint32_t head_lim = kring->nr_hwcur + PTN_TX_BATCH_LIM(num_slots); in netmap_sync_kloop_tx_ring()
204 if (nm_kr_txspace(kring) <= (num_slots >> 1)) { in netmap_sync_kloop_tx_ring()
209 shadow_ring.tail = kring->rtail; in netmap_sync_kloop_tx_ring()
210 if (unlikely(nm_txsync_prologue(kring, &shadow_ring) >= num_slots)) { in netmap_sync_kloop_tx_ring()
212 netmap_ring_reinit(kring); in netmap_sync_kloop_tx_ring()
220 sync_kloop_kring_dump("pre txsync", kring); in netmap_sync_kloop_tx_ring()
223 if (unlikely(kring->nm_sync(kring, shadow_ring.flags))) { in netmap_sync_kloop_tx_ring()
237 sync_kloop_kernel_write(csb_ktoa, kring->nr_hwcur, in netmap_sync_kloop_tx_ring()
238 kring->nr_hwtail); in netmap_sync_kloop_tx_ring()
239 if (kring->rtail != kring->nr_hwtail) { in netmap_sync_kloop_tx_ring()
241 kring->rtail = kring->nr_hwtail; in netmap_sync_kloop_tx_ring()
248 sync_kloop_kring_dump("post txsync", kring); in netmap_sync_kloop_tx_ring()
263 if (shadow_ring.head == kring->rhead) { in netmap_sync_kloop_tx_ring()
277 if (shadow_ring.head != kring->rhead) { in netmap_sync_kloop_tx_ring()
286 if (nm_kr_txempty(kring)) { in netmap_sync_kloop_tx_ring()
294 nm_kr_put(kring); in netmap_sync_kloop_tx_ring()
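
Taken together, the netmap_sync_kloop_tx_ring() matches trace one pass of the TX side of the kloop: snapshot the application's head/cur from the CSB into a shadow ring, clamp the batch with PTN_TX_BATCH_LIM(), validate with nm_txsync_prologue() (reinitializing the ring on failure), run kring->nm_sync(), then publish nr_hwcur/nr_hwtail back and notify only if hwtail moved. The sketch below condenses that ordering into plain user-space C; every name in it (csb_atok, csb_ktoa, driver_txsync, ...) is illustrative, and the memory barriers of the real CSB helpers are omitted.

#include <stdbool.h>
#include <stdint.h>

struct csb_atok { volatile uint32_t head, cur; };     /* application -> kernel */
struct csb_ktoa { volatile uint32_t hwcur, hwtail; }; /* kernel -> application */

struct tx_kring {
        uint32_t num_slots;
        uint32_t nr_hwcur, nr_hwtail;  /* kernel view */
        uint32_t rhead, rcur, rtail;   /* app pointers seen at the last sync */
};

/* Placeholders for the real prologue/txsync/notify machinery. */
static bool txsync_prologue_ok(struct tx_kring *kr, uint32_t head, uint32_t cur)
{ (void)kr; (void)head; (void)cur; return true; }
static int driver_txsync(struct tx_kring *kr) { (void)kr; return 0; }
static void notify_application(struct tx_kring *kr) { (void)kr; }

void
tx_kloop_step(struct tx_kring *kr, struct csb_atok *atok, struct csb_ktoa *ktoa)
{
        /* Snapshot the application's view (the "shadow ring"). */
        uint32_t head = atok->head;
        uint32_t cur  = atok->cur;

        /* Bound the work done per pass, as the real loop does with
         * PTN_TX_BATCH_LIM(); the quarter-ring limit used here is arbitrary. */
        uint32_t batch = head - kr->nr_hwcur;
        if (batch >= kr->num_slots)        /* unsigned wrap: head behind hwcur */
                batch += kr->num_slots;
        uint32_t lim = kr->num_slots / 4;
        if (batch > lim) {
                head = kr->nr_hwcur + lim;
                if (head >= kr->num_slots)
                        head -= kr->num_slots;
        }

        /* Never trust the shared pointers blindly: the real code runs
         * nm_txsync_prologue() and calls netmap_ring_reinit() on failure. */
        if (!txsync_prologue_ok(kr, head, cur))
                return;
        kr->rhead = head;
        kr->rcur  = cur;

        if (driver_txsync(kr) != 0)        /* stand-in for kring->nm_sync() */
                return;

        /* Publish the kernel's new state; wake the app only if hwtail moved. */
        ktoa->hwcur  = kr->nr_hwcur;
        ktoa->hwtail = kr->nr_hwtail;
        if (kr->rtail != kr->nr_hwtail) {
                kr->rtail = kr->nr_hwtail;
                notify_application(kr);
        }
}
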
307 sync_kloop_norxslots(struct netmap_kring *kring, uint32_t g_head) in sync_kloop_norxslots() argument
309 return (NM_ACCESS_ONCE(kring->nr_hwtail) == nm_prev(g_head, in sync_kloop_norxslots()
310 kring->nkr_num_slots - 1)); in sync_kloop_norxslots()
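
sync_kloop_norxslots() answers a single question: has nr_hwtail caught up with the slot just before the application's head, i.e. is there no slot left for the kernel to receive into? The helper below spells out that test with the usual netmap ring-index arithmetic (lim is nkr_num_slots - 1); the names are illustrative.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Previous index on a ring whose last valid index is lim (= num_slots - 1). */
static uint32_t
ring_prev(uint32_t i, uint32_t lim)
{
        return i == 0 ? lim : i - 1;
}

/* True when the kernel has filled every slot it may use: hwtail sits right
 * behind the application's head, so an rxsync cannot deliver anything new. */
static bool
no_rx_slots(uint32_t hwtail, uint32_t app_head, uint32_t lim)
{
        return hwtail == ring_prev(app_head, lim);
}

int
main(void)
{
        /* 1024-slot ring: head = 10, hwtail = 9 -> no room left. */
        printf("%d\n", no_rx_slots(9, 10, 1023));   /* prints 1 */
        printf("%d\n", no_rx_slots(5, 10, 1023));   /* prints 0 */
        return 0;
}
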
317 struct netmap_kring *kring = a->kring; in netmap_sync_kloop_rx_ring() local
327 if (unlikely(nm_kr_tryget(kring, 1, NULL))) { in netmap_sync_kloop_rx_ring()
331 num_slots = kring->nkr_num_slots; in netmap_sync_kloop_rx_ring()
334 num_slots = kring->nkr_num_slots; in netmap_sync_kloop_rx_ring()
347 shadow_ring.tail = kring->rtail; in netmap_sync_kloop_rx_ring()
348 if (unlikely(nm_rxsync_prologue(kring, &shadow_ring) >= num_slots)) { in netmap_sync_kloop_rx_ring()
350 netmap_ring_reinit(kring); in netmap_sync_kloop_rx_ring()
358 sync_kloop_kring_dump("pre rxsync", kring); in netmap_sync_kloop_rx_ring()
361 if (unlikely(kring->nm_sync(kring, shadow_ring.flags))) { in netmap_sync_kloop_rx_ring()
374 hwtail = NM_ACCESS_ONCE(kring->nr_hwtail); in netmap_sync_kloop_rx_ring()
375 sync_kloop_kernel_write(csb_ktoa, kring->nr_hwcur, hwtail); in netmap_sync_kloop_rx_ring()
376 if (kring->rtail != hwtail) { in netmap_sync_kloop_rx_ring()
377 kring->rtail = hwtail; in netmap_sync_kloop_rx_ring()
387 sync_kloop_kring_dump("post rxsync", kring); in netmap_sync_kloop_rx_ring()
402 if (sync_kloop_norxslots(kring, shadow_ring.head)) { in netmap_sync_kloop_rx_ring()
416 if (!sync_kloop_norxslots(kring, shadow_ring.head)) { in netmap_sync_kloop_rx_ring()
425 hwtail = NM_ACCESS_ONCE(kring->nr_hwtail); in netmap_sync_kloop_rx_ring()
426 if (unlikely(hwtail == kring->rhead || in netmap_sync_kloop_rx_ring()
431 hwtail, kring->rhead, dry_cycles); in netmap_sync_kloop_rx_ring()
436 nm_kr_put(kring); in netmap_sync_kloop_rx_ring()
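
On the RX side the post-sync bookkeeping differs slightly from TX: the loop re-reads nr_hwtail once (it can be advanced concurrently, hence NM_ACCESS_ONCE), publishes hwcur and that snapshot through the CSB, notifies the application only if hwtail moved, and keeps iterating only while sync_kloop_norxslots() says there is still room. The sketch below mirrors that step with illustrative names; the dry_cycles counter the real loop uses to detect a stuck RX ring is left out.

#include <stdbool.h>
#include <stdint.h>

struct csb_ktoa { volatile uint32_t hwcur, hwtail; }; /* kernel -> application */

struct rx_kring {
        uint32_t num_slots;
        uint32_t nr_hwcur;
        volatile uint32_t nr_hwtail;   /* may be advanced from interrupt context */
        uint32_t rhead, rtail;
};

static void notify_application(struct rx_kring *kr) { (void)kr; }

/* One RX publish step: take a single hwtail snapshot, expose hwcur/hwtail
 * through the shared block, wake the application only if hwtail moved, and
 * report whether the kernel still has slots to receive into. */
bool
rx_kloop_publish(struct rx_kring *kr, struct csb_ktoa *ktoa, uint32_t app_head)
{
        uint32_t hwtail = kr->nr_hwtail;   /* one read, like NM_ACCESS_ONCE() */

        ktoa->hwcur  = kr->nr_hwcur;
        ktoa->hwtail = hwtail;
        if (kr->rtail != hwtail) {
                kr->rtail = hwtail;
                notify_application(kr);
        }

        /* No room left for the kernel once hwtail sits just behind head. */
        uint32_t lim = kr->num_slots - 1;
        uint32_t prev_head = (app_head == 0) ? lim : app_head - 1;
        return hwtail != prev_head;
}
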
641 a->kring = NMR(na, NR_TX)[i + priv->np_qfirst[NR_TX]]; in netmap_sync_kloop()
650 a->kring = NMR(na, NR_RX)[i + priv->np_qfirst[NR_RX]]; in netmap_sync_kloop()
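
In netmap_sync_kloop() itself, the two matches above show how each per-ring context is bound to its kring: the loop index is offset by np_qfirst, the first TX/RX ring attached to this file descriptor. A toy version of that selection, with hypothetical types:

#include <stdint.h>

struct kring;                          /* opaque here */

struct ring_worker_args {
        struct kring *kring;           /* the ring this context will sync */
        /* ... CSB pointers, notification hooks, etc. in the real struct ... */
};

/* Rings [qfirst, qlast) belong to this file descriptor, so context i is
 * bound to absolute ring index qfirst + i. */
void
bind_worker(struct ring_worker_args *a, struct kring **rings,
    uint32_t qfirst, uint32_t i)
{
        a->kring = rings[qfirst + i];
}
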
973 struct netmap_kring *kring, int flags) in netmap_pt_guest_txsync() argument
984 kring->nr_hwcur = ktoa->hwcur; in netmap_pt_guest_txsync()
985 nm_sync_kloop_appl_write(atok, kring->rcur, kring->rhead); in netmap_pt_guest_txsync()
988 if (((kring->rhead != kring->nr_hwcur || nm_kr_wouldblock(kring)) in netmap_pt_guest_txsync()
998 if (nm_kr_wouldblock(kring) || (flags & NAF_FORCE_RECLAIM)) { in netmap_pt_guest_txsync()
999 nm_sync_kloop_appl_read(ktoa, &kring->nr_hwtail, in netmap_pt_guest_txsync()
1000 &kring->nr_hwcur); in netmap_pt_guest_txsync()
1008 if (nm_kr_wouldblock(kring) && !(kring->nr_kflags & NKR_NOINTR)) { in netmap_pt_guest_txsync()
1013 nm_sync_kloop_appl_read(ktoa, &kring->nr_hwtail, in netmap_pt_guest_txsync()
1014 &kring->nr_hwcur); in netmap_pt_guest_txsync()
1016 if (unlikely(!nm_kr_wouldblock(kring))) { in netmap_pt_guest_txsync()
1022 kring->name, atok->head, atok->cur, ktoa->hwtail, in netmap_pt_guest_txsync()
1023 kring->rhead, kring->rcur, kring->nr_hwtail); in netmap_pt_guest_txsync()
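
netmap_pt_guest_txsync() is the guest-side half of the same protocol: publish the application's cur/head to the CSB, kick the host only when it asked for one and there is work or no space, pull hwcur/hwtail back when reclaiming, and, before blocking, re-enable notifications and read the CSB once more so an update that raced with the request is not lost. A hedged sketch of that ordering follows; the names are made up, the NAF_FORCE_RECLAIM/NKR_NOINTR handling is dropped, and the store-load barrier used by the real double check is omitted.

#include <stdbool.h>
#include <stdint.h>

struct csb_atok {                       /* guest/application -> host kloop */
        volatile uint32_t head, cur;
        volatile uint32_t appl_need_kick;  /* guest wants to be notified */
};
struct csb_ktoa {                       /* host kloop -> guest/application */
        volatile uint32_t hwcur, hwtail;
        volatile uint32_t kern_need_kick;  /* host wants to be kicked */
};

struct guest_kring {
        uint32_t rhead, rcur;
        uint32_t nr_hwcur, nr_hwtail;
};

/* Simplistic stand-in for nm_kr_wouldblock(): cur has caught up with hwtail. */
static bool would_block(const struct guest_kring *kr)
{ return kr->rcur == kr->nr_hwtail; }
static void kick_host(void) { /* e.g. write a doorbell register */ }

void
guest_txsync(struct guest_kring *kr, struct csb_atok *atok, struct csb_ktoa *ktoa)
{
        atok->appl_need_kick = 0;       /* run with notifications off */

        /* 1. Tell the host kloop about the new slots to transmit. */
        kr->nr_hwcur = ktoa->hwcur;     /* pick up the host's progress */
        atok->cur  = kr->rcur;
        atok->head = kr->rhead;

        /* 2. Kick the host only if it asked for it and there is either new
         *    work or no space left (the nm_kr_wouldblock() checks above). */
        if ((kr->rhead != kr->nr_hwcur || would_block(kr)) &&
            ktoa->kern_need_kick)
                kick_host();

        /* 3. Reclaim completed transmissions when space is tight. */
        if (would_block(kr)) {
                kr->nr_hwtail = ktoa->hwtail;
                kr->nr_hwcur  = ktoa->hwcur;
        }

        /* 4. Still no room: ask to be notified, then re-read once more so a
         *    CSB update that raced with the request is not missed. */
        if (would_block(kr)) {
                atok->appl_need_kick = 1;
                kr->nr_hwtail = ktoa->hwtail;
                kr->nr_hwcur  = ktoa->hwcur;
                if (!would_block(kr))
                        atok->appl_need_kick = 0;  /* space appeared after all */
        }
}
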
1041 struct netmap_kring *kring, int flags) in netmap_pt_guest_rxsync() argument
1053 nm_sync_kloop_appl_read(ktoa, &kring->nr_hwtail, &kring->nr_hwcur); in netmap_pt_guest_rxsync()
1054 kring->nr_kflags &= ~NKR_PENDINTR; in netmap_pt_guest_rxsync()
1060 if (kring->rhead != kring->nr_hwcur) { in netmap_pt_guest_rxsync()
1061 nm_sync_kloop_appl_write(atok, kring->rcur, kring->rhead); in netmap_pt_guest_rxsync()
1069 if (nm_kr_wouldblock(kring) && !(kring->nr_kflags & NKR_NOINTR)) { in netmap_pt_guest_rxsync()
1074 nm_sync_kloop_appl_read(ktoa, &kring->nr_hwtail, in netmap_pt_guest_rxsync()
1075 &kring->nr_hwcur); in netmap_pt_guest_rxsync()
1077 if (!nm_kr_wouldblock(kring)) { in netmap_pt_guest_rxsync()
1083 if ((kring->rhead != kring->nr_hwcur || nm_kr_wouldblock(kring)) in netmap_pt_guest_rxsync()
1090 kring->name, atok->head, atok->cur, ktoa->hwtail, in netmap_pt_guest_rxsync()
1091 kring->rhead, kring->rcur, kring->nr_hwtail); in netmap_pt_guest_rxsync()
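
The RX twin runs the same discipline in the other direction: import hwcur/hwtail from the host first, return freed slots only when head actually moved, kick the host if it asked for one, and, before sleeping on an empty ring, re-enable notifications and read the CSB once more. A compact sketch of that path, again with illustrative names, no NKR_PENDINTR/NKR_NOINTR handling, and the barriers of the real helpers left out:

#include <stdbool.h>
#include <stdint.h>

struct csb_atok { volatile uint32_t head, cur, appl_need_kick; };
struct csb_ktoa { volatile uint32_t hwcur, hwtail, kern_need_kick; };

struct guest_kring {
        uint32_t rhead, rcur;
        uint32_t nr_hwcur, nr_hwtail;
};

/* Simplistic stand-in for nm_kr_wouldblock(): nothing new past cur. */
static bool would_block(const struct guest_kring *kr)
{ return kr->rcur == kr->nr_hwtail; }
static void kick_host(void) { }

void
guest_rxsync(struct guest_kring *kr, struct csb_atok *atok, struct csb_ktoa *ktoa)
{
        atok->appl_need_kick = 0;       /* run with notifications off */

        /* 1. Import newly received packets from the host's view. */
        kr->nr_hwtail = ktoa->hwtail;
        kr->nr_hwcur  = ktoa->hwcur;

        /* 2. Return the slots the application has released, if any. */
        if (kr->rhead != kr->nr_hwcur) {
                atok->cur  = kr->rcur;
                atok->head = kr->rhead;
        }

        /* 3. Kick the host if it asked for one and we either released slots
         *    or are about to block on an empty ring. */
        if ((kr->rhead != kr->nr_hwcur || would_block(kr)) &&
            ktoa->kern_need_kick)
                kick_host();

        /* 4. Empty ring: ask to be notified, then double check the CSB so a
         *    batch that arrived in the meantime is seen right away. */
        if (would_block(kr)) {
                atok->appl_need_kick = 1;
                kr->nr_hwtail = ktoa->hwtail;
                kr->nr_hwcur  = ktoa->hwcur;
                if (!would_block(kr))
                        atok->appl_need_kick = 0;
        }
}
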