Lines matching refs:ring (cross-reference hits for the symbol 'ring' in the viona virtqueue ring code)

107 vq_page_hold(viona_vring_t *ring, uint64_t gpa, bool writable)  in vq_page_hold() argument
109 ASSERT3P(ring->vr_lease, !=, NULL); in vq_page_hold()
116 return (vmm_drv_page_hold(ring->vr_lease, gpa, prot)); in vq_page_hold()
133 vq_region_hold(viona_vring_t *ring, uint64_t gpa, uint32_t len, in vq_region_hold() argument
152 vmp = vq_page_hold(ring, gpa & PAGEMASK, writable); in vq_region_hold()
174 vmp = vq_page_hold(ring, gpa, writable); in vq_region_hold()
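
vq_page_hold() and vq_region_hold() (lines 107-174) turn an arbitrary guest-physical range into per-page holds against the ring's vmm lease: read access is always requested and write access only on demand, the first hold covers the page containing gpa (hence gpa & PAGEMASK at line 152), and the walk then advances in whole-page steps. A minimal sketch of that decomposition, assuming a flat array of hold records in place of the kernel-private vmm_page_t chain:

    #include <stdbool.h>
    #include <stdint.h>

    #define PAGESIZE    4096u
    #define PAGEOFFSET  (PAGESIZE - 1)
    #define PAGEMASK    (~(uint64_t)PAGEOFFSET)

    /* Hypothetical record of one held page. */
    typedef struct {
        uint64_t    hp_gpa;         /* page-aligned guest-physical address */
        bool        hp_writable;
    } held_page_t;

    /*
     * Hold every page touched by [gpa, gpa + len).  Returns the number
     * of pages held, or -1 if the caller's array is too small.
     */
    static int
    vq_region_hold(uint64_t gpa, uint32_t len, bool writable,
        held_page_t *pages, int npages)
    {
        /* The first chunk may start mid-page. */
        uint32_t chunk = PAGESIZE - (uint32_t)(gpa & PAGEOFFSET);
        int n = 0;

        if (chunk > len)
            chunk = len;

        while (len != 0) {
            if (n == npages)
                return (-1);
            pages[n].hp_gpa = gpa & PAGEMASK;
            pages[n].hp_writable = writable;
            n++;

            gpa += chunk;
            len -= chunk;
            /* Every subsequent page is entered at its start. */
            chunk = (len < PAGESIZE) ? len : PAGESIZE;
        }
        return (n);
    }

Splitting at page granularity is also what lets a region hold fail cleanly partway through: every page taken so far has its own record to release.
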
196 viona_vring_t *ring = arg; in viona_ring_lease_expire_cb() local
198 mutex_enter(&ring->vr_lock); in viona_ring_lease_expire_cb()
199 cv_broadcast(&ring->vr_cv); in viona_ring_lease_expire_cb()
200 mutex_exit(&ring->vr_lock); in viona_ring_lease_expire_cb()
207 viona_ring_lease_drop(viona_vring_t *ring) in viona_ring_lease_drop() argument
209 ASSERT(MUTEX_HELD(&ring->vr_lock)); in viona_ring_lease_drop()
211 if (ring->vr_lease != NULL) { in viona_ring_lease_drop()
212 vmm_hold_t *hold = ring->vr_link->l_vm_hold; in viona_ring_lease_drop()
220 viona_ring_unmap(ring); in viona_ring_lease_drop()
222 vmm_drv_lease_break(hold, ring->vr_lease); in viona_ring_lease_drop()
223 ring->vr_lease = NULL; in viona_ring_lease_drop()
228 viona_ring_lease_renew(viona_vring_t *ring) in viona_ring_lease_renew() argument
230 vmm_hold_t *hold = ring->vr_link->l_vm_hold; in viona_ring_lease_renew()
233 ASSERT(MUTEX_HELD(&ring->vr_lock)); in viona_ring_lease_renew()
235 viona_ring_lease_drop(ring); in viona_ring_lease_renew()
241 ring->vr_lease = vmm_drv_lease_sign(hold, viona_ring_lease_expire_cb, in viona_ring_lease_renew()
242 ring); in viona_ring_lease_renew()
243 if (ring->vr_lease != NULL) { in viona_ring_lease_renew()
245 if (ring->vr_pa != 0 && ring->vr_size != 0) { in viona_ring_lease_renew()
250 if (!viona_ring_map(ring, ring->vr_state == VRS_INIT)) { in viona_ring_lease_renew()
251 viona_ring_lease_drop(ring); in viona_ring_lease_renew()
256 return (ring->vr_lease != NULL); in viona_ring_lease_renew()
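
The lease functions (lines 196-256) split responsibilities deliberately: viona_ring_lease_expire_cb() only broadcasts on vr_cv so the worker notices, while drop and renew run under vr_lock, unmapping the ring before breaking the old lease and remapping once a fresh one is signed. A condensed userland sketch of that lifecycle; the lease type here is a toy stand-in, since the real vmm_drv interfaces are kernel-private:

    #include <stdbool.h>
    #include <stdlib.h>

    /* Toy stand-ins: a "lease" is just an allocation carrying its
     * expiry callback; the mapping is reduced to a flag. */
    typedef struct lease {
        void    (*l_cb)(void *);
        void    *l_arg;
    } lease_t;

    typedef struct ring {
        lease_t *r_lease;
        bool    r_mapped;
        bool    r_wake;         /* stands in for cv_broadcast(&vr_cv) */
    } ring_t;

    static void
    lease_expire_cb(void *arg)
    {
        /* Never tear down from the callback: just wake the worker,
         * which sees the expiry and renews from its own context. */
        ((ring_t *)arg)->r_wake = true;
    }

    static void
    lease_drop(ring_t *r)
    {
        if (r->r_lease != NULL) {
            r->r_mapped = false;    /* mappings die with the lease */
            free(r->r_lease);
            r->r_lease = NULL;
        }
    }

    static bool
    lease_renew(ring_t *r)
    {
        lease_drop(r);
        r->r_lease = malloc(sizeof (lease_t));
        if (r->r_lease != NULL) {
            r->r_lease->l_cb = lease_expire_cb;
            r->r_lease->l_arg = r;
            r->r_mapped = true;     /* remap under the fresh lease; the
                                     * real renew drops the lease again
                                     * if remapping fails */
        }
        return (r->r_lease != NULL);
    }
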
260 viona_ring_alloc(viona_link_t *link, viona_vring_t *ring) in viona_ring_alloc() argument
262 ring->vr_link = link; in viona_ring_alloc()
263 mutex_init(&ring->vr_lock, NULL, MUTEX_DRIVER, NULL); in viona_ring_alloc()
264 cv_init(&ring->vr_cv, NULL, CV_DRIVER, NULL); in viona_ring_alloc()
265 mutex_init(&ring->vr_a_mutex, NULL, MUTEX_DRIVER, NULL); in viona_ring_alloc()
266 mutex_init(&ring->vr_u_mutex, NULL, MUTEX_DRIVER, NULL); in viona_ring_alloc()
270 viona_ring_misc_free(viona_vring_t *ring) in viona_ring_misc_free() argument
272 const uint_t qsz = ring->vr_size; in viona_ring_misc_free()
274 viona_tx_ring_free(ring, qsz); in viona_ring_misc_free()
278 viona_ring_free(viona_vring_t *ring) in viona_ring_free() argument
280 mutex_destroy(&ring->vr_lock); in viona_ring_free()
281 cv_destroy(&ring->vr_cv); in viona_ring_free()
282 mutex_destroy(&ring->vr_a_mutex); in viona_ring_free()
283 mutex_destroy(&ring->vr_u_mutex); in viona_ring_free()
284 ring->vr_link = NULL; in viona_ring_free()
291 viona_vring_t *ring; in viona_ring_init() local
308 ring = &link->l_vrings[idx]; in viona_ring_init()
309 mutex_enter(&ring->vr_lock); in viona_ring_init()
310 if (ring->vr_state != VRS_RESET) { in viona_ring_init()
311 mutex_exit(&ring->vr_lock); in viona_ring_init()
314 VERIFY(ring->vr_state_flags == 0); in viona_ring_init()
316 ring->vr_lease = NULL; in viona_ring_init()
317 if (!viona_ring_lease_renew(ring)) { in viona_ring_init()
322 ring->vr_size = qsz; in viona_ring_init()
323 ring->vr_mask = (ring->vr_size - 1); in viona_ring_init()
324 ring->vr_pa = pa; in viona_ring_init()
325 if (!viona_ring_map(ring, true)) { in viona_ring_init()
331 ring->vr_cur_aidx = params->vrp_avail_idx; in viona_ring_init()
332 ring->vr_cur_uidx = params->vrp_used_idx; in viona_ring_init()
335 viona_tx_ring_alloc(ring, qsz); in viona_ring_init()
339 ring->vr_msi_addr = 0; in viona_ring_init()
340 ring->vr_msi_msg = 0; in viona_ring_init()
343 bzero(&ring->vr_stats, sizeof (ring->vr_stats)); in viona_ring_init()
344 bzero(&ring->vr_err_stats, sizeof (ring->vr_err_stats)); in viona_ring_init()
346 t = viona_create_worker(ring); in viona_ring_init()
351 ring->vr_worker_thread = t; in viona_ring_init()
352 ring->vr_state = VRS_SETUP; in viona_ring_init()
353 cv_broadcast(&ring->vr_cv); in viona_ring_init()
354 mutex_exit(&ring->vr_lock); in viona_ring_init()
358 viona_ring_lease_drop(ring); in viona_ring_init()
359 viona_ring_misc_free(ring); in viona_ring_init()
360 ring->vr_size = 0; in viona_ring_init()
361 ring->vr_mask = 0; in viona_ring_init()
362 ring->vr_pa = 0; in viona_ring_init()
363 ring->vr_cur_aidx = 0; in viona_ring_init()
364 ring->vr_cur_uidx = 0; in viona_ring_init()
365 mutex_exit(&ring->vr_lock); in viona_ring_init()
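
viona_ring_init() (lines 291-365) is straight-line setup with full unwinding: it refuses any ring not in VRS_RESET, takes a lease, records size, mask, and PA, maps the ring, seeds the avail/used cursors from the caller's parameters, and only then creates the worker and advertises VRS_SETUP; the fail path (lines 358-365) reverses every step and leaves the ring zeroed. A sketch of the same ordering, with hypothetical prototypes standing in for the surrounding driver pieces:

    #include <stdbool.h>
    #include <stdint.h>

    typedef enum { VRS_RESET, VRS_SETUP } vr_state_t;

    typedef struct ring {
        vr_state_t  r_state;
        uint16_t    r_size;
        uint16_t    r_mask;
        uint64_t    r_pa;
        uint16_t    r_aidx, r_uidx;
    } ring_t;

    /* Hypothetical prototypes for the surrounding driver pieces. */
    extern bool ring_lease_renew(ring_t *);
    extern bool ring_map(ring_t *);
    extern void ring_lease_drop(ring_t *);
    extern bool worker_create(ring_t *);

    static int
    ring_init(ring_t *r, uint16_t qsz, uint64_t pa, uint16_t aidx,
        uint16_t uidx)
    {
        if (r->r_state != VRS_RESET)
            return (-1);        /* only a fully reset ring may init */
        if (!ring_lease_renew(r))
            return (-1);

        r->r_size = qsz;        /* qsz is validated elsewhere as a */
        r->r_mask = qsz - 1;    /* power of two, so the mask wraps */
        r->r_pa = pa;
        if (!ring_map(r))
            goto fail;

        r->r_aidx = aidx;       /* resume indices, e.g. after a */
        r->r_uidx = uidx;       /* pause or live migration */
        if (!worker_create(r))
            goto fail;

        r->r_state = VRS_SETUP; /* the worker takes it from here */
        return (0);

    fail:
        /* Unwind in reverse order and leave the ring fully reset. */
        ring_lease_drop(r);
        r->r_size = r->r_mask = 0;
        r->r_pa = 0;
        r->r_aidx = r->r_uidx = 0;
        return (-1);
    }
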
373 viona_vring_t *ring; in viona_ring_get_state() local
379 ring = &link->l_vrings[idx]; in viona_ring_get_state()
380 mutex_enter(&ring->vr_lock); in viona_ring_get_state()
382 params->vrp_size = ring->vr_size; in viona_ring_get_state()
383 params->vrp_pa = ring->vr_pa; in viona_ring_get_state()
385 if (ring->vr_state == VRS_RUN) { in viona_ring_get_state()
387 mutex_enter(&ring->vr_a_mutex); in viona_ring_get_state()
388 params->vrp_avail_idx = ring->vr_cur_aidx; in viona_ring_get_state()
389 mutex_exit(&ring->vr_a_mutex); in viona_ring_get_state()
390 mutex_enter(&ring->vr_u_mutex); in viona_ring_get_state()
391 params->vrp_used_idx = ring->vr_cur_uidx; in viona_ring_get_state()
392 mutex_exit(&ring->vr_u_mutex); in viona_ring_get_state()
395 params->vrp_avail_idx = ring->vr_cur_aidx; in viona_ring_get_state()
396 params->vrp_used_idx = ring->vr_cur_uidx; in viona_ring_get_state()
399 mutex_exit(&ring->vr_lock); in viona_ring_get_state()
405 viona_ring_reset(viona_vring_t *ring, boolean_t heed_signals) in viona_ring_reset() argument
407 mutex_enter(&ring->vr_lock); in viona_ring_reset()
408 if (ring->vr_state == VRS_RESET) { in viona_ring_reset()
409 mutex_exit(&ring->vr_lock); in viona_ring_reset()
413 if ((ring->vr_state_flags & VRSF_REQ_STOP) == 0) { in viona_ring_reset()
414 ring->vr_state_flags |= VRSF_REQ_STOP; in viona_ring_reset()
415 cv_broadcast(&ring->vr_cv); in viona_ring_reset()
417 while (ring->vr_state != VRS_RESET) { in viona_ring_reset()
419 cv_wait(&ring->vr_cv, &ring->vr_lock); in viona_ring_reset()
423 rs = cv_wait_sig(&ring->vr_cv, &ring->vr_lock); in viona_ring_reset()
424 if (rs <= 0 && ring->vr_state != VRS_RESET) { in viona_ring_reset()
425 mutex_exit(&ring->vr_lock); in viona_ring_reset()
430 mutex_exit(&ring->vr_lock); in viona_ring_reset()
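
viona_ring_reset() (lines 405-430) posts VRSF_REQ_STOP at most once, wakes the worker, then waits on vr_cv until the worker parks in VRS_RESET; with heed_signals set it waits via cv_wait_sig() so a signalled thread can give up early. A self-contained pthreads rendering of the uninterruptible variant (plain pthreads has no cv_wait_sig() equivalent, so the signal-aware path is omitted):

    #include <pthread.h>

    #define VRSF_REQ_STOP   0x1

    typedef enum { VRS_RESET, VRS_RUN } vr_state_t;

    typedef struct ring {
        pthread_mutex_t r_lock;
        pthread_cond_t  r_cv;
        vr_state_t      r_state;
        unsigned        r_flags;
    } ring_t;

    /*
     * Ask the worker to stop and wait until it parks in VRS_RESET.
     * The request is posted at most once; later callers just join
     * the wait.
     */
    static void
    ring_reset(ring_t *r)
    {
        pthread_mutex_lock(&r->r_lock);
        if (r->r_state == VRS_RESET) {
            pthread_mutex_unlock(&r->r_lock);
            return;             /* nothing to do */
        }
        if ((r->r_flags & VRSF_REQ_STOP) == 0) {
            r->r_flags |= VRSF_REQ_STOP;
            pthread_cond_broadcast(&r->r_cv);
        }
        while (r->r_state != VRS_RESET)
            pthread_cond_wait(&r->r_cv, &r->r_lock);
        pthread_mutex_unlock(&r->r_lock);
    }
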
435 viona_ring_map(viona_vring_t *ring, bool defer_dirty) in viona_ring_map() argument
437 const uint16_t qsz = ring->vr_size; in viona_ring_map()
438 uintptr_t pa = ring->vr_pa; in viona_ring_map()
445 ASSERT(MUTEX_HELD(&ring->vr_lock)); in viona_ring_map()
446 ASSERT3P(ring->vr_map_pages, ==, NULL); in viona_ring_map()
449 ring->vr_map_pages = kmem_zalloc(npages * sizeof (void *), KM_SLEEP); in viona_ring_map()
475 vmp = vmm_drv_page_hold_ext(ring->vr_lease, pa, in viona_ring_map()
478 viona_ring_unmap(ring); in viona_ring_map()
487 ring->vr_map_hold = vmp; in viona_ring_map()
492 ring->vr_map_pages[i] = vmm_drv_page_writable(vmp); in viona_ring_map()
499 viona_ring_mark_dirty(viona_vring_t *ring) in viona_ring_mark_dirty() argument
501 ASSERT(MUTEX_HELD(&ring->vr_lock)); in viona_ring_mark_dirty()
502 ASSERT(ring->vr_map_hold != NULL); in viona_ring_mark_dirty()
504 for (vmm_page_t *vp = ring->vr_map_hold; vp != NULL; in viona_ring_mark_dirty()
511 viona_ring_unmap(viona_vring_t *ring) in viona_ring_unmap() argument
513 ASSERT(MUTEX_HELD(&ring->vr_lock)); in viona_ring_unmap()
515 void **map = ring->vr_map_pages; in viona_ring_unmap()
517 const uint_t npages = LEGACY_VQ_PAGES(ring->vr_size); in viona_ring_unmap()
519 ring->vr_map_pages = NULL; in viona_ring_unmap()
521 vmm_drv_page_release_chain(ring->vr_map_hold); in viona_ring_unmap()
522 ring->vr_map_hold = NULL; in viona_ring_unmap()
524 ASSERT3P(ring->vr_map_hold, ==, NULL); in viona_ring_unmap()
529 viona_ring_addr(viona_vring_t *ring, uint_t off) in viona_ring_addr() argument
531 ASSERT3P(ring->vr_map_pages, !=, NULL); in viona_ring_addr()
532 ASSERT3U(LEGACY_VQ_SIZE(ring->vr_size), >, off); in viona_ring_addr()
536 return ((caddr_t)ring->vr_map_pages[page_num] + page_off); in viona_ring_addr()
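
viona_ring_map() (lines 435-492) holds each guest page of the ring individually, so viona_ring_addr() (lines 529-536) cannot do flat pointer arithmetic: it splits a ring offset into a page index and an intra-page remainder. A sketch with illustrative field names; like the driver, it relies on the legacy vring layout never splitting a field across a page boundary:

    #include <stdint.h>

    #define PAGESIZE    4096u

    /*
     * The mapped ring is kept as an array of per-page kernel
     * mappings rather than one contiguous VA range, so a ring
     * offset resolves to an index plus a remainder.
     */
    typedef struct ring {
        void    **r_map_pages;  /* one mapping per guest page */
    } ring_t;

    static void *
    ring_addr(ring_t *r, unsigned off)
    {
        unsigned page_num = off / PAGESIZE;
        unsigned page_off = off % PAGESIZE;

        return ((uint8_t *)r->r_map_pages[page_num] + page_off);
    }
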
540 viona_intr_ring(viona_vring_t *ring, boolean_t skip_flags_check) in viona_intr_ring() argument
543 volatile uint16_t *avail_flags = viona_ring_addr(ring, in viona_intr_ring()
544 LEGACY_AVAIL_FLAGS_OFF(ring->vr_size)); in viona_intr_ring()
551 mutex_enter(&ring->vr_lock); in viona_intr_ring()
552 uint64_t addr = ring->vr_msi_addr; in viona_intr_ring()
553 uint64_t msg = ring->vr_msi_msg; in viona_intr_ring()
554 mutex_exit(&ring->vr_lock); in viona_intr_ring()
557 (void) vmm_drv_msi(ring->vr_lease, addr, msg); in viona_intr_ring()
560 if (atomic_cas_uint(&ring->vr_intr_enabled, 0, 1) == 0) { in viona_intr_ring()
561 pollwakeup(&ring->vr_link->l_pollhead, POLLRDBAND); in viona_intr_ring()
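
viona_intr_ring() (lines 540-561) first honors the guest's VRING_AVAIL_F_NO_INTERRUPT hint (unless the caller asks to skip the check), then delivers the interrupt either directly as an MSI through the lease or, when no MSI is configured, by latching vr_intr_enabled and waking poll(2) waiters. A sketch of the two delivery paths, with hypothetical hooks standing in for vmm_drv_msi() and pollwakeup():

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdint.h>

    #define VRING_AVAIL_F_NO_INTERRUPT  0x1

    /* Hypothetical delivery hooks. */
    extern void inject_msi(uint64_t addr, uint64_t msg);
    extern void wake_poller(void);

    typedef struct ring {
        volatile uint16_t *r_avail_flags;   /* mapped guest memory */
        uint64_t    r_msi_addr;
        uint64_t    r_msi_msg;
        atomic_uint r_intr_enabled;
    } ring_t;

    static void
    ring_intr(ring_t *r, bool skip_flags_check)
    {
        if (!skip_flags_check &&
            (*r->r_avail_flags & VRING_AVAIL_F_NO_INTERRUPT) != 0)
            return;     /* guest suppressed interrupts */

        if (r->r_msi_addr != 0) {
            /* Fast path: deliver directly as an MSI. */
            inject_msi(r->r_msi_addr, r->r_msi_msg);
            return;
        }

        /*
         * No MSI configured: latch "interrupt pending" and wake any
         * poll(2) waiter, but only once until userspace rearms it.
         */
        unsigned expect = 0;
        if (atomic_compare_exchange_strong(&r->r_intr_enabled,
            &expect, 1))
            wake_poller();
    }
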
567 vring_stop_req(const viona_vring_t *ring) in vring_stop_req() argument
569 return ((ring->vr_state_flags & VRSF_REQ_STOP) != 0); in vring_stop_req()
573 vring_pause_req(const viona_vring_t *ring) in vring_pause_req() argument
575 return ((ring->vr_state_flags & VRSF_REQ_PAUSE) != 0); in vring_pause_req()
579 vring_start_req(const viona_vring_t *ring) in vring_start_req() argument
581 return ((ring->vr_state_flags & VRSF_REQ_START) != 0); in vring_start_req()
593 vring_need_bail_ext(const viona_vring_t *ring, bool stop_only) in vring_need_bail_ext() argument
595 ASSERT(MUTEX_HELD(&ring->vr_lock)); in vring_need_bail_ext()
597 if (vring_stop_req(ring) || in vring_need_bail_ext()
598 (!stop_only && vring_pause_req(ring))) { in vring_need_bail_ext()
602 kthread_t *t = ring->vr_worker_thread; in vring_need_bail_ext()
615 vring_need_bail(const viona_vring_t *ring) in vring_need_bail() argument
617 return (vring_need_bail_ext(ring, false)); in vring_need_bail()
621 viona_ring_pause(viona_vring_t *ring) in viona_ring_pause() argument
623 mutex_enter(&ring->vr_lock); in viona_ring_pause()
624 switch (ring->vr_state) { in viona_ring_pause()
635 ring->vr_state_flags &= ~VRSF_REQ_START; in viona_ring_pause()
636 mutex_exit(&ring->vr_lock); in viona_ring_pause()
640 if ((ring->vr_state_flags & VRSF_REQ_STOP) != 0) { in viona_ring_pause()
642 mutex_exit(&ring->vr_lock); in viona_ring_pause()
647 ring->vr_state_flags |= VRSF_REQ_PAUSE; in viona_ring_pause()
648 cv_broadcast(&ring->vr_cv); in viona_ring_pause()
652 panic("invalid ring state %d", ring->vr_state); in viona_ring_pause()
657 int res = cv_wait_sig(&ring->vr_cv, &ring->vr_lock); in viona_ring_pause()
659 if (ring->vr_state == VRS_INIT || in viona_ring_pause()
660 (ring->vr_state_flags & VRSF_REQ_PAUSE) == 0) { in viona_ring_pause()
662 mutex_exit(&ring->vr_lock); in viona_ring_pause()
667 mutex_exit(&ring->vr_lock); in viona_ring_pause()
677 viona_vring_t *ring = (viona_vring_t *)arg; in viona_worker() local
678 viona_link_t *link = ring->vr_link; in viona_worker()
680 mutex_enter(&ring->vr_lock); in viona_worker()
681 VERIFY3U(ring->vr_state, ==, VRS_SETUP); in viona_worker()
684 if (vring_need_bail_ext(ring, true)) { in viona_worker()
690 ring->vr_state = VRS_INIT; in viona_worker()
691 cv_broadcast(&ring->vr_cv); in viona_worker()
693 while (!vring_start_req(ring)) { in viona_worker()
698 if (vmm_drv_lease_expired(ring->vr_lease)) { in viona_worker()
699 if (!viona_ring_lease_renew(ring)) { in viona_worker()
704 (void) cv_wait_sig(&ring->vr_cv, &ring->vr_lock); in viona_worker()
706 if (vring_pause_req(ring)) { in viona_worker()
708 ring->vr_state_flags &= ~VRSF_REQ_PAUSE; in viona_worker()
710 if (vring_need_bail_ext(ring, true)) { in viona_worker()
715 ASSERT((ring->vr_state_flags & VRSF_REQ_START) != 0); in viona_worker()
716 ring->vr_state = VRS_RUN; in viona_worker()
717 ring->vr_state_flags &= ~VRSF_REQ_START; in viona_worker()
718 viona_ring_mark_dirty(ring); in viona_worker()
721 if (vmm_drv_lease_expired(ring->vr_lease)) { in viona_worker()
722 if (!viona_ring_lease_renew(ring)) { in viona_worker()
728 if (ring == &link->l_vrings[VIONA_VQ_RX]) { in viona_worker()
729 viona_worker_rx(ring, link); in viona_worker()
730 } else if (ring == &link->l_vrings[VIONA_VQ_TX]) { in viona_worker()
731 viona_worker_tx(ring, link); in viona_worker()
733 panic("unexpected ring: %p", (void *)ring); in viona_worker()
736 VERIFY3U(ring->vr_state, ==, VRS_STOP); in viona_worker()
737 VERIFY3U(ring->vr_xfer_outstanding, ==, 0); in viona_worker()
743 viona_ring_consolidate_stats(ring); in viona_worker()
746 if (vring_pause_req(ring)) { in viona_worker()
747 ring->vr_state_flags &= ~VRSF_REQ_PAUSE; in viona_worker()
749 if (vring_need_bail_ext(ring, true)) { in viona_worker()
759 viona_ring_unmap(ring); in viona_worker()
760 if (viona_ring_map(ring, true)) { in viona_worker()
771 viona_ring_misc_free(ring); in viona_worker()
773 viona_ring_lease_drop(ring); in viona_worker()
774 ring->vr_cur_aidx = 0; in viona_worker()
775 ring->vr_size = 0; in viona_worker()
776 ring->vr_mask = 0; in viona_worker()
777 ring->vr_pa = 0; in viona_worker()
778 ring->vr_state = VRS_RESET; in viona_worker()
779 ring->vr_state_flags = 0; in viona_worker()
780 ring->vr_worker_thread = NULL; in viona_worker()
781 cv_broadcast(&ring->vr_cv); in viona_worker()
782 mutex_exit(&ring->vr_lock); in viona_worker()
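
viona_worker() (lines 677-782) is the ring's whole life as a state machine: SETUP, then INIT parked until a start request arrives (renewing the lease if it expired while waiting), then RUN inside the rx or tx loop until a STOP, after which a pause request loops back to INIT while anything else unwinds the ring to RESET and exits. A condensed sketch of that control flow, with illustrative predicates standing in for the vring_*_req() helpers and the locking elided:

    #include <stdbool.h>

    typedef enum { VRS_SETUP, VRS_INIT, VRS_RUN, VRS_STOP,
        VRS_RESET } vr_state_t;

    typedef struct ring ring_t;

    /* Illustrative stand-ins for the predicates and work loops. */
    extern bool start_requested(ring_t *);
    extern bool pause_requested(ring_t *);
    extern bool bail_requested(ring_t *);
    extern void wait_for_request(ring_t *);
    extern void run_ring(ring_t *);     /* rx or tx loop until STOP */
    extern void set_state(ring_t *, vr_state_t);
    extern void cleanup_ring(ring_t *);

    static void
    worker(ring_t *r)
    {
        set_state(r, VRS_INIT);

        for (;;) {
            /* Park until userspace asks us to start (or bail). */
            while (!start_requested(r)) {
                if (bail_requested(r))
                    goto done;
                wait_for_request(r);
            }

            set_state(r, VRS_RUN);
            run_ring(r);        /* returns with state == VRS_STOP */

            /* A pause lands back in INIT; anything else is final. */
            if (!pause_requested(r) || bail_requested(r))
                goto done;
            set_state(r, VRS_INIT);
        }
    done:
        cleanup_ring(r);        /* unwind the ring's resources */
        set_state(r, VRS_RESET);
    }
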
789 viona_create_worker(viona_vring_t *ring) in viona_create_worker() argument
796 ASSERT(MUTEX_HELD(&ring->vr_lock)); in viona_create_worker()
797 ASSERT(ring->vr_state == VRS_RESET); in viona_create_worker()
800 lwp = lwp_create(viona_worker, (void *)ring, 0, p, TS_STOPPED, in viona_create_worker()
816 vq_read_desc(viona_vring_t *ring, uint16_t idx, struct virtio_desc *descp) in vq_read_desc() argument
820 ASSERT3U(idx, <, ring->vr_size); in vq_read_desc()
822 bcopy(viona_ring_addr(ring, entry_off), descp, sizeof (*descp)); in vq_read_desc()
826 vq_read_avail(viona_vring_t *ring, uint16_t idx) in vq_read_avail() argument
828 ASSERT3U(idx, <, ring->vr_size); in vq_read_avail()
831 viona_ring_addr(ring, LEGACY_AVAIL_ENT_OFF(ring->vr_size, idx)); in vq_read_avail()
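
vq_read_desc() and vq_read_avail() (lines 816-831) locate entries purely by offset arithmetic over the legacy (virtio 0.9.5) layout: a table of 16-byte descriptors at offset 0, immediately followed by the avail ring, with the used ring on the next page boundary. A sketch of those computations per the published legacy layout; the driver's actual LEGACY_* macros may differ in detail:

    #include <stdint.h>

    #define VQ_DESC_SZ  16      /* sizeof (struct virtio_desc) */

    /* Descriptor table: 16-byte entries starting at offset 0. */
    static inline unsigned
    desc_off(uint16_t idx)
    {
        return (idx * VQ_DESC_SZ);
    }

    /* Avail ring follows the table: u16 flags, u16 idx, u16 ring[qsz]. */
    static inline unsigned
    avail_flags_off(uint16_t qsz)
    {
        return (qsz * VQ_DESC_SZ);
    }

    static inline unsigned
    avail_idx_off(uint16_t qsz)
    {
        return (avail_flags_off(qsz) + 2);
    }

    static inline unsigned
    avail_ent_off(uint16_t qsz, uint16_t idx)
    {
        return (avail_idx_off(qsz) + 2 + idx * 2);
    }

    /* The used ring starts at the next 4096-byte boundary. */
    static inline unsigned
    used_off(uint16_t qsz)
    {
        return ((avail_ent_off(qsz, qsz) + 4095u) & ~4095u);
    }
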
841 vq_map_desc_bufs(viona_vring_t *ring, const struct virtio_desc *desc, in vq_map_desc_bufs() argument
845 VIONA_PROBE2(desc_bad_len, viona_vring_t *, ring, in vq_map_desc_bufs()
847 VIONA_RING_STAT_INCR(ring, desc_bad_len); in vq_map_desc_bufs()
850 VIONA_PROBE1(len_overflow, viona_vring_t *, ring); in vq_map_desc_bufs()
851 VIONA_RING_STAT_INCR(ring, len_overflow); in vq_map_desc_bufs()
855 int err = vq_region_hold(ring, desc->vd_addr, desc->vd_len, in vq_map_desc_bufs()
860 VIONA_PROBE1(too_many_desc, viona_vring_t *, ring); in vq_map_desc_bufs()
861 VIONA_RING_STAT_INCR(ring, too_many_desc); in vq_map_desc_bufs()
863 VIONA_PROBE_BAD_RING_ADDR(ring, desc->vd_addr); in vq_map_desc_bufs()
864 VIONA_RING_STAT_INCR(ring, bad_ring_addr); in vq_map_desc_bufs()
875 vq_map_indir_desc_bufs(viona_vring_t *ring, const struct virtio_desc *desc, in vq_map_indir_desc_bufs() argument
881 indir_count > ring->vr_size || in vq_map_indir_desc_bufs()
883 VIONA_PROBE2(indir_bad_len, viona_vring_t *, ring, in vq_map_indir_desc_bufs()
885 VIONA_RING_STAT_INCR(ring, indir_bad_len); in vq_map_indir_desc_bufs()
909 vmp = vq_page_hold(ring, indir_page, false); in vq_map_indir_desc_bufs()
911 VIONA_PROBE_BAD_RING_ADDR(ring, indir_page); in vq_map_indir_desc_bufs()
912 VIONA_RING_STAT_INCR(ring, bad_ring_addr); in vq_map_indir_desc_bufs()
929 VIONA_PROBE1(indir_bad_nest, viona_vring_t *, ring); in vq_map_indir_desc_bufs()
930 VIONA_RING_STAT_INCR(ring, indir_bad_nest); in vq_map_indir_desc_bufs()
934 VIONA_PROBE2(desc_bad_len, viona_vring_t *, ring, in vq_map_indir_desc_bufs()
936 VIONA_RING_STAT_INCR(ring, desc_bad_len); in vq_map_indir_desc_bufs()
941 err = vq_map_desc_bufs(ring, &vp, region); in vq_map_indir_desc_bufs()
951 VIONA_PROBE1(too_many_desc, viona_vring_t *, ring); in vq_map_indir_desc_bufs()
952 VIONA_RING_STAT_INCR(ring, too_many_desc); in vq_map_indir_desc_bufs()
959 VIONA_PROBE3(indir_bad_next, viona_vring_t *, ring, in vq_map_indir_desc_bufs()
961 VIONA_RING_STAT_INCR(ring, indir_bad_next); in vq_map_indir_desc_bufs()
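
vq_map_indir_desc_bufs() (lines 875-961) validates an indirect table before walking it: the byte length must describe a whole number of descriptors, the entry count may not exceed the ring size (line 881), and a nested indirect descriptor is rejected outright (indir_bad_nest, line 929), as the legacy spec requires. A sketch of those checks, with the zero-length test an added assumption and the full walk omitted:

    #include <stdbool.h>
    #include <stdint.h>

    #define VRING_DESC_F_INDIRECT   0x4
    #define VQ_DESC_SZ              16

    /*
     * Sanity checks an indirect descriptor table must pass before
     * it is walked.  The real code also bounds the walk count and
     * validates each 'next' link as it goes.
     */
    static bool
    indir_table_ok(uint32_t tbl_len, uint16_t ring_size)
    {
        uint32_t count = tbl_len / VQ_DESC_SZ;

        if (tbl_len == 0 || (tbl_len % VQ_DESC_SZ) != 0)
            return (false);     /* not a whole number of descriptors */
        if (count > ring_size)
            return (false);     /* guest exceeds the ring size */
        return (true);
    }

    static bool
    indir_entry_ok(uint16_t flags)
    {
        /* The legacy spec forbids nesting indirect descriptors. */
        return ((flags & VRING_DESC_F_INDIRECT) == 0);
    }
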
974 vq_popchain(viona_vring_t *ring, struct iovec *iov, uint_t niov, in vq_popchain() argument
988 mutex_enter(&ring->vr_a_mutex); in vq_popchain()
989 idx = ring->vr_cur_aidx; in vq_popchain()
990 ndesc = viona_ring_num_avail(ring); in vq_popchain()
993 mutex_exit(&ring->vr_a_mutex); in vq_popchain()
996 if (ndesc > ring->vr_size) { in vq_popchain()
1004 VIONA_PROBE2(ndesc_too_high, viona_vring_t *, ring, in vq_popchain()
1006 VIONA_RING_STAT_INCR(ring, ndesc_too_high); in vq_popchain()
1009 head = vq_read_avail(ring, idx & ring->vr_mask); in vq_popchain()
1013 if (next >= ring->vr_size) { in vq_popchain()
1014 VIONA_PROBE2(bad_idx, viona_vring_t *, ring, in vq_popchain()
1016 VIONA_RING_STAT_INCR(ring, bad_idx); in vq_popchain()
1020 vq_read_desc(ring, next, &vdir); in vq_popchain()
1022 if (vq_map_desc_bufs(ring, &vdir, &region) != 0) { in vq_popchain()
1033 viona_vring_t *, ring, in vq_popchain()
1035 VIONA_RING_STAT_INCR(ring, indir_bad_next); in vq_popchain()
1039 if (vq_map_indir_desc_bufs(ring, &vdir, &region) != 0) { in vq_popchain()
1045 ring->vr_cur_aidx++; in vq_popchain()
1046 mutex_exit(&ring->vr_a_mutex); in vq_popchain()
1057 mutex_exit(&ring->vr_a_mutex); in vq_popchain()
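
vq_popchain() (lines 974-1057) is the consumer side: under vr_a_mutex it reads the head slot from the avail ring (line 1009), then follows VRING_DESC_F_NEXT links, turning each descriptor into an iovec until the chain ends, an index goes out of range, or the iovec budget is exhausted. A condensed sketch of the walk, with illustrative accessors in place of the locking, region holds, and the indirect-descriptor handling shown above:

    #include <stdint.h>
    #include <sys/uio.h>

    #define VRING_DESC_F_NEXT   0x1

    struct vq_desc {
        uint64_t    vd_addr;
        uint32_t    vd_len;
        uint16_t    vd_flags;
        uint16_t    vd_next;
    };

    /* Illustrative accessors over the mapped ring. */
    extern uint16_t ring_avail_entry(uint16_t slot);
    extern void ring_read_desc(uint16_t idx, struct vq_desc *);
    extern void *gpa_to_va(uint64_t gpa);   /* region already held */

    /*
     * Pop one available chain: resolve the head slot, then follow
     * NEXT links, turning each descriptor into an iovec.  Returns
     * the iovec count (with the head cookie for the eventual
     * used-ring entry in *cookie), or -1 on a malformed chain.
     */
    static int
    vq_popchain(uint16_t aidx, uint16_t mask, uint16_t qsz,
        struct iovec *iov, int niov, uint16_t *cookie)
    {
        uint16_t next = ring_avail_entry(aidx & mask);
        int n = 0;

        *cookie = next;
        for (;;) {
            struct vq_desc vd;

            if (next >= qsz || n == niov)
                return (-1);    /* bad index or chain too long */
            ring_read_desc(next, &vd);
            iov[n].iov_base = gpa_to_va(vd.vd_addr);
            iov[n].iov_len = vd.vd_len;
            n++;

            if ((vd.vd_flags & VRING_DESC_F_NEXT) == 0)
                return (n);     /* end of chain */
            next = vd.vd_next;
        }
    }
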
1070 vq_write_used_ent(viona_vring_t *ring, uint16_t idx, uint16_t cookie, in vq_write_used_ent() argument
1079 const uint_t used_id_off = LEGACY_USED_ENT_OFF(ring->vr_size, idx); in vq_write_used_ent()
1081 volatile uint32_t *idp = viona_ring_addr(ring, used_id_off); in vq_write_used_ent()
1082 volatile uint32_t *lenp = viona_ring_addr(ring, used_len_off); in vq_write_used_ent()
1084 ASSERT(MUTEX_HELD(&ring->vr_u_mutex)); in vq_write_used_ent()
1091 vq_write_used_idx(viona_vring_t *ring, uint16_t idx) in vq_write_used_idx() argument
1093 ASSERT(MUTEX_HELD(&ring->vr_u_mutex)); in vq_write_used_idx()
1096 viona_ring_addr(ring, LEGACY_USED_IDX_OFF(ring->vr_size)); in vq_write_used_idx()
1101 vq_pushchain(viona_vring_t *ring, uint32_t len, uint16_t cookie) in vq_pushchain() argument
1105 mutex_enter(&ring->vr_u_mutex); in vq_pushchain()
1107 uidx = ring->vr_cur_uidx; in vq_pushchain()
1108 vq_write_used_ent(ring, uidx & ring->vr_mask, cookie, len); in vq_pushchain()
1112 vq_write_used_idx(ring, uidx); in vq_pushchain()
1113 ring->vr_cur_uidx = uidx; in vq_pushchain()
1115 mutex_exit(&ring->vr_u_mutex); in vq_pushchain()
1119 vq_pushchain_many(viona_vring_t *ring, uint_t num_bufs, used_elem_t *elem) in vq_pushchain_many() argument
1123 mutex_enter(&ring->vr_u_mutex); in vq_pushchain_many()
1125 uidx = ring->vr_cur_uidx; in vq_pushchain_many()
1128 vq_write_used_ent(ring, uidx & ring->vr_mask, elem[i].id, in vq_pushchain_many()
1133 vq_write_used_idx(ring, uidx); in vq_pushchain_many()
1134 ring->vr_cur_uidx = uidx; in vq_pushchain_many()
1136 mutex_exit(&ring->vr_u_mutex); in vq_pushchain_many()
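
vq_pushchain() and vq_pushchain_many() (lines 1070-1136) publish completions in two ordered steps: write the used-ring entry (or entries), then advance the used index, with a producer barrier between the two so the guest can never observe the new index before the entry data. A sketch using a C11 release fence where illumos kernel code would idiomatically use membar_producer():

    #include <stdatomic.h>
    #include <stdint.h>

    struct vq_used_elem {
        uint32_t    vu_id;      /* head cookie from vq_popchain() */
        uint32_t    vu_len;     /* bytes written into the chain */
    };

    /* Illustrative pointers into the mapped used ring. */
    extern struct vq_used_elem *used_ring;
    extern volatile uint16_t *used_idx;

    /*
     * Publish one completed chain: fill the entry, fence, then move
     * the index.
     */
    static void
    vq_pushchain(uint16_t *cur_uidx, uint16_t mask, uint16_t cookie,
        uint32_t len)
    {
        uint16_t uidx = *cur_uidx;

        used_ring[uidx & mask].vu_id = cookie;
        used_ring[uidx & mask].vu_len = len;
        uidx++;

        atomic_thread_fence(memory_order_release);
        *used_idx = uidx;   /* the guest may now consume the entry */

        *cur_uidx = uidx;
    }

vq_pushchain_many() amortizes the same pattern: it writes all the entries first and moves the index exactly once.
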
1143 viona_ring_disable_notify(viona_vring_t *ring) in viona_ring_disable_notify() argument
1146 viona_ring_addr(ring, LEGACY_USED_FLAGS_OFF(ring->vr_size)); in viona_ring_disable_notify()
1155 viona_ring_enable_notify(viona_vring_t *ring) in viona_ring_enable_notify() argument
1158 viona_ring_addr(ring, LEGACY_USED_FLAGS_OFF(ring->vr_size)); in viona_ring_enable_notify()
1172 viona_ring_num_avail(viona_vring_t *ring) in viona_ring_num_avail() argument
1175 viona_ring_addr(ring, LEGACY_AVAIL_IDX_OFF(ring->vr_size)); in viona_ring_num_avail()
1177 return (*avail_idx - ring->vr_cur_aidx); in viona_ring_num_avail()
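
The notify helpers (lines 1143-1177) toggle VRING_USED_F_NO_NOTIFY in the used ring's flags word (the guest checks it before kicking the host), and viona_ring_num_avail() subtracts the driver's private cursor from the guest's free-running avail index. Both counters are uint16, so the modular subtraction stays correct across 0xffff -> 0 wraparound (guest index 2 minus cursor 0xfffe is 4). A sketch of both:

    #include <stdint.h>

    #define VRING_USED_F_NO_NOTIFY  0x1

    /* Suppress or re-enable guest kicks via the used-ring flags. */
    static void
    ring_set_notify(volatile uint16_t *used_flags, int enable)
    {
        if (enable)
            *used_flags &= (uint16_t)~VRING_USED_F_NO_NOTIFY;
        else
            *used_flags |= VRING_USED_F_NO_NOTIFY;
    }

    /* Outstanding descriptors: modular uint16 arithmetic. */
    static uint16_t
    ring_num_avail(volatile const uint16_t *avail_idx, uint16_t cur_aidx)
    {
        return ((uint16_t)(*avail_idx - cur_aidx));
    }
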
1182 viona_ring_stat_accept(viona_vring_t *ring, uint32_t len) in viona_ring_stat_accept() argument
1184 atomic_inc_64(&ring->vr_stats.vts_packets); in viona_ring_stat_accept()
1185 atomic_add_64(&ring->vr_stats.vts_bytes, len); in viona_ring_stat_accept()
1192 viona_ring_stat_drop(viona_vring_t *ring) in viona_ring_stat_drop() argument
1194 atomic_inc_64(&ring->vr_stats.vts_drops); in viona_ring_stat_drop()
1201 viona_ring_stat_error(viona_vring_t *ring) in viona_ring_stat_error() argument
1203 atomic_inc_64(&ring->vr_stats.vts_errors); in viona_ring_stat_error()
1210 viona_ring_consolidate_stats(viona_vring_t *ring) in viona_ring_consolidate_stats() argument
1212 viona_link_t *link = ring->vr_link; in viona_ring_consolidate_stats()
1214 (ring == &link->l_vrings[VIONA_VQ_RX]) ? in viona_ring_consolidate_stats()
1218 lstat->vts_packets += ring->vr_stats.vts_packets; in viona_ring_consolidate_stats()
1219 lstat->vts_bytes += ring->vr_stats.vts_bytes; in viona_ring_consolidate_stats()
1220 lstat->vts_drops += ring->vr_stats.vts_drops; in viona_ring_consolidate_stats()
1221 lstat->vts_errors += ring->vr_stats.vts_errors; in viona_ring_consolidate_stats()
1222 bzero(&ring->vr_stats, sizeof (ring->vr_stats)); in viona_ring_consolidate_stats()
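
The stats routines (lines 1182-1222) bump per-ring counters with lock-free atomics on the hot path; viona_ring_consolidate_stats() later folds them into the owning link's rx or tx totals as the worker winds down, zeroing the per-ring block. A sketch using C11 atomics in place of the atomic_inc_64()/atomic_add_64() primitives:

    #include <stdatomic.h>
    #include <stdint.h>

    typedef struct {
        atomic_uint_fast64_t    vts_packets;
        atomic_uint_fast64_t    vts_bytes;
        atomic_uint_fast64_t    vts_drops;
        atomic_uint_fast64_t    vts_errors;
    } vq_stats_t;

    /*
     * Fold a ring's hot-path counters into the link-wide block
     * (rx or tx, depending on which ring this is) and reset them.
     * atomic_exchange(..., 0) reads and zeroes in one step.
     */
    static void
    stats_consolidate(vq_stats_t *ring_stats, vq_stats_t *link_stats)
    {
        atomic_fetch_add(&link_stats->vts_packets,
            atomic_exchange(&ring_stats->vts_packets, 0));
        atomic_fetch_add(&link_stats->vts_bytes,
            atomic_exchange(&ring_stats->vts_bytes, 0));
        atomic_fetch_add(&link_stats->vts_drops,
            atomic_exchange(&ring_stats->vts_drops, 0));
        atomic_fetch_add(&link_stats->vts_errors,
            atomic_exchange(&ring_stats->vts_errors, 0));
    }
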