Lines matching refs:queue. Each hit below references the scan I/O queue (dsl_scan_io_queue_t) in the DSL scan code, listed in source order with its enclosing function noted on the line.
333 const zbookmark_phys_t *zb, dsl_scan_io_queue_t *queue);
334 static void scan_io_queue_insert_impl(dsl_scan_io_queue_t *queue,
1239 dsl_scan_io_queue_t *queue; in dsl_scan_should_clear() local
1242 queue = tvd->vdev_scan_io_queue; in dsl_scan_should_clear()
1243 if (queue != NULL) { in dsl_scan_should_clear()
1248 mused += zfs_btree_numnodes(&queue->q_exts_by_size) * in dsl_scan_should_clear()
1249 3 * sizeof (range_seg_gap_t) + queue->q_sio_memused; in dsl_scan_should_clear()
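
The dsl_scan_should_clear() hits above estimate how much memory the queues are holding before deciding to flush them. A minimal user-space sketch of that estimate, with invented names (scan_queue_est_t, scan_mem_used, scan_should_clear) and the per-node cost taken from the 3 * sizeof (range_seg_gap_t) factor visible in the listing; the rationale for that factor lives in the original source comments, not here.

/*
 * Simplified sketch (not the real ZFS code): per-queue cost is approximated
 * as (extent B-tree node count) * 3 * sizeof (range segment) plus the bytes
 * already charged for queued scan I/Os.
 */
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

typedef struct range_seg_gap { uint64_t rs_start, rs_end, rs_fill; } range_seg_gap_t;

typedef struct scan_queue_est {
    uint64_t btree_numnodes;   /* nodes in the by-size extent B-tree */
    uint64_t sio_memused;      /* bytes charged for queued scan I/Os */
} scan_queue_est_t;

/* Sum the estimated footprint of every per-vdev queue. */
static uint64_t
scan_mem_used(const scan_queue_est_t *queues, size_t nqueues)
{
    uint64_t mused = 0;
    for (size_t i = 0; i < nqueues; i++) {
        mused += queues[i].btree_numnodes *
            3 * sizeof (range_seg_gap_t) + queues[i].sio_memused;
    }
    return (mused);
}

/* Flush (issue from) the queues once the estimate crosses the limit. */
static bool
scan_should_clear(const scan_queue_est_t *queues, size_t nqueues,
    uint64_t mem_limit)
{
    return (scan_mem_used(queues, nqueues) >= mem_limit);
}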
2730 scan_io_queue_issue(dsl_scan_io_queue_t *queue, list_t *io_list) in scan_io_queue_issue() argument
2732 dsl_scan_t *scn = queue->q_scn; in scan_io_queue_issue()
2748 &sio->sio_zb, queue); in scan_io_queue_issue()
2750 scan_io_queues_update_zio_stats(queue, &bp); in scan_io_queue_issue()
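
scan_io_queue_issue() drains the list that the gather step built and hands each queued I/O to scan_exec_io(), updating zio stats as it goes. A simplified sketch of that drain loop, using an invented singly linked list and callbacks in place of the ZFS list_t, scan_io_t, and suspend check.

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <stdlib.h>

typedef struct sio {
    struct sio *next;
    uint64_t offset;
    uint64_t size;
} sio_t;

typedef void (*exec_fn_t)(const sio_t *);

/* Returns true if issuing was cut short by a suspend request. */
static bool
queue_issue(sio_t **list, exec_fn_t exec, bool (*should_suspend)(void))
{
    bool suspended = false;
    sio_t *sio;

    while ((sio = *list) != NULL) {
        if (should_suspend()) {
            suspended = true;
            break;    /* leave the rest on the list for later */
        }
        *list = sio->next;
        exec(sio);    /* issues the read for this block */
        free(sio);
    }
    return (suspended);
}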
2766 scan_io_queue_gather(dsl_scan_io_queue_t *queue, range_seg_t *rs, list_t *list) in scan_io_queue_gather() argument
2774 ASSERT(MUTEX_HELD(&queue->q_vd->vdev_scan_io_queue_lock)); in scan_io_queue_gather()
2778 SIO_SET_OFFSET(srch_sio, rs_get_start(rs, queue->q_exts_by_addr)); in scan_io_queue_gather()
2784 sio = avl_find(&queue->q_sios_by_addr, srch_sio, &idx); in scan_io_queue_gather()
2788 sio = avl_nearest(&queue->q_sios_by_addr, idx, AVL_AFTER); in scan_io_queue_gather()
2791 queue->q_exts_by_addr) && num_sios <= 32) { in scan_io_queue_gather()
2793 queue->q_exts_by_addr)); in scan_io_queue_gather()
2795 queue->q_exts_by_addr)); in scan_io_queue_gather()
2797 next_sio = AVL_NEXT(&queue->q_sios_by_addr, sio); in scan_io_queue_gather()
2798 avl_remove(&queue->q_sios_by_addr, sio); in scan_io_queue_gather()
2799 queue->q_sio_memused -= SIO_GET_MUSED(sio); in scan_io_queue_gather()
2814 queue->q_exts_by_addr)) { in scan_io_queue_gather()
2815 range_tree_adjust_fill(queue->q_exts_by_addr, rs, in scan_io_queue_gather()
2817 range_tree_resize_segment(queue->q_exts_by_addr, rs, in scan_io_queue_gather()
2819 queue->q_exts_by_addr) - SIO_GET_OFFSET(sio)); in scan_io_queue_gather()
2823 uint64_t rstart = rs_get_start(rs, queue->q_exts_by_addr); in scan_io_queue_gather()
2824 uint64_t rend = rs_get_end(rs, queue->q_exts_by_addr); in scan_io_queue_gather()
2825 range_tree_remove(queue->q_exts_by_addr, rstart, rend - rstart); in scan_io_queue_gather()
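
The scan_io_queue_gather() hits show the core batching step: starting at the extent's offset, up to 32 queued I/Os inside the extent are pulled off the by-address tree, and the extent is then either removed outright or resized if I/Os remain in it. A rough sketch of the same idea over a plain sorted array; the AVL tree, range tree, and memory accounting from the real code are left out, and all names are illustrative.

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

typedef struct qsio {
    uint64_t offset;
    uint64_t asize;
    bool queued;    /* still sitting in the queue? */
} qsio_t;

/*
 * sios[] is sorted by offset. Marks gathered entries !queued, stores their
 * indices in out[], and returns the count. *more_left is set when the
 * extent still contains queued I/Os after the 32-I/O batch.
 */
static size_t
gather_extent(qsio_t *sios, size_t nsios, uint64_t start, uint64_t end,
    size_t out[32], bool *more_left)
{
    size_t ngathered = 0;

    *more_left = false;
    for (size_t i = 0; i < nsios; i++) {
        if (!sios[i].queued || sios[i].offset < start)
            continue;
        if (sios[i].offset >= end)
            break;        /* past the extent: done */
        if (ngathered == 32) {
            *more_left = true;    /* caller resizes the extent */
            break;
        }
        sios[i].queued = false;
        out[ngathered++] = i;
    }
    return (ngathered);
}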
2843 scan_io_queue_fetch_ext(dsl_scan_io_queue_t *queue) in scan_io_queue_fetch_ext() argument
2845 dsl_scan_t *scn = queue->q_scn; in scan_io_queue_fetch_ext()
2846 range_tree_t *rt = queue->q_exts_by_addr; in scan_io_queue_fetch_ext()
2848 ASSERT(MUTEX_HELD(&queue->q_vd->vdev_scan_io_queue_lock)); in scan_io_queue_fetch_ext()
2861 zfs_btree_first(&queue->q_exts_by_size, NULL); in scan_io_queue_fetch_ext()
2893 range_seg_t *size_rs = zfs_btree_first(&queue->q_exts_by_size, in scan_io_queue_fetch_ext()
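
scan_io_queue_fetch_ext() picks the next extent to work on, consulting either the by-address view or the by-size view of the same set of extents. A sketch of the two selection policies, using an invented ext_t array in place of the range tree and B-tree; which policy the real function applies at any given moment depends on scan state that the listing does not show.

#include <stddef.h>
#include <stdint.h>

typedef struct ext { uint64_t start, end, fill; } ext_t;

/* Lowest-addressed extent: sequential issue order. */
static const ext_t *
fetch_by_addr(const ext_t *exts, size_t n)
{
    const ext_t *best = NULL;
    for (size_t i = 0; i < n; i++) {
        if (best == NULL || exts[i].start < best->start)
            best = &exts[i];
    }
    return (best);
}

/* Most-filled extent: best payoff per seek when trimming the queue. */
static const ext_t *
fetch_by_size(const ext_t *exts, size_t n)
{
    const ext_t *best = NULL;
    for (size_t i = 0; i < n; i++) {
        if (best == NULL || exts[i].fill > best->fill)
            best = &exts[i];
    }
    return (best);
}

Roughly, issuing in address order keeps reads sequential on disk, while draining the fullest extents first reads the most data per seek when the queue has to be shrunk.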
2913 dsl_scan_io_queue_t *queue = arg; in scan_io_queues_run_one() local
2914 kmutex_t *q_lock = &queue->q_vd->vdev_scan_io_queue_lock; in scan_io_queues_run_one()
2920 uint64_t nr_leaves = dsl_scan_count_leaves(queue->q_vd); in scan_io_queues_run_one()
2922 ASSERT(queue->q_scn->scn_is_sorted); in scan_io_queues_run_one()
2929 queue->q_maxinflight_bytes = in scan_io_queues_run_one()
2933 queue->q_total_seg_size_this_txg = 0; in scan_io_queues_run_one()
2934 queue->q_segs_this_txg = 0; in scan_io_queues_run_one()
2935 queue->q_total_zio_size_this_txg = 0; in scan_io_queues_run_one()
2936 queue->q_zios_this_txg = 0; in scan_io_queues_run_one()
2939 while ((rs = (range_seg_t *)scan_io_queue_fetch_ext(queue)) != NULL) { in scan_io_queues_run_one()
2953 more_left = scan_io_queue_gather(queue, rs, &sio_list); in scan_io_queues_run_one()
2970 suspended = scan_io_queue_issue(queue, &sio_list); in scan_io_queues_run_one()
2977 scan_io_queues_update_seg_stats(queue, seg_start, seg_end); in scan_io_queues_run_one()
2990 scan_io_queue_insert_impl(queue, sio); in scan_io_queues_run_one()
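
scan_io_queues_run_one() is the per-txg driver: it zeroes the four per-txg counters, then repeatedly fetches an extent, gathers and issues its I/Os, and records segment stats; if issuing is suspended, the leftover gathered I/Os go back into the queue via scan_io_queue_insert_impl(). A structural sketch with invented callback types; the zio counters (q_zios_this_txg, q_total_zio_size_this_txg) would be bumped from the issue path and are omitted here.

#include <stdbool.h>
#include <stdint.h>

typedef struct txg_stats {
    uint64_t segs;
    uint64_t seg_bytes;
} txg_stats_t;

typedef struct queue_ops {
    bool (*fetch_ext)(void *q, uint64_t *start, uint64_t *end);
    void (*gather)(void *q, uint64_t start, uint64_t end);
    bool (*issue)(void *q);        /* true => suspended mid-issue */
    void (*reinsert_leftovers)(void *q);
} queue_ops_t;

static void
queue_run_one_txg(void *q, const queue_ops_t *ops, txg_stats_t *st)
{
    uint64_t start, end;

    /* Per-txg accounting starts from zero each sync pass. */
    st->segs = 0;
    st->seg_bytes = 0;

    while (ops->fetch_ext(q, &start, &end)) {
        ops->gather(q, start, end);
        bool suspended = ops->issue(q);

        st->segs++;
        st->seg_bytes += end - start;

        if (suspended) {
            /* Put anything gathered but not yet issued back. */
            ops->reinsert_leftovers(q);
            break;
        }
    }
}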
3104 dsl_scan_io_queue_t *queue = vd->vdev_scan_io_queue; in dsl_scan_update_stats() local
3106 if (queue == NULL) in dsl_scan_update_stats()
3109 seg_size_total += queue->q_total_seg_size_this_txg; in dsl_scan_update_stats()
3110 zio_size_total += queue->q_total_zio_size_this_txg; in dsl_scan_update_stats()
3111 seg_count_total += queue->q_segs_this_txg; in dsl_scan_update_stats()
3112 zio_count_total += queue->q_zios_this_txg; in dsl_scan_update_stats()
3743 scan_io_queue_insert_impl(dsl_scan_io_queue_t *queue, scan_io_t *sio) in scan_io_queue_insert_impl() argument
3747 dsl_scan_t *scn = queue->q_scn; in scan_io_queue_insert_impl()
3749 ASSERT(MUTEX_HELD(&queue->q_vd->vdev_scan_io_queue_lock)); in scan_io_queue_insert_impl()
3751 if (avl_find(&queue->q_sios_by_addr, sio, &idx) != NULL) { in scan_io_queue_insert_impl()
3757 avl_insert(&queue->q_sios_by_addr, sio, idx); in scan_io_queue_insert_impl()
3758 queue->q_sio_memused += SIO_GET_MUSED(sio); in scan_io_queue_insert_impl()
3759 range_tree_add(queue->q_exts_by_addr, SIO_GET_OFFSET(sio), asize); in scan_io_queue_insert_impl()
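
scan_io_queue_insert_impl() adds a scan I/O to the by-address AVL tree unless an identical one is already queued, charges q_sio_memused, and adds the I/O's range to the extent tree so nearby I/Os coalesce into larger extents. A minimal sketch with an invented sorted linked list standing in for the AVL tree; the extent-tree side is only noted in a comment, and the names are not OpenZFS functions.

#include <stdbool.h>
#include <stdint.h>
#include <stdlib.h>

typedef struct sio_node {
    struct sio_node *next;
    uint64_t offset;
    uint64_t asize;
} sio_node_t;

typedef struct sio_queue {
    sio_node_t *head;    /* sorted by offset */
    uint64_t memused;    /* bytes charged for queued I/Os */
    uint64_t dupes;      /* duplicate inserts dropped */
} sio_queue_t;

static bool
sio_queue_insert(sio_queue_t *q, uint64_t offset, uint64_t asize)
{
    sio_node_t **pp = &q->head;

    while (*pp != NULL && (*pp)->offset < offset)
        pp = &(*pp)->next;
    if (*pp != NULL && (*pp)->offset == offset) {
        q->dupes++;        /* already queued: drop it */
        return (false);
    }

    sio_node_t *sio = malloc(sizeof (*sio));
    if (sio == NULL)
        return (false);
    sio->offset = offset;
    sio->asize = asize;
    sio->next = *pp;
    *pp = sio;
    q->memused += sizeof (*sio);
    /* real code: range_tree_add(exts_by_addr, offset, asize) here */
    return (true);
}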
3769 scan_io_queue_insert(dsl_scan_io_queue_t *queue, const blkptr_t *bp, int dva_i, in scan_io_queue_insert() argument
3772 dsl_scan_t *scn = queue->q_scn; in scan_io_queue_insert()
3776 ASSERT(MUTEX_HELD(&queue->q_vd->vdev_scan_io_queue_lock)); in scan_io_queue_insert()
3789 scan_io_queue_insert_impl(queue, sio); in scan_io_queue_insert()
3897 dsl_scan_io_queue_t *queue = zio->io_private; in dsl_scan_scrub_done() local
3901 if (queue == NULL) { in dsl_scan_scrub_done()
3908 mutex_enter(&queue->q_vd->vdev_scan_io_queue_lock); in dsl_scan_scrub_done()
3909 ASSERT3U(queue->q_inflight_bytes, >=, BP_GET_PSIZE(bp)); in dsl_scan_scrub_done()
3910 queue->q_inflight_bytes -= BP_GET_PSIZE(bp); in dsl_scan_scrub_done()
3911 cv_broadcast(&queue->q_zio_cv); in dsl_scan_scrub_done()
3912 mutex_exit(&queue->q_vd->vdev_scan_io_queue_lock); in dsl_scan_scrub_done()
3930 const zbookmark_phys_t *zb, dsl_scan_io_queue_t *queue) in scan_exec_io() argument
3937 if (queue == NULL) { in scan_exec_io()
3944 kmutex_t *q_lock = &queue->q_vd->vdev_scan_io_queue_lock; in scan_exec_io()
3947 while (queue->q_inflight_bytes >= queue->q_maxinflight_bytes) in scan_exec_io()
3948 cv_wait(&queue->q_zio_cv, q_lock); in scan_exec_io()
3949 queue->q_inflight_bytes += BP_GET_PSIZE(bp); in scan_exec_io()
3955 dsl_scan_scrub_done, queue, ZIO_PRIORITY_SCRUB, zio_flags, zb)); in scan_exec_io()
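
Together, scan_exec_io() and dsl_scan_scrub_done() form a simple byte-counted throttle: the issuer blocks while q_inflight_bytes has reached q_maxinflight_bytes, and each completion subtracts its size and broadcasts the condition variable. A user-space sketch of that pattern with pthreads standing in for kmutex_t and kcondvar_t; the names are illustrative.

#include <pthread.h>
#include <stdint.h>

typedef struct inflight_throttle {
    pthread_mutex_t lock;
    pthread_cond_t cv;
    uint64_t inflight_bytes;
    uint64_t maxinflight_bytes;
} inflight_throttle_t;

/* Called before issuing a read of `bytes`; blocks while over the limit. */
static void
throttle_enter(inflight_throttle_t *t, uint64_t bytes)
{
    pthread_mutex_lock(&t->lock);
    while (t->inflight_bytes >= t->maxinflight_bytes)
        pthread_cond_wait(&t->cv, &t->lock);
    t->inflight_bytes += bytes;
    pthread_mutex_unlock(&t->lock);
}

/* Called from the I/O completion path. */
static void
throttle_exit(inflight_throttle_t *t, uint64_t bytes)
{
    pthread_mutex_lock(&t->lock);
    t->inflight_bytes -= bytes;
    pthread_cond_broadcast(&t->cv);
    pthread_mutex_unlock(&t->lock);
}

The broadcast mirrors the cv_broadcast() in the listing; every waiter rechecks the limit after waking, so a completion that frees headroom for several issuers lets them all proceed.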
4053 dsl_scan_io_queue_destroy(dsl_scan_io_queue_t *queue) in dsl_scan_io_queue_destroy() argument
4055 dsl_scan_t *scn = queue->q_scn; in dsl_scan_io_queue_destroy()
4060 ASSERT(MUTEX_HELD(&queue->q_vd->vdev_scan_io_queue_lock)); in dsl_scan_io_queue_destroy()
4062 while ((sio = avl_destroy_nodes(&queue->q_sios_by_addr, &cookie)) != in dsl_scan_io_queue_destroy()
4064 ASSERT(range_tree_contains(queue->q_exts_by_addr, in dsl_scan_io_queue_destroy()
4067 queue->q_sio_memused -= SIO_GET_MUSED(sio); in dsl_scan_io_queue_destroy()
4071 ASSERT0(queue->q_sio_memused); in dsl_scan_io_queue_destroy()
4073 range_tree_vacate(queue->q_exts_by_addr, NULL, queue); in dsl_scan_io_queue_destroy()
4074 range_tree_destroy(queue->q_exts_by_addr); in dsl_scan_io_queue_destroy()
4075 avl_destroy(&queue->q_sios_by_addr); in dsl_scan_io_queue_destroy()
4076 cv_destroy(&queue->q_zio_cv); in dsl_scan_io_queue_destroy()
4078 kmem_free(queue, sizeof (*queue)); in dsl_scan_io_queue_destroy()
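
dsl_scan_io_queue_destroy() tears the queue down by destroying every node in the by-address tree, uncharging q_sio_memused as it goes and asserting it drains to zero, then vacating and destroying the extent tree and freeing the queue itself. A compact sketch of the same drain-and-free shape over an invented linked-list queue (same shape as the insert sketch above).

#include <assert.h>
#include <stdint.h>
#include <stdlib.h>

typedef struct dsio_node { struct dsio_node *next; uint64_t offset, asize; } dsio_node_t;
typedef struct dsio_queue { dsio_node_t *head; uint64_t memused; } dsio_queue_t;

static void
dsio_queue_destroy(dsio_queue_t *q)
{
    dsio_node_t *sio;

    while ((sio = q->head) != NULL) {
        q->head = sio->next;
        q->memused -= sizeof (*sio);    /* uncharge before freeing */
        free(sio);
    }
    assert(q->memused == 0);    /* mirrors ASSERT0(queue->q_sio_memused) */
    free(q);
}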
4125 dsl_scan_io_queue_t *queue; in dsl_scan_freed_dva() local
4133 queue = vdev->vdev_scan_io_queue; in dsl_scan_freed_dva()
4136 if (queue == NULL) { in dsl_scan_freed_dva()
4164 sio = avl_find(&queue->q_sios_by_addr, srch_sio, &idx); in dsl_scan_freed_dva()
4174 avl_remove(&queue->q_sios_by_addr, sio); in dsl_scan_freed_dva()
4175 queue->q_sio_memused -= SIO_GET_MUSED(sio); in dsl_scan_freed_dva()
4177 ASSERT(range_tree_contains(queue->q_exts_by_addr, start, size)); in dsl_scan_freed_dva()
4178 range_tree_remove_fill(queue->q_exts_by_addr, start, size); in dsl_scan_freed_dva()
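
Finally, dsl_scan_freed_dva() handles blocks freed while still queued: the matching scan I/O is looked up by offset, removed from the by-address tree, its memory charge dropped, and its range removed from the extent tree so the scrubber never issues a read against freed space. A small sketch of that removal, again over an invented sorted list rather than the real AVL and range trees.

#include <stdint.h>
#include <stdlib.h>

typedef struct fsio_node { struct fsio_node *next; uint64_t offset, asize; } fsio_node_t;
typedef struct fsio_queue { fsio_node_t *head; uint64_t memused; } fsio_queue_t;

/* Drop a queued scan I/O whose block was freed; returns 1 if one was found. */
static int
queue_drop_freed(fsio_queue_t *q, uint64_t offset)
{
    for (fsio_node_t **pp = &q->head; *pp != NULL; pp = &(*pp)->next) {
        if ((*pp)->offset == offset) {
            fsio_node_t *sio = *pp;
            *pp = sio->next;
            q->memused -= sizeof (*sio);
            /* real code also shrinks the extent range tree here */
            free(sio);
            return (1);
        }
        if ((*pp)->offset > offset)
            break;    /* list is sorted: this block was never queued */
    }
    return (0);
}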