Lines Matching full:sm
87 space_map_iterate(space_map_t *sm, uint64_t end, sm_cb_t callback, void *arg) in space_map_iterate() argument
89 uint64_t blksz = sm->sm_blksz; in space_map_iterate()
92 ASSERT3U(end, <=, space_map_length(sm)); in space_map_iterate()
95 dmu_prefetch(sm->sm_os, space_map_object(sm), 0, 0, end, in space_map_iterate()
103 error = dmu_buf_hold(sm->sm_os, space_map_object(sm), in space_map_iterate()
167 uint64_t entry_offset = (raw_offset << sm->sm_shift) + in space_map_iterate()
168 sm->sm_start; in space_map_iterate()
169 uint64_t entry_run = raw_run << sm->sm_shift; in space_map_iterate()
171 VERIFY0(P2PHASE(entry_offset, 1ULL << sm->sm_shift)); in space_map_iterate()
172 VERIFY0(P2PHASE(entry_run, 1ULL << sm->sm_shift)); in space_map_iterate()
173 ASSERT3U(entry_offset, >=, sm->sm_start); in space_map_iterate()
174 ASSERT3U(entry_offset, <, sm->sm_start + sm->sm_size); in space_map_iterate()
175 ASSERT3U(entry_run, <=, sm->sm_size); in space_map_iterate()
177 sm->sm_start + sm->sm_size); in space_map_iterate()
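
The space_map_iterate() hits above (source lines 167-177) show how a raw entry is decoded: the stored offset and run are in units of 1 << sm_shift, relative to sm_start, and the result is bounds-checked against the region the map covers. A minimal standalone sketch of that arithmetic; the struct and helper below are hypothetical stand-ins, not the real space_map_t or ZFS API.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-in for the space_map_t fields the hits rely on. */
struct sm_fields {
        uint64_t sm_start;      /* first byte offset covered by the map */
        uint64_t sm_size;       /* bytes covered by the map */
        uint64_t sm_shift;      /* log2 of the allocation unit */
};

/* Decode a raw (offset, run) pair into byte values, as lines 167-169 do. */
static void
decode_entry(const struct sm_fields *sm, uint64_t raw_offset, uint64_t raw_run,
    uint64_t *entry_offset, uint64_t *entry_run)
{
        *entry_offset = (raw_offset << sm->sm_shift) + sm->sm_start;
        *entry_run = raw_run << sm->sm_shift;

        /* Mirrors the VERIFY/ASSERT bounds checks at lines 171-177. */
        assert(*entry_offset % (1ULL << sm->sm_shift) == 0);
        assert(*entry_run % (1ULL << sm->sm_shift) == 0);
        assert(*entry_offset >= sm->sm_start);
        assert(*entry_offset < sm->sm_start + sm->sm_size);
        assert(*entry_run <= sm->sm_size);
        assert(*entry_offset + *entry_run <= sm->sm_start + sm->sm_size);
}

int
main(void)
{
        struct sm_fields sm = { .sm_start = 1ULL << 20, .sm_size = 1ULL << 30,
            .sm_shift = 9 };
        uint64_t off, run;

        decode_entry(&sm, 128, 16, &off, &run);
        printf("entry: offset %llu, run %llu\n",
            (unsigned long long)off, (unsigned long long)run);
        return (0);
}
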
203 space_map_reversed_last_block_entries(space_map_t *sm, uint64_t *buf, in space_map_reversed_last_block_entries() argument
215 sm->sm_phys->smp_length - sizeof (uint64_t); in space_map_reversed_last_block_entries()
216 error = dmu_buf_hold(sm->sm_os, space_map_object(sm), last_word_offset, in space_map_reversed_last_block_entries()
221 ASSERT3U(sm->sm_object, ==, db->db_object); in space_map_reversed_last_block_entries()
222 ASSERT3U(sm->sm_blksz, ==, db->db_size); in space_map_reversed_last_block_entries()
228 (sm->sm_phys->smp_length - db->db_offset) / sizeof (uint64_t); in space_map_reversed_last_block_entries()
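
space_map_reversed_last_block_entries() holds the block containing the last word of the on-disk log (smp_length - sizeof (uint64_t), line 215) and computes how many words of that block are in use (line 228). A hedged sketch of that bookkeeping; db_offset here stands in for the block-aligned offset the real code reads back from the dmu buffer it holds.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/*
 * Given the current log length and the block size, compute the offset of
 * the block holding the last word and the number of valid words in it.
 */
static void
last_block_words(uint64_t smp_length, uint64_t blksz,
    uint64_t *db_offset, uint64_t *nwords)
{
        assert(smp_length >= sizeof (uint64_t));
        assert(smp_length % sizeof (uint64_t) == 0);

        uint64_t last_word_offset = smp_length - sizeof (uint64_t);

        *db_offset = (last_word_offset / blksz) * blksz; /* block-aligned */
        *nwords = (smp_length - *db_offset) / sizeof (uint64_t);
}

int
main(void)
{
        uint64_t db_offset, nwords;

        last_block_words(4096 + 3 * sizeof (uint64_t), 4096,
            &db_offset, &nwords);
        printf("block at %llu holds %llu words\n",
            (unsigned long long)db_offset, (unsigned long long)nwords);
        return (0);
}
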
276 space_map_incremental_destroy(space_map_t *sm, sm_cb_t callback, void *arg, in space_map_incremental_destroy() argument
279 uint64_t bufsz = MAX(sm->sm_blksz, SPA_MINBLOCKSIZE); in space_map_incremental_destroy()
282 dmu_buf_will_dirty(sm->sm_dbuf, tx); in space_map_incremental_destroy()
314 while (space_map_length(sm) > 0 && error == 0) { in space_map_incremental_destroy()
316 error = space_map_reversed_last_block_entries(sm, buf, bufsz, in space_map_incremental_destroy()
327 sm->sm_phys->smp_length -= sizeof (uint64_t); in space_map_incremental_destroy()
357 (raw_offset << sm->sm_shift) + sm->sm_start; in space_map_incremental_destroy()
358 uint64_t entry_run = raw_run << sm->sm_shift; in space_map_incremental_destroy()
360 VERIFY0(P2PHASE(entry_offset, 1ULL << sm->sm_shift)); in space_map_incremental_destroy()
361 VERIFY0(P2PHASE(entry_run, 1ULL << sm->sm_shift)); in space_map_incremental_destroy()
362 VERIFY3U(entry_offset, >=, sm->sm_start); in space_map_incremental_destroy()
363 VERIFY3U(entry_offset, <, sm->sm_start + sm->sm_size); in space_map_incremental_destroy()
364 VERIFY3U(entry_run, <=, sm->sm_size); in space_map_incremental_destroy()
366 sm->sm_start + sm->sm_size); in space_map_incremental_destroy()
379 sm->sm_phys->smp_alloc -= entry_run; in space_map_incremental_destroy()
381 sm->sm_phys->smp_alloc += entry_run; in space_map_incremental_destroy()
382 sm->sm_phys->smp_length -= words * sizeof (uint64_t); in space_map_incremental_destroy()
386 if (space_map_length(sm) == 0) { in space_map_incremental_destroy()
388 ASSERT0(space_map_allocated(sm)); in space_map_incremental_destroy()
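
The space_map_incremental_destroy() hits show the log being unwound from the tail: entries from the last block are handed to the caller's sm_cb_t callback (line 276), debug-only words just drop one word of length (line 327), and real entries adjust smp_alloc one way or the other (lines 379/381) before smp_length shrinks by their word count (line 382); an empty map must then account for zero allocated space (line 388). A toy in-memory sketch of that invariant, with a simplified entry format that is only illustrative.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

enum toy_type { TOY_ALLOC, TOY_FREE };

struct toy_entry {
        enum toy_type type;
        uint64_t run;           /* bytes covered by the entry */
        unsigned words;         /* 1 or 2 words on disk */
};

int
main(void)
{
        /* A tiny log: allocate 8K, allocate 4K, free 4K. */
        struct toy_entry log[] = {
                { TOY_ALLOC, 8192, 1 },
                { TOY_ALLOC, 4096, 2 },
                { TOY_FREE,  4096, 1 },
        };
        int n = sizeof (log) / sizeof (log[0]);
        uint64_t alloc = 8192;                          /* net allocated */
        uint64_t length = 4 * sizeof (uint64_t);        /* 1 + 2 + 1 words */

        /*
         * Unwind from the tail, undoing each entry's effect on 'alloc'.
         * Assumed reading of lines 379/381: removing an ALLOC entry
         * reduces the counter, removing a FREE entry adds it back.
         */
        for (int i = n - 1; i >= 0 && length > 0; i--) {
                if (log[i].type == TOY_ALLOC)
                        alloc -= log[i].run;
                else
                        alloc += log[i].run;
                length -= log[i].words * sizeof (uint64_t);
        }

        /* An empty map must account for no allocated space (line 388). */
        assert(length == 0);
        assert(alloc == 0);
        printf("log fully destroyed, alloc back to zero\n");
        return (0);
}
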
423 space_map_load_length(space_map_t *sm, zfs_range_tree_t *rt, maptype_t maptype, in space_map_load_length() argument
431 zfs_range_tree_add(rt, sm->sm_start, sm->sm_size); in space_map_load_length()
434 smla.smla_sm = sm; in space_map_load_length()
436 int err = space_map_iterate(sm, length, in space_map_load_length()
450 space_map_load(space_map_t *sm, zfs_range_tree_t *rt, maptype_t maptype) in space_map_load() argument
452 return (space_map_load_length(sm, rt, maptype, space_map_length(sm))); in space_map_load()
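
space_map_load_length() seeds the range tree with the whole [sm_start, sm_start + sm_size) region (line 431; in the full source this pre-fill applies to the free-space view) and then replays the log through space_map_iterate(). A toy sketch of the idea that a free-space view starts full and allocations carve pieces out of it; a single byte counter stands in for the range tree the real code populates.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

enum toy_type { TOY_ALLOC, TOY_FREE };

struct toy_entry {
        enum toy_type type;
        uint64_t run;
};

/* Start with everything free, then replay the log entries in order. */
static uint64_t
load_free_bytes(uint64_t sm_size, const struct toy_entry *log, int n)
{
        uint64_t free_bytes = sm_size;  /* the pre-fill at line 431 */

        for (int i = 0; i < n; i++) {
                if (log[i].type == TOY_ALLOC) {
                        assert(free_bytes >= log[i].run);
                        free_bytes -= log[i].run;
                } else {
                        free_bytes += log[i].run;
                }
        }
        return (free_bytes);
}

int
main(void)
{
        struct toy_entry log[] = {
                { TOY_ALLOC, 8192 }, { TOY_ALLOC, 4096 }, { TOY_FREE, 4096 },
        };

        printf("free: %llu bytes\n", (unsigned long long)load_free_bytes(
            1ULL << 20, log, 3));
        return (0);
}
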
456 space_map_histogram_clear(space_map_t *sm) in space_map_histogram_clear() argument
458 if (sm->sm_dbuf->db_size != sizeof (space_map_phys_t)) in space_map_histogram_clear()
461 memset(sm->sm_phys->smp_histogram, 0, in space_map_histogram_clear()
462 sizeof (sm->sm_phys->smp_histogram)); in space_map_histogram_clear()
466 space_map_histogram_verify(space_map_t *sm, zfs_range_tree_t *rt) in space_map_histogram_verify() argument
472 for (int i = 0; i < sm->sm_shift; i++) { in space_map_histogram_verify()
480 space_map_histogram_add(space_map_t *sm, zfs_range_tree_t *rt, dmu_tx_t *tx) in space_map_histogram_add() argument
485 VERIFY3U(space_map_object(sm), !=, 0); in space_map_histogram_add()
487 if (sm->sm_dbuf->db_size != sizeof (space_map_phys_t)) in space_map_histogram_add()
490 dmu_buf_will_dirty(sm->sm_dbuf, tx); in space_map_histogram_add()
492 ASSERT(space_map_histogram_verify(sm, rt)); in space_map_histogram_add()
501 for (int i = sm->sm_shift; i < ZFS_RANGE_TREE_HISTOGRAM_SIZE; i++) { in space_map_histogram_add()
512 ASSERT3U(i, >=, idx + sm->sm_shift); in space_map_histogram_add()
513 sm->sm_phys->smp_histogram[idx] += in space_map_histogram_add()
514 rt->rt_histogram[i] << (i - idx - sm->sm_shift); in space_map_histogram_add()
522 ASSERT3U(idx + sm->sm_shift, ==, i); in space_map_histogram_add()
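
space_map_histogram_add() folds the range tree's power-of-two histogram into the space map's, whose buckets start at sm_shift; once the map's last bucket is reached, larger size classes keep accumulating there, scaled by 1 << (i - idx - sm_shift) so the space they represent is preserved (lines 501-522). A standalone sketch of that index mapping; the histogram sizes are assumed values and the bucket counts are arbitrary.

#include <stdint.h>
#include <stdio.h>

#define RT_HISTOGRAM_SIZE       64      /* range-tree size classes (assumed) */
#define SM_HISTOGRAM_SIZE       32      /* space-map buckets (assumed) */

int
main(void)
{
        uint64_t rt_histogram[RT_HISTOGRAM_SIZE] = { 0 };
        uint64_t smp_histogram[SM_HISTOGRAM_SIZE] = { 0 };
        uint64_t sm_shift = 9;

        rt_histogram[12] = 10;  /* ten segments in the 4K size class */
        rt_histogram[45] = 2;   /* two segments far beyond the last bucket */

        /*
         * Map range-tree class i to space-map bucket i - sm_shift,
         * clamping at the last bucket and scaling the count so the
         * represented space stays the same (mirrors lines 501-514).
         */
        for (uint64_t i = sm_shift, idx = 0; i < RT_HISTOGRAM_SIZE; i++) {
                smp_histogram[idx] += rt_histogram[i] << (i - idx - sm_shift);
                if (idx < SM_HISTOGRAM_SIZE - 1)
                        idx++;
        }

        printf("bucket %llu: %llu, last bucket: %llu\n",
            (unsigned long long)(12 - sm_shift),
            (unsigned long long)smp_histogram[12 - sm_shift],
            (unsigned long long)smp_histogram[SM_HISTOGRAM_SIZE - 1]);
        return (0);
}
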
530 space_map_write_intro_debug(space_map_t *sm, maptype_t maptype, dmu_tx_t *tx) in space_map_write_intro_debug() argument
532 dmu_buf_will_dirty(sm->sm_dbuf, tx); in space_map_write_intro_debug()
539 dmu_write(sm->sm_os, space_map_object(sm), sm->sm_phys->smp_length, in space_map_write_intro_debug()
542 sm->sm_phys->smp_length += sizeof (dentry); in space_map_write_intro_debug()
553 space_map_write_seg(space_map_t *sm, uint64_t rstart, uint64_t rend, in space_map_write_seg() argument
570 ASSERT3U(db->db_size, ==, sm->sm_blksz); in space_map_write_seg()
573 uint64_t *block_end = block_base + (sm->sm_blksz / sizeof (uint64_t)); in space_map_write_seg()
575 (sm->sm_phys->smp_length - db->db_offset) / sizeof (uint64_t); in space_map_write_seg()
579 uint64_t size = (rend - rstart) >> sm->sm_shift; in space_map_write_seg()
580 uint64_t start = (rstart - sm->sm_start) >> sm->sm_shift; in space_map_write_seg()
583 ASSERT3U(rstart, >=, sm->sm_start); in space_map_write_seg()
584 ASSERT3U(rstart, <, sm->sm_start + sm->sm_size); in space_map_write_seg()
585 ASSERT3U(rend - rstart, <=, sm->sm_size); in space_map_write_seg()
586 ASSERT3U(rend, <=, sm->sm_start + sm->sm_size); in space_map_write_seg()
598 uint64_t next_word_offset = sm->sm_phys->smp_length; in space_map_write_seg()
599 VERIFY0(dmu_buf_hold(sm->sm_os, in space_map_write_seg()
600 space_map_object(sm), next_word_offset, in space_map_write_seg()
607 ASSERT3U(db->db_size, ==, sm->sm_blksz); in space_map_write_seg()
628 sm->sm_phys->smp_length += sizeof (uint64_t); in space_map_write_seg()
659 sm->sm_phys->smp_length += words * sizeof (uint64_t); in space_map_write_seg()
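
space_map_write_seg() does the inverse of the decode in space_map_iterate(): the segment's start and size are converted into sm_shift units relative to sm_start (lines 579-580), and every word appended to the log advances smp_length (lines 628/659), so the next append lands in the block held at offset smp_length (lines 598-600). A sketch of the encoding side, using the same hypothetical field struct as the earlier decode example.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-in for the fields write_seg uses. */
struct sm_fields {
        uint64_t sm_start;
        uint64_t sm_size;
        uint64_t sm_shift;
};

/* Encode a byte range into the (start, size) units stored on disk. */
static void
encode_seg(const struct sm_fields *sm, uint64_t rstart, uint64_t rend,
    uint64_t *start, uint64_t *size)
{
        /* Bounds checks mirroring lines 583-586. */
        assert(rstart >= sm->sm_start);
        assert(rstart < sm->sm_start + sm->sm_size);
        assert(rend - rstart <= sm->sm_size);
        assert(rend <= sm->sm_start + sm->sm_size);

        *start = (rstart - sm->sm_start) >> sm->sm_shift;
        *size = (rend - rstart) >> sm->sm_shift;
}

int
main(void)
{
        struct sm_fields sm = { .sm_start = 1ULL << 20, .sm_size = 1ULL << 30,
            .sm_shift = 9 };
        uint64_t start, size, smp_length = 0;

        encode_seg(&sm, sm.sm_start + 65536, sm.sm_start + 65536 + 8192,
            &start, &size);

        /* Appending, say, a two-word entry grows the log (line 659). */
        smp_length += 2 * sizeof (uint64_t);
        printf("start %llu size %llu, log now %llu bytes\n",
            (unsigned long long)start, (unsigned long long)size,
            (unsigned long long)smp_length);
        return (0);
}
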
673 space_map_write_impl(space_map_t *sm, zfs_range_tree_t *rt, maptype_t maptype, in space_map_write_impl() argument
679 space_map_write_intro_debug(sm, maptype, tx); in space_map_write_impl()
686 uint64_t initial_objsize = sm->sm_phys->smp_length; in space_map_write_impl()
688 space_map_estimate_optimal_size(sm, rt, SM_NO_VDEVID); in space_map_write_impl()
697 uint64_t next_word_offset = sm->sm_phys->smp_length; in space_map_write_impl()
698 VERIFY0(dmu_buf_hold(sm->sm_os, space_map_object(sm), in space_map_write_impl()
700 ASSERT3U(db->db_size, ==, sm->sm_blksz); in space_map_write_impl()
708 uint64_t offset = (zfs_rs_get_start(rs, rt) - sm->sm_start) >> in space_map_write_impl()
709 sm->sm_shift; in space_map_write_impl()
711 zfs_rs_get_start(rs, rt)) >> sm->sm_shift; in space_map_write_impl()
736 space_map_write_seg(sm, zfs_rs_get_start(rs, rt), in space_map_write_impl()
750 ASSERT3U(estimated_final_objsize, >=, sm->sm_phys->smp_length); in space_map_write_impl()
760 space_map_write(space_map_t *sm, zfs_range_tree_t *rt, maptype_t maptype, in space_map_write() argument
763 ASSERT(dsl_pool_sync_context(dmu_objset_pool(sm->sm_os))); in space_map_write()
764 VERIFY3U(space_map_object(sm), !=, 0); in space_map_write()
766 dmu_buf_will_dirty(sm->sm_dbuf, tx); in space_map_write()
773 sm->sm_phys->smp_object = sm->sm_object; in space_map_write()
776 VERIFY3U(sm->sm_object, ==, sm->sm_phys->smp_object); in space_map_write()
781 sm->sm_phys->smp_alloc += zfs_range_tree_space(rt); in space_map_write()
783 sm->sm_phys->smp_alloc -= zfs_range_tree_space(rt); in space_map_write()
788 space_map_write_impl(sm, rt, maptype, vdev_id, tx); in space_map_write()
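
space_map_write() keeps the running net-allocated counter in the header: writing a tree of allocations adds its total space to smp_alloc, writing a tree of frees subtracts it (lines 781/783), before the entries themselves are appended by space_map_write_impl(). A toy version of that accounting, with hypothetical names.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

enum toy_maptype { TOY_ALLOC, TOY_FREE };

/*
 * Update the net-allocated counter for a batch of segments totalling
 * 'space' bytes, the way lines 781/783 adjust smp_alloc.
 */
static void
account_write(uint64_t *smp_alloc, enum toy_maptype maptype, uint64_t space)
{
        if (maptype == TOY_ALLOC)
                *smp_alloc += space;
        else
                *smp_alloc -= space;
}

int
main(void)
{
        uint64_t smp_alloc = 0;

        account_write(&smp_alloc, TOY_ALLOC, 1ULL << 20);       /* alloc 1M */
        account_write(&smp_alloc, TOY_FREE, 1ULL << 19);        /* free 512K */
        assert(smp_alloc == (1ULL << 19));
        printf("net allocated: %llu bytes\n", (unsigned long long)smp_alloc);
        return (0);
}
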
799 space_map_open_impl(space_map_t *sm) in space_map_open_impl() argument
804 error = dmu_bonus_hold(sm->sm_os, sm->sm_object, sm, &sm->sm_dbuf); in space_map_open_impl()
808 dmu_object_size_from_db(sm->sm_dbuf, &sm->sm_blksz, &blocks); in space_map_open_impl()
809 sm->sm_phys = sm->sm_dbuf->db_data; in space_map_open_impl()
817 space_map_t *sm; in space_map_open() local
824 sm = kmem_alloc(sizeof (space_map_t), KM_SLEEP); in space_map_open()
826 sm->sm_start = start; in space_map_open()
827 sm->sm_size = size; in space_map_open()
828 sm->sm_shift = shift; in space_map_open()
829 sm->sm_os = os; in space_map_open()
830 sm->sm_object = object; in space_map_open()
831 sm->sm_blksz = 0; in space_map_open()
832 sm->sm_dbuf = NULL; in space_map_open()
833 sm->sm_phys = NULL; in space_map_open()
835 error = space_map_open_impl(sm); in space_map_open()
837 space_map_close(sm); in space_map_open()
840 *smp = sm; in space_map_open()
846 space_map_close(space_map_t *sm) in space_map_close() argument
848 if (sm == NULL) in space_map_close()
851 if (sm->sm_dbuf != NULL) in space_map_close()
852 dmu_buf_rele(sm->sm_dbuf, sm); in space_map_close()
853 sm->sm_dbuf = NULL; in space_map_close()
854 sm->sm_phys = NULL; in space_map_close()
856 kmem_free(sm, sizeof (*sm)); in space_map_close()
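
space_map_open() allocates the in-core structure, fills in the caller-supplied geometry and object identity (lines 826-833), and lets space_map_open_impl() attach the dbuf; on failure it reuses space_map_close() (line 837), which tolerates a partially initialized map (NULL dbuf/phys, lines 851-854). A simplified lifecycle sketch with stand-in types, not the real ZFS interfaces.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Hypothetical, much-reduced stand-in for space_map_t. */
struct toy_sm {
        uint64_t sm_start;
        uint64_t sm_size;
        uint64_t sm_shift;
        uint64_t sm_object;
        void *sm_dbuf;          /* NULL until "opened" */
};

/* Stand-in for space_map_open_impl(): pretend to attach the dbuf. */
static int
toy_open_impl(struct toy_sm *sm)
{
        sm->sm_dbuf = malloc(1);
        return (sm->sm_dbuf == NULL ? -1 : 0);
}

/* Like space_map_close(): safe on NULL and on partially opened maps. */
static void
toy_close(struct toy_sm *sm)
{
        if (sm == NULL)
                return;
        free(sm->sm_dbuf);
        sm->sm_dbuf = NULL;
        free(sm);
}

static int
toy_open(struct toy_sm **smp, uint64_t object, uint64_t start, uint64_t size,
    uint64_t shift)
{
        struct toy_sm *sm = calloc(1, sizeof (*sm));

        if (sm == NULL)
                return (-1);
        sm->sm_start = start;
        sm->sm_size = size;
        sm->sm_shift = shift;
        sm->sm_object = object;

        int error = toy_open_impl(sm);
        if (error != 0) {
                toy_close(sm);          /* cleanup path, as at line 837 */
                return (error);
        }
        *smp = sm;
        return (0);
}

int
main(void)
{
        struct toy_sm *sm = NULL;

        if (toy_open(&sm, 42, 0, 1ULL << 30, 9) == 0) {
                printf("opened object %llu\n",
                    (unsigned long long)sm->sm_object);
                toy_close(sm);
        }
        return (0);
}
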
860 space_map_truncate(space_map_t *sm, int blocksize, dmu_tx_t *tx) in space_map_truncate() argument
862 objset_t *os = sm->sm_os; in space_map_truncate()
870 dmu_object_info_from_db(sm->sm_dbuf, &doi); in space_map_truncate()
884 zfs_dbgmsg("txg %llu, spa %s, sm %px, reallocating " in space_map_truncate()
886 (u_longlong_t)dmu_tx_get_txg(tx), spa_name(spa), sm, in space_map_truncate()
887 (u_longlong_t)sm->sm_object, in space_map_truncate()
891 space_map_free(sm, tx); in space_map_truncate()
892 dmu_buf_rele(sm->sm_dbuf, sm); in space_map_truncate()
894 sm->sm_object = space_map_alloc(sm->sm_os, blocksize, tx); in space_map_truncate()
895 VERIFY0(space_map_open_impl(sm)); in space_map_truncate()
897 VERIFY0(dmu_free_range(os, space_map_object(sm), 0, -1ULL, tx)); in space_map_truncate()
904 memset(sm->sm_phys->smp_histogram, 0, in space_map_truncate()
905 sizeof (sm->sm_phys->smp_histogram)); in space_map_truncate()
908 dmu_buf_will_dirty(sm->sm_dbuf, tx); in space_map_truncate()
909 sm->sm_phys->smp_length = 0; in space_map_truncate()
910 sm->sm_phys->smp_alloc = 0; in space_map_truncate()
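
space_map_truncate() has two paths: when the on-disk object does not match the requested shape (the dmu_object_info_from_db() check around line 870), the object is freed and reallocated (lines 891-895); otherwise its contents are punched out with dmu_free_range() (line 897). Either way the map ends up logically empty: histogram zeroed, smp_length and smp_alloc reset (lines 904-910). A toy sketch of that reset on an in-memory stand-in for the header.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Hypothetical stand-in for the header fields reset by truncate. */
struct toy_phys {
        uint64_t smp_length;
        uint64_t smp_alloc;
        uint64_t smp_histogram[32];
};

/* Reset the header the way lines 904-910 leave it: an empty log. */
static void
toy_truncate_reset(struct toy_phys *phys)
{
        memset(phys->smp_histogram, 0, sizeof (phys->smp_histogram));
        phys->smp_length = 0;
        phys->smp_alloc = 0;
}

int
main(void)
{
        struct toy_phys phys = { .smp_length = 4096, .smp_alloc = 1 << 20 };

        phys.smp_histogram[3] = 7;
        toy_truncate_reset(&phys);
        printf("length %llu alloc %llu hist[3] %llu\n",
            (unsigned long long)phys.smp_length,
            (unsigned long long)phys.smp_alloc,
            (unsigned long long)phys.smp_histogram[3]);
        return (0);
}
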
952 space_map_free(space_map_t *sm, dmu_tx_t *tx) in space_map_free() argument
954 if (sm == NULL) in space_map_free()
957 space_map_free_obj(sm->sm_os, space_map_object(sm), tx); in space_map_free()
958 sm->sm_object = 0; in space_map_free()
967 space_map_estimate_optimal_size(space_map_t *sm, zfs_range_tree_t *rt, in space_map_estimate_optimal_size() argument
970 spa_t *spa = dmu_objset_spa(sm->sm_os); in space_map_estimate_optimal_size()
971 uint64_t shift = sm->sm_shift; in space_map_estimate_optimal_size()
1037 (vdev_id == SM_NO_VDEVID && sm->sm_size < SM_OFFSET_MAX)) { in space_map_estimate_optimal_size()
1083 size += ((size / sm->sm_blksz) + 1) * sizeof (uint64_t); in space_map_estimate_optimal_size()
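
space_map_estimate_optimal_size() sizes the entries first (single-word entries when no vdev id is needed and offsets fit, per the condition at line 1037, otherwise two words), then adds the overhead of one debug word per block the result will span, which is the expression at line 1083. A hedged arithmetic sketch; the real function derives segment counts from the range tree histogram rather than taking them as parameters.

#include <stdint.h>
#include <stdio.h>

/*
 * Estimate the on-disk size of writing 'nsegs' segments, given whether
 * single-word entries suffice and the block size, assuming one debug
 * entry (one word) per block, as line 1083 accounts for.
 */
static uint64_t
estimate_size(uint64_t nsegs, int single_word, uint64_t blksz)
{
        uint64_t entry_words = single_word ? 1 : 2;
        uint64_t size = nsegs * entry_words * sizeof (uint64_t);

        size += ((size / blksz) + 1) * sizeof (uint64_t);
        return (size);
}

int
main(void)
{
        printf("1000 segs, one-word entries, 4K blocks: %llu bytes\n",
            (unsigned long long)estimate_size(1000, 1, 4096));
        printf("1000 segs, two-word entries, 4K blocks: %llu bytes\n",
            (unsigned long long)estimate_size(1000, 0, 4096));
        return (0);
}
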
1089 space_map_object(space_map_t *sm) in space_map_object() argument
1091 return (sm != NULL ? sm->sm_object : 0); in space_map_object()
1095 space_map_allocated(space_map_t *sm) in space_map_allocated() argument
1097 return (sm != NULL ? sm->sm_phys->smp_alloc : 0); in space_map_allocated()
1101 space_map_length(space_map_t *sm) in space_map_length() argument
1103 return (sm != NULL ? sm->sm_phys->smp_length : 0); in space_map_length()
1107 space_map_nblocks(space_map_t *sm) in space_map_nblocks() argument
1109 if (sm == NULL) in space_map_nblocks()
1111 return (DIV_ROUND_UP(space_map_length(sm), sm->sm_blksz)); in space_map_nblocks()
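
The accessors above all tolerate a NULL map, and space_map_nblocks() is a ceiling division of the log length by the block size (line 1111). For reference, a minimal standalone version of that idiom:

#include <stdint.h>
#include <stdio.h>

/* Ceiling division, equivalent to the DIV_ROUND_UP used at line 1111. */
#define DIV_ROUND_UP(n, d)      (((n) + (d) - 1) / (d))

int
main(void)
{
        uint64_t length = 4096 + 8, blksz = 4096;

        printf("%llu bytes -> %llu blocks\n", (unsigned long long)length,
            (unsigned long long)DIV_ROUND_UP(length, blksz));
        return (0);
}
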