Searched refs:TXG_SIZE (Results 1 – 22 of 22) sorted by relevance
40  #define TXG_SIZE 4               /* next power of 2 */
41  #define TXG_MASK (TXG_SIZE - 1)  /* mask for size */
42  #define TXG_INITIAL TXG_SIZE     /* initial txg */
56  struct txg_node *tn_next[TXG_SIZE];
57  uint8_t tn_member[TXG_SIZE];
63  txg_node_t *tl_head[TXG_SIZE];
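The three macros above define the core ring-buffer idiom: ZFS keeps at most TXG_SIZE (4) transaction groups in flight, and because TXG_SIZE is a power of two, a 64-bit txg number maps to its slot with a cheap AND against TXG_MASK. A minimal sketch of that indexing, with hypothetical names (pertxg_bytes, note_dirty) standing in for the real per-txg arrays:

#include <stdint.h>

#define TXG_SIZE    4               /* next power of 2 */
#define TXG_MASK    (TXG_SIZE - 1)  /* mask for size */

static uint64_t pertxg_bytes[TXG_SIZE];  /* hypothetical per-txg counters */

/* Record nbytes of dirty data against the slot owned by txg. */
static void
note_dirty(uint64_t txg, uint64_t nbytes)
{
	/* TXG_SIZE is a power of 2, so (txg & TXG_MASK) == (txg % TXG_SIZE). */
	pertxg_bytes[txg & TXG_MASK] += nbytes;
}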
182  uint8_t dn_next_type[TXG_SIZE];
183  uint8_t dn_next_nblkptr[TXG_SIZE];
184  uint8_t dn_next_nlevels[TXG_SIZE];
185  uint8_t dn_next_indblkshift[TXG_SIZE];
186  uint8_t dn_next_bonustype[TXG_SIZE];
187  uint8_t dn_rm_spillblk[TXG_SIZE];    /* for removing spill blk */
188  uint16_t dn_next_bonuslen[TXG_SIZE];
189  uint32_t dn_next_blksz[TXG_SIZE];    /* next block size in bytes */
197  list_node_t dn_dirty_link[TXG_SIZE]; /* next on dataset's dirty */
201  list_t dn_dirty_records[TXG_SIZE];
[all …]
73  kcondvar_t tc_cv[TXG_SIZE];
74  uint64_t tc_count[TXG_SIZE];    /* tx hold count on each txg */
75  list_t tc_callbacks[TXG_SIZE];  /* commit cb list */
168  range_tree_t *ms_alloctree[TXG_SIZE];
169  range_tree_t *ms_freetree[TXG_SIZE];
100  uint64_t zl_replayed_seq[TXG_SIZE];  /* last replayed rec seq */
120  itxg_t zl_itxg[TXG_SIZE];            /* intent log txg chains */
116  list_t os_dirty_dnodes[TXG_SIZE];
117  list_t os_free_dnodes[TXG_SIZE];
110  uint64_t dd_tempreserved[TXG_SIZE];
112  int64_t dd_space_towrite[TXG_SIZE];
203  uint64_t ds_resume_object[TXG_SIZE];
204  uint64_t ds_resume_offset[TXG_SIZE];
205  uint64_t ds_resume_bytes[TXG_SIZE];
104 uint64_t dp_dirty_pertxg[TXG_SIZE];
167 bplist_t spa_free_bplist[TXG_SIZE]; /* bplist of stuff to free */
131  for (i = 0; i < TXG_SIZE; i++) {      in txg_init()
175  for (i = 0; i < TXG_SIZE; i++) {      in txg_fini()
699  for (t = 0; t < TXG_SIZE; t++)        in txg_list_create()
708  for (t = 0; t < TXG_SIZE; t++)        in txg_list_destroy()
730  for (int i = 0; i < TXG_SIZE; i++) {  in txg_all_lists_empty()
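The txg_init()/txg_fini() and txg_list_create()/txg_list_destroy() pairs above all follow the same shape: every per-txg resource is set up and torn down one slot at a time in a TXG_SIZE loop. A simplified sketch using the tl_head layout shown earlier; my_txg_list_create() is a hypothetical stand-in for the real constructor, and the real txg_list_t carries additional fields (lock, offset) omitted here:

#include <stddef.h>
#include <stdint.h>

#define TXG_SIZE 4

typedef struct txg_node {
	struct txg_node *tn_next[TXG_SIZE];
	uint8_t tn_member[TXG_SIZE];
} txg_node_t;

typedef struct txg_list {
	txg_node_t *tl_head[TXG_SIZE];
} txg_list_t;

/* Hypothetical stand-in for txg_list_create(): one empty list per slot. */
static void
my_txg_list_create(txg_list_t *tl)
{
	for (int t = 0; t < TXG_SIZE; t++)
		tl->tl_head[t] = NULL;
}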
128   for (i = 0; i < TXG_SIZE; i++) {  in dnode_cons()
176   for (i = 0; i < TXG_SIZE; i++) {  in dnode_dest()
266   for (i = 0; i < TXG_SIZE; i++) {  in dnode_verify()
573   for (i = 0; i < TXG_SIZE; i++) {  in dnode_allocate()
744   for (i = 0; i < TXG_SIZE; i++) {  in dnode_move_impl()
808   for (i = 0; i < TXG_SIZE; i++) {  in dnode_move_impl()
1743  for (i = 0; i < TXG_SIZE; i++) {  in dnode_spill_freed()
1748  return (i < TXG_SIZE);            in dnode_spill_freed()
1775  for (i = 0; i < TXG_SIZE; i++) {  in dnode_block_freed()
1781  return (i < TXG_SIZE);            in dnode_block_freed()
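dnode_spill_freed() and dnode_block_freed() above, like dmu_object_wait_synced() further down, rely on the same early-exit scan idiom: walk every slot, break on a match, and report the result as a comparison of the loop counter against TXG_SIZE. A sketch with a hypothetical freed_in_txg[] array in place of the real dnode state:

#include <stdbool.h>
#include <stdint.h>

#define TXG_SIZE 4

static uint64_t freed_in_txg[TXG_SIZE];  /* hypothetical per-txg flags */

static bool
any_txg_freed(void)
{
	int i;

	for (i = 0; i < TXG_SIZE; i++) {
		if (freed_in_txg[i] != 0)
			break;  /* found a slot with freed blocks */
	}
	/* i == TXG_SIZE means the loop ran out: nothing found. */
	return (i < TXG_SIZE);
}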
570   for (int t = 0; t < TXG_SIZE; t++)                 in spa_add()
716   for (int t = 0; t < TXG_SIZE; t++)                 in spa_remove()
1442  freeze_txg = spa_last_synced_txg(spa) + TXG_SIZE;  in spa_freeze()
154   int metaslab_unload_delay = TXG_SIZE * 2;
1303  for (int t = 0; t < TXG_SIZE; t++) {  in metaslab_fini()
1932  for (int t = 0; t < TXG_SIZE; t++) {  in metaslab_sync_done()
2580  for (int j = 0; j < TXG_SIZE; j++)    in metaslab_check_free()
150   for (t = 0; t < TXG_SIZE; t++) {  in dsl_dir_evict_async()
1046  for (i = 0; i < TXG_SIZE; i++) {  in dsl_dir_space_towrite()
1150  for (i = 0; i < TXG_SIZE; i++)    in dsl_dir_tempreserve_impl()
491   for (int t = 0; t < TXG_SIZE; t++) {  in zilog_is_dirty()
1699  for (int i = 0; i < TXG_SIZE; i++) {  in zil_alloc()
1740  for (int i = 0; i < TXG_SIZE; i++) {  in zil_free()
1872  for (i = 0; i < TXG_SIZE; i++) {  in dmu_object_wait_synced()
1878  if (i != TXG_SIZE) {              in dmu_object_wait_synced()
452  for (i = 0; i < TXG_SIZE; i++) {    in dmu_objset_open_impl()
700  for (int t = 0; t < TXG_SIZE; t++)  in dmu_objset_evict()
715 for (t = 0; t < TXG_SIZE; t++) { in vdev_top_transfer()
1064 for (int t = 0; t < TXG_SIZE; t++) { in dsl_dataset_is_dirty()
4840 for (int t = 0; t < TXG_SIZE; t++) in spa_vdev_detach()
1259  uintptr_t ms_alloctree[TXG_SIZE];
1260  uintptr_t ms_freetree[TXG_SIZE];
1701  int64_t dd_space_towrite[TXG_SIZE];
1711  uint64_t ms_alloctree[TXG_SIZE];
1712  uint64_t ms_freetree[TXG_SIZE];
1733  for (i = 0; i < TXG_SIZE; i++) {                          in space_cb()
2185  uintptr_t lw_head[TXG_SIZE];
2205  for (i = 0; i < TXG_SIZE; i++)                            in txg_list_walk_init_common()
2222  return (txg_list_walk_init_common(wsp, 0, TXG_SIZE-1));   in txg_list_walk_init()
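The mdb module mirrors the kernel layout with uintptr_t/uint64_t shadow arrays, and txg_list_walk_init() simply seeds the common walker with the full inclusive slot range [0, TXG_SIZE - 1]. A hypothetical simplification of that seeding; the struct and function names below are stand-ins, not the real mdb walker API:

#define TXG_SIZE 4

/* Hypothetical walker state: which slots remain to be visited. */
typedef struct txg_list_walk {
	int tlw_txg;      /* current slot */
	int tlw_lasttxg;  /* last slot, inclusive */
} txg_list_walk_t;

static void
my_walk_init_common(txg_list_walk_t *w, int first, int last)
{
	w->tlw_txg = first;
	w->tlw_lasttxg = last;
}

static void
my_walk_init(txg_list_walk_t *w)
{
	/* Walk every per-txg list, mirroring txg_list_walk_init(). */
	my_walk_init_common(w, 0, TXG_SIZE - 1);
}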