Lines matching refs:txg (cross-references of txg in the ZFS intent log code, zil.c; each match is shown with its enclosing function)

294     zil_parse_lr_func_t *parse_lr_func, void *arg, uint64_t txg)  in zil_parse()  argument
333 if ((error = parse_blk_func(zilog, &blk, arg, txg)) != 0) in zil_parse()
352 if ((error = parse_lr_func(zilog, lr, arg, txg)) != 0) in zil_parse()
441 zil_alloc_lwb(zilog_t *zilog, blkptr_t *bp, uint64_t txg) in zil_alloc_lwb() argument
449 lwb->lwb_max_txg = txg; in zil_alloc_lwb()
472 zilog_dirty(zilog_t *zilog, uint64_t txg) in zilog_dirty() argument
480 if (txg_list_add(&dp->dp_dirty_zilogs, zilog, txg)) { in zilog_dirty()
506 uint64_t txg = 0; in zil_create() local
530 txg = dmu_tx_get_txg(tx); in zil_create()
533 zio_free_zil(zilog->zl_spa, txg, &blk); in zil_create()
537 error = zio_alloc_zil(zilog->zl_spa, txg, &blk, NULL, in zil_create()
548 lwb = zil_alloc_lwb(zilog, &blk, txg); in zil_create()
557 txg_wait_synced(zilog->zl_dmu_pool, txg); in zil_create()
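
In zil_create() the first log block is allocated inside an open transaction: the txg comes from dmu_tx_get_txg(), the block is allocated (or a stale one freed) in that txg, and txg_wait_synced() then blocks until that txg has synced so the new block pointer is safely on disk before the log is used; zil_close() at the end of the listing waits on a txg the same way before tearing the log down. Below is a toy model of that "charge the work to the open txg, then wait for that txg to sync" pattern; every name in it except txg is a hypothetical stand-in, not the ZFS API.

#include <stdint.h>
#include <stdio.h>

typedef struct pool {
        uint64_t open_txg;      /* txg currently accepting new work */
        uint64_t synced_txg;    /* highest txg known to be on stable storage */
} pool_t;

/* Stand-in for dmu_tx_get_txg(): the txg this work is charged to. */
static uint64_t
pool_assign_txg(pool_t *p)
{
        return (p->open_txg);
}

/* Stand-in for txg_wait_synced(): block until 'txg' has been synced. */
static void
pool_wait_synced(pool_t *p, uint64_t txg)
{
        while (p->synced_txg < txg) {
                /* The real code sleeps on a condition variable here. */
                p->synced_txg++;
                p->open_txg++;
        }
}

int
main(void)
{
        pool_t p = { .open_txg = 100, .synced_txg = 97 };
        uint64_t txg = pool_assign_txg(&p);     /* block allocated in txg 100 */

        /* ... allocate the first log block, record its pointer ... */
        pool_wait_synced(&p, txg);              /* pointer now safe to rely on */
        printf("txg %llu synced\n", (unsigned long long)txg);
        return (0);
}
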
580 uint64_t txg; in zil_destroy() local
595 txg = dmu_tx_get_txg(tx); in zil_destroy()
599 ASSERT3U(zilog->zl_destroy_txg, <, txg); in zil_destroy()
600 zilog->zl_destroy_txg = txg; in zil_destroy()
610 zio_free_zil(zilog->zl_spa, txg, &lwb->lwb_blk); in zil_destroy()
932 uint64_t txg; in zil_lwb_write_start() local
958 txg = dmu_tx_get_txg(tx); in zil_lwb_write_start()
991 error = zio_alloc_zil(spa, txg, bp, &lwb->lwb_blk, zil_blksz, in zil_lwb_write_start()
994 ASSERT3U(bp->blk_birth, ==, txg); in zil_lwb_write_start()
1001 nlwb = zil_alloc_lwb(zilog, bp, txg); in zil_lwb_write_start()
1041 uint64_t txg = lrc->lrc_txg; in zil_lwb_commit() local
1070 txg_wait_synced(zilog->zl_dmu_pool, txg); in zil_lwb_commit()
1084 if (txg > spa_freeze_txg(zilog->zl_spa)) in zil_lwb_commit()
1085 txg_wait_synced(zilog->zl_dmu_pool, txg); in zil_lwb_commit()
1101 txg_wait_synced(zilog->zl_dmu_pool, txg); in zil_lwb_commit()
1120 lwb->lwb_max_txg = MAX(lwb->lwb_max_txg, txg); in zil_lwb_commit()
1207 uint64_t otxg, txg; in zil_remove_async() local
1222 for (txg = otxg; txg < (otxg + TXG_CONCURRENT_STATES); txg++) { in zil_remove_async()
1223 itxg_t *itxg = &zilog->zl_itxg[txg & TXG_MASK]; in zil_remove_async()
1226 if (itxg->itxg_txg != txg) { in zil_remove_async()
1251 uint64_t txg; in zil_itx_assign() local
1272 txg = ZILTEST_TXG; in zil_itx_assign()
1274 txg = dmu_tx_get_txg(tx); in zil_itx_assign()
1276 itxg = &zilog->zl_itxg[txg & TXG_MASK]; in zil_itx_assign()
1279 if (itxg->itxg_txg != txg) { in zil_itx_assign()
1291 itxg->itxg_txg = txg; in zil_itx_assign()
1322 zilog_dirty(zilog, txg); in zil_itx_assign()
1374 uint64_t otxg, txg; in zil_get_commit_list() local
1383 for (txg = otxg; txg < (otxg + TXG_CONCURRENT_STATES); txg++) { in zil_get_commit_list()
1384 itxg_t *itxg = &zilog->zl_itxg[txg & TXG_MASK]; in zil_get_commit_list()
1387 if (itxg->itxg_txg != txg) { in zil_get_commit_list()
1407 uint64_t otxg, txg; in zil_async_to_sync() local
1417 for (txg = otxg; txg < (otxg + TXG_CONCURRENT_STATES); txg++) { in zil_async_to_sync()
1418 itxg_t *itxg = &zilog->zl_itxg[txg & TXG_MASK]; in zil_async_to_sync()
1421 if (itxg->itxg_txg != txg) { in zil_async_to_sync()
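
The four loops above (zil_remove_async(), zil_itx_assign(), zil_get_commit_list() and zil_async_to_sync()) all use the same idiom: per-txg itx state lives in a small ring indexed by txg & TXG_MASK, and only the txgs that can still be active, otxg through otxg + TXG_CONCURRENT_STATES - 1, are walked, skipping any slot whose recorded itxg_txg no longer matches because the slot has been recycled for a newer txg. The sketch below models just that indexing; the constant values follow ZFS's txg.h, while the structure contents and record counts are simplified stand-ins.

#include <stdint.h>
#include <stdio.h>

#define TXG_SIZE                4               /* slots in the ring */
#define TXG_MASK                (TXG_SIZE - 1)
#define TXG_CONCURRENT_STATES   3               /* open, quiescing, syncing */

typedef struct itxg {
        uint64_t itxg_txg;      /* txg this slot currently holds */
        int      itxg_nrecords; /* stand-in for the per-slot itx lists */
} itxg_t;

static itxg_t zl_itxg[TXG_SIZE];

/* Assign a record to the slot for its txg, recycling stale slots. */
static void
itx_assign(uint64_t txg)
{
        itxg_t *itxg = &zl_itxg[txg & TXG_MASK];

        if (itxg->itxg_txg != txg) {
                /* Slot still holds an older txg: reset it first. */
                itxg->itxg_txg = txg;
                itxg->itxg_nrecords = 0;
        }
        itxg->itxg_nrecords++;
}

/* Walk every txg that can still be active, as the loops above do. */
static void
walk_active(uint64_t otxg)
{
        for (uint64_t txg = otxg; txg < otxg + TXG_CONCURRENT_STATES; txg++) {
                itxg_t *itxg = &zl_itxg[txg & TXG_MASK];

                if (itxg->itxg_txg != txg)
                        continue;       /* slot was reused for another txg */
                printf("txg %llu: %d record(s)\n",
                    (unsigned long long)txg, itxg->itxg_nrecords);
        }
}

int
main(void)
{
        itx_assign(100);
        itx_assign(101);
        itx_assign(101);
        itx_assign(104);        /* reuses slot 104 & TXG_MASK == 0 */
        walk_active(101);       /* txgs 101..103 may still be active */
        return (0);
}

Masking with TXG_MASK is safe here because at most TXG_CONCURRENT_STATES consecutive txgs are in flight at once, so a ring of TXG_SIZE slots never has two live txgs landing in the same slot.
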
1456 uint64_t txg; in zil_commit_writer() local
1487 txg = itx->itx_lr.lrc_txg; in zil_commit_writer()
1488 ASSERT(txg); in zil_commit_writer()
1490 if (txg > spa_last_synced_txg(spa) || txg > spa_freeze_txg(spa)) in zil_commit_writer()
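
In zil_commit_writer() each queued record carries lrc_txg, the txg of the transaction that generated it, and it is copied into a log block only when that txg has not already been synced, or, if the pool has been frozen for ZIL testing, when it lies beyond the freeze txg. A minimal restatement of that test, with the spa_last_synced_txg() and spa_freeze_txg() accessors replaced by plain parameters (the helper name is hypothetical):

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Does this record's txg still need to reach the on-disk log? */
static bool
itx_needs_log(uint64_t txg, uint64_t last_synced_txg, uint64_t freeze_txg)
{
        return (txg > last_synced_txg || txg > freeze_txg);
}

int
main(void)
{
        uint64_t not_frozen = UINT64_MAX;       /* freeze txg effectively unlimited */

        printf("%d\n", itx_needs_log(120, 118, not_frozen));    /* 1: not yet synced */
        printf("%d\n", itx_needs_log(117, 118, not_frozen));    /* 0: already synced */
        printf("%d\n", itx_needs_log(117, 118, 110));           /* 1: past freeze txg */
        return (0);
}
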
1592 uint64_t txg = dmu_tx_get_txg(tx); in zil_sync() local
1594 uint64_t *replayed_seq = &zilog->zl_replayed_seq[txg & TXG_MASK]; in zil_sync()
1614 if (zilog->zl_destroy_txg == txg) { in zil_sync()
1638 if (lwb->lwb_buf != NULL || lwb->lwb_max_txg > txg) in zil_sync()
1641 zio_free_zil(spa, txg, &lwb->lwb_blk); in zil_sync()
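
Several of the matches above cooperate around a single watermark: zil_alloc_lwb() stamps each log write buffer with the txg in which its block was allocated, zil_lwb_commit() raises lwb_max_txg to the highest txg of any record copied into the buffer, zil_sync() frees the on-disk block only once the syncing txg has reached that watermark (and the buffer itself has been written out), and zil_close() below waits for the same txg before the log is torn down. A simplified illustration with stand-in types:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define MAX(a, b)       ((a) > (b) ? (a) : (b))

typedef struct lwb {
        uint64_t lwb_max_txg;   /* highest txg that references this block */
        bool     lwb_open;      /* buffer not yet written (stand-in for lwb_buf != NULL) */
} lwb_t;

/* As in zil_lwb_commit(): a record from 'txg' was copied into the buffer. */
static void
lwb_note_record(lwb_t *lwb, uint64_t txg)
{
        lwb->lwb_max_txg = MAX(lwb->lwb_max_txg, txg);
}

/* As in zil_sync(): the block may be freed only once 'synced_txg' covers it. */
static bool
lwb_can_free(const lwb_t *lwb, uint64_t synced_txg)
{
        return (!lwb->lwb_open && lwb->lwb_max_txg <= synced_txg);
}

int
main(void)
{
        lwb_t lwb = { .lwb_max_txg = 100, .lwb_open = false };

        lwb_note_record(&lwb, 103);
        printf("free at txg 102? %d\n", lwb_can_free(&lwb, 102));       /* 0 */
        printf("free at txg 103? %d\n", lwb_can_free(&lwb, 103));       /* 1 */
        return (0);
}
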
1789 uint64_t txg = 0; in zil_close() local
1802 txg = lwb->lwb_max_txg; in zil_close()
1804 if (txg) in zil_close()
1805 txg_wait_synced(zilog->zl_dmu_pool, txg); in zil_close()