Lines matching refs: b_l1hdr
773 #define HDR_L2ONLY_SIZE ((int64_t)offsetof(arc_buf_hdr_t, b_l1hdr))
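
HDR_L2ONLY_SIZE works because the fields only a full (L1) header needs are laid out at the tail of arc_buf_hdr_t, starting at b_l1hdr; a header that exists only for L2ARC bookkeeping can therefore be allocated at just offsetof(arc_buf_hdr_t, b_l1hdr) bytes, and the tail simply never exists. A minimal stand-alone analogue of that layout trick (the struct and field names here are hypothetical, not the real ZFS definitions):

#include <stddef.h>
#include <stdlib.h>

/* Hypothetical header: fields only "full" headers need sit at the tail. */
typedef struct hdr {
	unsigned long	h_birth;		/* needed by every header */
	unsigned int	h_flags;
	struct {				/* needed only by full headers */
		void		*h_data;
		unsigned int	h_refcnt;
	} h_l1;					/* everything from here on is optional */
} hdr_t;

/* Mirrors HDR_L2ONLY_SIZE: how much of hdr_t a tail-less header occupies. */
#define	HDR_TAILLESS_SIZE	((size_t)offsetof(hdr_t, h_l1))

static hdr_t *
hdr_alloc(int full)
{
	/*
	 * A tail-less header is just a short allocation; callers must
	 * never touch h_l1 unless they asked for a full header.
	 */
	return (calloc(1, full ? sizeof (hdr_t) : HDR_TAILLESS_SIZE));
}

The arc_hdr_realloc() lines later in this listing are the other half of the trick: converting a header between its full and L2-only forms.
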
1186 hdr->b_l1hdr.b_byteswap = DMU_BSWAP_NUMFUNCS; in hdr_full_cons()
1187 zfs_refcount_create(&hdr->b_l1hdr.b_refcnt); in hdr_full_cons()
1189 mutex_init(&hdr->b_l1hdr.b_freeze_lock, NULL, MUTEX_DEFAULT, NULL); in hdr_full_cons()
1191 multilist_link_init(&hdr->b_l1hdr.b_arc_node); in hdr_full_cons()
1233 zfs_refcount_destroy(&hdr->b_l1hdr.b_refcnt); in hdr_full_dest()
1235 mutex_destroy(&hdr->b_l1hdr.b_freeze_lock); in hdr_full_dest()
1237 ASSERT(!multilist_link_active(&hdr->b_l1hdr.b_arc_node)); in hdr_full_dest()
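
hdr_full_cons() and hdr_full_dest() above are the object-cache constructor/destructor pair for full (L1) headers: everything the constructor sets up in b_l1hdr (the refcount, the freeze lock, the multilist link) is destroyed or asserted idle by the destructor, in matching order. A simplified pthreads analogue of that pairing (illustrative names, not the SPL kmem-cache API):

#include <assert.h>
#include <pthread.h>

typedef struct full_hdr {
	pthread_mutex_t	fh_freeze_lock;		/* stands in for b_freeze_lock */
	int		fh_refcnt;		/* stands in for b_refcnt */
	int		fh_on_list;		/* stands in for the multilist link */
} full_hdr_t;

/* Cache constructor: runs when the backing memory is first handed out. */
static int
full_hdr_cons(void *buf, void *arg, int kmflags)
{
	full_hdr_t *fh = buf;

	(void) arg;
	(void) kmflags;
	fh->fh_refcnt = 0;
	fh->fh_on_list = 0;
	return (pthread_mutex_init(&fh->fh_freeze_lock, NULL));
}

/* Cache destructor: only ever sees objects with no live references. */
static void
full_hdr_dest(void *buf, void *arg)
{
	full_hdr_t *fh = buf;

	(void) arg;
	assert(fh->fh_refcnt == 0);
	assert(!fh->fh_on_list);
	(void) pthread_mutex_destroy(&fh->fh_freeze_lock);
}
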
1361 *byteorder = (hdr->b_l1hdr.b_byteswap == DMU_BSWAP_NUMFUNCS) ? in arc_get_raw_params()
1399 buf->b_hdr->b_l1hdr.b_pabd != NULL && in arc_buf_is_shared()
1400 abd_is_linear(buf->b_hdr->b_l1hdr.b_pabd) && in arc_buf_is_shared()
1401 buf->b_data == abd_to_buf(buf->b_hdr->b_l1hdr.b_pabd)); in arc_buf_is_shared()
1424 mutex_enter(&hdr->b_l1hdr.b_freeze_lock); in arc_cksum_free()
1425 if (hdr->b_l1hdr.b_freeze_cksum != NULL) { in arc_cksum_free()
1426 kmem_free(hdr->b_l1hdr.b_freeze_cksum, sizeof (zio_cksum_t)); in arc_cksum_free()
1427 hdr->b_l1hdr.b_freeze_cksum = NULL; in arc_cksum_free()
1429 mutex_exit(&hdr->b_l1hdr.b_freeze_lock); in arc_cksum_free()
1440 ASSERT(hdr->b_l1hdr.b_state == arc_anon || HDR_EMPTY_OR_LOCKED(hdr)); in arc_hdr_has_uncompressed_buf()
1442 for (arc_buf_t *b = hdr->b_l1hdr.b_buf; b != NULL; b = b->b_next) { in arc_hdr_has_uncompressed_buf()
1471 mutex_enter(&hdr->b_l1hdr.b_freeze_lock); in arc_cksum_verify()
1473 if (hdr->b_l1hdr.b_freeze_cksum == NULL || HDR_IO_ERROR(hdr)) { in arc_cksum_verify()
1474 mutex_exit(&hdr->b_l1hdr.b_freeze_lock); in arc_cksum_verify()
1479 if (!ZIO_CHECKSUM_EQUAL(*hdr->b_l1hdr.b_freeze_cksum, zc)) in arc_cksum_verify()
1481 mutex_exit(&hdr->b_l1hdr.b_freeze_lock); in arc_cksum_verify()
1529 mutex_enter(&hdr->b_l1hdr.b_freeze_lock); in arc_cksum_compute()
1530 if (hdr->b_l1hdr.b_freeze_cksum != NULL || ARC_BUF_COMPRESSED(buf)) { in arc_cksum_compute()
1531 mutex_exit(&hdr->b_l1hdr.b_freeze_lock); in arc_cksum_compute()
1537 hdr->b_l1hdr.b_freeze_cksum = kmem_alloc(sizeof (zio_cksum_t), in arc_cksum_compute()
1540 hdr->b_l1hdr.b_freeze_cksum); in arc_cksum_compute()
1541 mutex_exit(&hdr->b_l1hdr.b_freeze_lock); in arc_cksum_compute()
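
arc_cksum_compute(), arc_cksum_verify(), and arc_cksum_free() above share one shape: take b_freeze_lock, check whether b_freeze_cksum exists (or no longer should), and only then allocate, compare, or free it, so the checksum is computed lazily and at most once per frozen buffer. A condensed sketch of that pattern using pthreads and a toy digest in place of the real checksum routine (simplified types, not the ZFS ones):

#include <pthread.h>
#include <stdint.h>
#include <stdlib.h>

typedef struct cksum_cache {
	pthread_mutex_t	cc_lock;	/* stands in for b_freeze_lock */
	uint64_t	*cc_cksum;	/* NULL until first computed */
} cksum_cache_t;

/* Toy digest standing in for the real checksum function. */
static uint64_t
digest(const void *buf, size_t len)
{
	const unsigned char *p = buf;
	uint64_t sum = 0;

	for (size_t i = 0; i < len; i++)
		sum = sum * 31 + p[i];
	return (sum);
}

static void
cksum_compute(cksum_cache_t *cc, const void *buf, size_t len)
{
	pthread_mutex_lock(&cc->cc_lock);
	if (cc->cc_cksum != NULL) {		/* already frozen: nothing to do */
		pthread_mutex_unlock(&cc->cc_lock);
		return;
	}
	cc->cc_cksum = malloc(sizeof (uint64_t));
	if (cc->cc_cksum != NULL)
		*cc->cc_cksum = digest(buf, len);
	pthread_mutex_unlock(&cc->cc_lock);
}

/* Returns 0 if the data still matches the frozen checksum (or none exists). */
static int
cksum_verify(cksum_cache_t *cc, const void *buf, size_t len)
{
	int bad;

	pthread_mutex_lock(&cc->cc_lock);
	if (cc->cc_cksum == NULL) {
		pthread_mutex_unlock(&cc->cc_lock);
		return (0);
	}
	bad = (*cc->cc_cksum != digest(buf, len));
	pthread_mutex_unlock(&cc->cc_lock);
	return (bad);
}

static void
cksum_free(cksum_cache_t *cc)
{
	pthread_mutex_lock(&cc->cc_lock);
	free(cc->cc_cksum);			/* free(NULL) is a no-op */
	cc->cc_cksum = NULL;
	pthread_mutex_unlock(&cc->cc_lock);
}
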
1620 ASSERT3P(hdr->b_l1hdr.b_state, ==, arc_anon); in arc_buf_thaw()
1714 for (arc_buf_t *from = hdr->b_l1hdr.b_buf; from != NULL; in arc_buf_try_copy_decompressed_data()
1734 EQUIV(!copied, hdr->b_l1hdr.b_freeze_cksum == NULL); in arc_buf_try_copy_decompressed_data()
1807 abd_t *abd = hdr->b_l1hdr.b_pabd; in arc_hdr_authenticate()
1826 hdr->b_l1hdr.b_pabd, &abd, lsize, MIN(lsize, psize), in arc_hdr_authenticate()
1845 psize, hdr->b_l1hdr.b_byteswap != DMU_BSWAP_NUMFUNCS); in arc_hdr_authenticate()
1874 boolean_t bswap = (hdr->b_l1hdr.b_byteswap != DMU_BSWAP_NUMFUNCS); in arc_hdr_decrypt()
1883 hdr->b_crypt_hdr.b_mac, HDR_GET_PSIZE(hdr), hdr->b_l1hdr.b_pabd, in arc_hdr_decrypt()
1889 abd_copy(hdr->b_l1hdr.b_pabd, hdr->b_crypt_hdr.b_rabd, in arc_hdr_decrypt()
1909 hdr->b_l1hdr.b_pabd, cabd, HDR_GET_PSIZE(hdr), in arc_hdr_decrypt()
1915 arc_free_data_abd(hdr, hdr->b_l1hdr.b_pabd, in arc_hdr_decrypt()
1917 hdr->b_l1hdr.b_pabd = cabd; in arc_hdr_decrypt()
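
arc_hdr_decrypt() above (and l2arc_untransform() later in this listing) replaces b_l1hdr.b_pabd by transforming into a freshly allocated buffer, freeing the old one, and only then swapping the pointer, so the header never points at a half-transformed buffer. The pattern in reduced form, with malloc/memcpy standing in for the abd and zio routines:

#include <stdlib.h>
#include <string.h>

/* Hypothetical stand-in for the decrypt/decompress step. */
static int
transform(void *dst, const void *src, size_t len)
{
	memcpy(dst, src, len);
	return (0);
}

/* Replace *pabd with its transformed copy; on failure *pabd is untouched. */
static int
swap_in_transformed(void **pabd, size_t len)
{
	void *nabd = malloc(len);

	if (nabd == NULL)
		return (-1);
	if (transform(nabd, *pabd, len) != 0) {
		free(nabd);
		return (-1);
	}
	free(*pabd);		/* old buffer goes away only after success */
	*pabd = nabd;
	return (0);
}
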
1954 } else if (HDR_HAS_RABD(hdr) && hdr->b_l1hdr.b_pabd == NULL) { in arc_fill_hdr_crypt()
1965 ASSERT3P(hdr->b_l1hdr.b_pabd, !=, NULL); in arc_fill_hdr_crypt()
1993 ASSERT3PF(hdr->b_l1hdr.b_pabd, !=, NULL, "hdr %px buf %px", hdr, buf); in arc_buf_untransform_in_place()
1995 zio_crypt_copy_dnode_bonus(hdr->b_l1hdr.b_pabd, buf->b_data, in arc_buf_untransform_in_place()
2024 dmu_object_byteswap_t bswap = hdr->b_l1hdr.b_byteswap; in arc_buf_fill()
2104 abd_copy_to_buf(buf->b_data, hdr->b_l1hdr.b_pabd, in arc_buf_fill()
2160 hdr->b_l1hdr.b_pabd, &dabd, in arc_buf_fill()
2242 ASSERT3P(hdr->b_l1hdr.b_buf, ==, NULL); in arc_evictable_space_increment()
2243 ASSERT3P(hdr->b_l1hdr.b_pabd, ==, NULL); in arc_evictable_space_increment()
2250 if (hdr->b_l1hdr.b_pabd != NULL) { in arc_evictable_space_increment()
2259 for (arc_buf_t *buf = hdr->b_l1hdr.b_buf; buf != NULL; in arc_evictable_space_increment()
2281 ASSERT3P(hdr->b_l1hdr.b_buf, ==, NULL); in arc_evictable_space_decrement()
2282 ASSERT3P(hdr->b_l1hdr.b_pabd, ==, NULL); in arc_evictable_space_decrement()
2289 if (hdr->b_l1hdr.b_pabd != NULL) { in arc_evictable_space_decrement()
2298 for (arc_buf_t *buf = hdr->b_l1hdr.b_buf; buf != NULL; in arc_evictable_space_decrement()
2316 arc_state_t *state = hdr->b_l1hdr.b_state; in add_reference()
2321 ASSERT(zfs_refcount_is_zero(&hdr->b_l1hdr.b_refcnt)); in add_reference()
2322 ASSERT3P(hdr->b_l1hdr.b_buf, ==, NULL); in add_reference()
2325 if ((zfs_refcount_add(&hdr->b_l1hdr.b_refcnt, tag) == 1) && in add_reference()
2342 arc_state_t *state = hdr->b_l1hdr.b_state; in remove_reference()
2348 if ((cnt = zfs_refcount_remove(&hdr->b_l1hdr.b_refcnt, tag)) != 0) in remove_reference()
2389 l1hdr = &hdr->b_l1hdr; in arc_buf_info()
2437 old_state = hdr->b_l1hdr.b_state; in arc_change_state()
2438 refcnt = zfs_refcount_count(&hdr->b_l1hdr.b_refcnt); in arc_change_state()
2439 update_old = (hdr->b_l1hdr.b_buf != NULL || in arc_change_state()
2440 hdr->b_l1hdr.b_pabd != NULL || HDR_HAS_RABD(hdr)); in arc_change_state()
2442 IMPLY(GHOST_STATE(old_state), hdr->b_l1hdr.b_buf == NULL); in arc_change_state()
2443 IMPLY(GHOST_STATE(new_state), hdr->b_l1hdr.b_buf == NULL); in arc_change_state()
2444 IMPLY(old_state == arc_anon, hdr->b_l1hdr.b_buf == NULL || in arc_change_state()
2445 ARC_BUF_LAST(hdr->b_l1hdr.b_buf)); in arc_change_state()
2468 if (multilist_link_active(&hdr->b_l1hdr.b_arc_node)) { in arc_change_state()
2506 ASSERT3P(hdr->b_l1hdr.b_pabd, ==, NULL); in arc_change_state()
2515 for (arc_buf_t *buf = hdr->b_l1hdr.b_buf; buf != NULL; in arc_change_state()
2533 if (hdr->b_l1hdr.b_pabd != NULL) { in arc_change_state()
2550 ASSERT3P(hdr->b_l1hdr.b_pabd, ==, NULL); in arc_change_state()
2571 for (arc_buf_t *buf = hdr->b_l1hdr.b_buf; buf != NULL; in arc_change_state()
2588 ASSERT(hdr->b_l1hdr.b_pabd != NULL || in arc_change_state()
2591 if (hdr->b_l1hdr.b_pabd != NULL) { in arc_change_state()
2606 hdr->b_l1hdr.b_state = new_state; in arc_change_state()
2738 hdr->b_l1hdr.b_byteswap == DMU_BSWAP_NUMFUNCS && in arc_can_share()
2767 buf->b_next = hdr->b_l1hdr.b_buf; in arc_buf_alloc_impl()
2818 hdr->b_l1hdr.b_pabd != NULL && in arc_buf_alloc_impl()
2819 abd_is_linear(hdr->b_l1hdr.b_pabd) && in arc_buf_alloc_impl()
2820 !abd_is_linear_page(hdr->b_l1hdr.b_pabd); in arc_buf_alloc_impl()
2824 buf->b_data = abd_to_buf(hdr->b_l1hdr.b_pabd); in arc_buf_alloc_impl()
2834 hdr->b_l1hdr.b_buf = buf; in arc_buf_alloc_impl()
2913 (void) zfs_refcount_add(&hdr->b_l1hdr.b_refcnt, tag); in arc_return_buf()
2914 (void) zfs_refcount_remove(&hdr->b_l1hdr.b_refcnt, arc_onloan_tag); in arc_return_buf()
2927 (void) zfs_refcount_add(&hdr->b_l1hdr.b_refcnt, arc_onloan_tag); in arc_loan_inuse_buf()
2928 (void) zfs_refcount_remove(&hdr->b_l1hdr.b_refcnt, tag); in arc_loan_inuse_buf()
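
arc_return_buf() and arc_loan_inuse_buf() above hand a loaned buffer between holders by adding the new holder's tag before removing the old one, so the reference count never passes through zero mid-transfer. A toy version of that ordering (plain C11 atomics, not the zfs_refcount implementation, which can also record which tag holds each reference):

#include <assert.h>
#include <stdatomic.h>

typedef struct tagged_ref {
	atomic_int	tr_count;
} tagged_ref_t;

static void
ref_add(tagged_ref_t *tr, const void *tag)
{
	(void) tag;		/* tags exist for ownership/debug tracking */
	atomic_fetch_add(&tr->tr_count, 1);
}

static void
ref_remove(tagged_ref_t *tr, const void *tag)
{
	int old;

	(void) tag;
	old = atomic_fetch_sub(&tr->tr_count, 1);
	assert(old > 0);
	(void) old;
}

/*
 * Transfer ownership from old_tag to new_tag: add first, remove second,
 * so the count never dips to zero while the buffer is still in use.
 */
static void
ref_transfer(tagged_ref_t *tr, const void *old_tag, const void *new_tag)
{
	ref_add(tr, new_tag);
	ref_remove(tr, old_tag);
}
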
2949 arc_state_t *state = hdr->b_l1hdr.b_state; in arc_hdr_free_on_write()
2954 if (multilist_link_active(&hdr->b_l1hdr.b_arc_node)) { in arc_hdr_free_on_write()
2955 ASSERT(zfs_refcount_is_zero(&hdr->b_l1hdr.b_refcnt)); in arc_hdr_free_on_write()
2972 l2arc_free_abd_on_write(hdr->b_l1hdr.b_pabd, size, type); in arc_hdr_free_on_write()
2985 ASSERT3P(hdr->b_l1hdr.b_pabd, ==, NULL); in arc_share_buf()
2995 &hdr->b_l1hdr.b_state->arcs_size[arc_buf_type(hdr)], in arc_share_buf()
2997 hdr->b_l1hdr.b_pabd = abd_get_from_buf(buf->b_data, arc_buf_size(buf)); in arc_share_buf()
2998 abd_take_ownership_of_buf(hdr->b_l1hdr.b_pabd, in arc_share_buf()
3017 ASSERT3P(hdr->b_l1hdr.b_pabd, !=, NULL); in arc_unshare_buf()
3025 &hdr->b_l1hdr.b_state->arcs_size[arc_buf_type(hdr)], in arc_unshare_buf()
3028 abd_release_ownership_of_buf(hdr->b_l1hdr.b_pabd); in arc_unshare_buf()
3029 abd_free(hdr->b_l1hdr.b_pabd); in arc_unshare_buf()
3030 hdr->b_l1hdr.b_pabd = NULL; in arc_unshare_buf()
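
arc_share_buf() and arc_unshare_buf() above wrap and unwrap a buf's bytes in the header's abd: sharing builds b_pabd as a view over buf->b_data and takes ownership of it, while unsharing releases that ownership before freeing the view, so only the wrapper is destroyed and the data stays with the buf. A small analogue of that ownership handoff (generic view type, not the abd API):

#include <stdbool.h>
#include <stdlib.h>

/* A thin view over memory that the view may or may not own. */
typedef struct view {
	void	*v_data;
	size_t	v_size;
	bool	v_owns_data;	/* free v_data when the view is destroyed? */
} view_t;

/* Share: mirrors abd_get_from_buf() + abd_take_ownership_of_buf(). */
static view_t *
view_share(void *buf, size_t size)
{
	view_t *v = malloc(sizeof (*v));

	if (v != NULL) {
		v->v_data = buf;
		v->v_size = size;
		v->v_owns_data = true;
	}
	return (v);
}

static void
view_free(view_t *v)
{
	if (v->v_owns_data)
		free(v->v_data);
	free(v);
}

/*
 * Unshare: mirrors abd_release_ownership_of_buf() + abd_free(); the bytes
 * survive and belong solely to the original buf afterwards.
 */
static void
view_unshare(view_t *v)
{
	v->v_owns_data = false;		/* give the data back to its buf */
	view_free(v);			/* destroys only the wrapper */
}
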
3053 arc_buf_t **bufp = &hdr->b_l1hdr.b_buf; in arc_buf_remove()
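
arc_buf_remove() walks the singly linked b_buf list (built by the buf->b_next push seen in arc_buf_alloc_impl() above) through a pointer-to-pointer, arc_buf_t **bufp, so unlinking the head and unlinking an interior buf are the same splice with no special case. The idiom in isolation (generic node type, not arc_buf_t):

#include <stddef.h>

typedef struct node {
	struct node	*n_next;
	int		n_key;
} node_t;

/* Unlink and return the first node matching key; works for the head too. */
static node_t *
list_remove(node_t **headp, int key)
{
	node_t **np;

	for (np = headp; *np != NULL; np = &(*np)->n_next) {
		if ((*np)->n_key == key) {
			node_t *found = *np;

			*np = found->n_next;	/* splice out through the link itself */
			found->n_next = NULL;
			return (found);
		}
	}
	return (NULL);
}
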
3121 hdr->b_l1hdr.b_pabd != NULL && !HDR_IO_IN_PROGRESS(hdr)) { in arc_buf_destroy_impl()
3123 for (b = hdr->b_l1hdr.b_buf; b; b = b->b_next) { in arc_buf_destroy_impl()
3153 ASSERT3P(hdr->b_l1hdr.b_pabd, !=, NULL); in arc_buf_destroy_impl()
3211 ASSERT3P(hdr->b_l1hdr.b_pabd, ==, NULL); in arc_hdr_alloc_abd()
3212 hdr->b_l1hdr.b_pabd = arc_get_data_abd(hdr, size, hdr, in arc_hdr_alloc_abd()
3214 ASSERT3P(hdr->b_l1hdr.b_pabd, !=, NULL); in arc_hdr_alloc_abd()
3227 ASSERT(hdr->b_l1hdr.b_pabd != NULL || HDR_HAS_RABD(hdr)); in arc_hdr_free_abd()
3242 arc_free_data_abd(hdr, hdr->b_l1hdr.b_pabd, size, hdr); in arc_hdr_free_abd()
3249 hdr->b_l1hdr.b_pabd = NULL; in arc_hdr_free_abd()
3252 if (hdr->b_l1hdr.b_pabd == NULL && !HDR_HAS_RABD(hdr)) in arc_hdr_free_abd()
3253 hdr->b_l1hdr.b_byteswap = DMU_BSWAP_NUMFUNCS; in arc_hdr_free_abd()
3293 ASSERT3P(hdr->b_l1hdr.b_freeze_cksum, ==, NULL); in arc_hdr_alloc()
3306 hdr->b_l1hdr.b_state = arc_anon; in arc_hdr_alloc()
3307 hdr->b_l1hdr.b_arc_access = 0; in arc_hdr_alloc()
3308 hdr->b_l1hdr.b_mru_hits = 0; in arc_hdr_alloc()
3309 hdr->b_l1hdr.b_mru_ghost_hits = 0; in arc_hdr_alloc()
3310 hdr->b_l1hdr.b_mfu_hits = 0; in arc_hdr_alloc()
3311 hdr->b_l1hdr.b_mfu_ghost_hits = 0; in arc_hdr_alloc()
3312 hdr->b_l1hdr.b_buf = NULL; in arc_hdr_alloc()
3314 ASSERT(zfs_refcount_is_zero(&hdr->b_l1hdr.b_refcnt)); in arc_hdr_alloc()
3351 nhdr->b_l1hdr.b_state = arc_l2c_only; in arc_hdr_realloc()
3354 ASSERT3P(nhdr->b_l1hdr.b_pabd, ==, NULL); in arc_hdr_realloc()
3357 ASSERT3P(hdr->b_l1hdr.b_buf, ==, NULL); in arc_hdr_realloc()
3359 ASSERT3P(hdr->b_l1hdr.b_freeze_cksum, ==, NULL); in arc_hdr_realloc()
3369 ASSERT(!multilist_link_active(&hdr->b_l1hdr.b_arc_node)); in arc_hdr_realloc()
3378 VERIFY3P(hdr->b_l1hdr.b_pabd, ==, NULL); in arc_hdr_realloc()
3439 ASSERT3P(hdr->b_l1hdr.b_state, ==, arc_anon); in arc_convert_to_raw()
3445 hdr->b_l1hdr.b_byteswap = (byteorder == ZFS_HOST_BYTEORDER) ? in arc_convert_to_raw()
3529 hdr->b_l1hdr.b_byteswap = (byteorder == ZFS_HOST_BYTEORDER) ? in arc_alloc_raw_buf()
3645 ASSERT(zfs_refcount_is_zero(&hdr->b_l1hdr.b_refcnt)); in arc_hdr_destroy()
3646 ASSERT3P(hdr->b_l1hdr.b_state, ==, arc_anon); in arc_hdr_destroy()
3691 while (hdr->b_l1hdr.b_buf != NULL) in arc_hdr_destroy()
3692 arc_buf_destroy_impl(hdr->b_l1hdr.b_buf); in arc_hdr_destroy()
3694 if (hdr->b_l1hdr.b_pabd != NULL) in arc_hdr_destroy()
3703 ASSERT(!multilist_link_active(&hdr->b_l1hdr.b_arc_node)); in arc_hdr_destroy()
3704 ASSERT3P(hdr->b_l1hdr.b_acb, ==, NULL); in arc_hdr_destroy()
3706 ASSERT3P(hdr->b_l1hdr.b_freeze_cksum, ==, NULL); in arc_hdr_destroy()
3719 if (hdr->b_l1hdr.b_state == arc_anon) { in arc_buf_destroy()
3720 ASSERT3P(hdr->b_l1hdr.b_buf, ==, buf); in arc_buf_destroy()
3731 ASSERT3P(hdr->b_l1hdr.b_buf, !=, NULL); in arc_buf_destroy()
3733 ASSERT3P(hdr->b_l1hdr.b_state, !=, arc_anon); in arc_buf_destroy()
3774 ASSERT3P(hdr->b_l1hdr.b_buf, ==, NULL); in arc_evict_hdr()
3775 ASSERT0(zfs_refcount_count(&hdr->b_l1hdr.b_refcnt)); in arc_evict_hdr()
3778 state = hdr->b_l1hdr.b_state; in arc_evict_hdr()
3799 ASSERT(hdr->b_l1hdr.b_pabd == NULL); in arc_evict_hdr()
3827 ddi_get_lbolt() - hdr->b_l1hdr.b_arc_access < in arc_evict_hdr()
3868 if (hdr->b_l1hdr.b_pabd != NULL) in arc_evict_hdr()
5250 arc_state_t *state = hdr->b_l1hdr.b_state; in arc_get_data_impl()
5265 if (multilist_link_active(&hdr->b_l1hdr.b_arc_node)) { in arc_get_data_impl()
5266 ASSERT(zfs_refcount_is_zero(&hdr->b_l1hdr.b_refcnt)); in arc_get_data_impl()
5301 arc_state_t *state = hdr->b_l1hdr.b_state; in arc_free_data_impl()
5305 if (multilist_link_active(&hdr->b_l1hdr.b_arc_node)) { in arc_free_data_impl()
5306 ASSERT(zfs_refcount_is_zero(&hdr->b_l1hdr.b_refcnt)); in arc_free_data_impl()
5366 if (hdr->b_l1hdr.b_state == arc_anon) { in arc_access()
5372 ASSERT0(hdr->b_l1hdr.b_arc_access); in arc_access()
5373 hdr->b_l1hdr.b_arc_access = now; in arc_access()
5383 } else if (hdr->b_l1hdr.b_state == arc_mru) { in arc_access()
5389 hdr->b_l1hdr.b_arc_access = now; in arc_access()
5392 hdr->b_l1hdr.b_mru_hits++; in arc_access()
5400 hdr->b_l1hdr.b_arc_access = now; in arc_access()
5408 if (ddi_time_after(now, hdr->b_l1hdr.b_arc_access + in arc_access()
5410 hdr->b_l1hdr.b_arc_access = now; in arc_access()
5414 } else if (hdr->b_l1hdr.b_state == arc_mru_ghost) { in arc_access()
5422 hdr->b_l1hdr.b_mru_ghost_hits++; in arc_access()
5424 hdr->b_l1hdr.b_arc_access = now; in arc_access()
5435 } else if (hdr->b_l1hdr.b_state == arc_mfu) { in arc_access()
5441 hdr->b_l1hdr.b_mfu_hits++; in arc_access()
5444 hdr->b_l1hdr.b_arc_access = now; in arc_access()
5445 } else if (hdr->b_l1hdr.b_state == arc_mfu_ghost) { in arc_access()
5451 hdr->b_l1hdr.b_mfu_ghost_hits++; in arc_access()
5453 hdr->b_l1hdr.b_arc_access = now; in arc_access()
5458 } else if (hdr->b_l1hdr.b_state == arc_uncached) { in arc_access()
5465 hdr->b_l1hdr.b_arc_access = now; in arc_access()
5466 } else if (hdr->b_l1hdr.b_state == arc_l2c_only) { in arc_access()
5471 hdr->b_l1hdr.b_arc_access = now; in arc_access()
5476 hdr->b_l1hdr.b_state); in arc_access()
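
The arc_access() lines above are the heart of the ARC replacement policy: every branch keys off hdr->b_l1hdr.b_state, bumps the matching hit counter, and refreshes b_arc_access. Below is a deliberately simplified sketch of those transitions; the real function also handles prefetch flags, the minimum-residency timing behind the ddi_time_after() check, ghost-hit adaptation, and the arc_uncached/arc_l2c_only states, all omitted here. The state and field names are stand-ins, not the ZFS types:

typedef enum { ANON, MRU, MRU_GHOST, MFU, MFU_GHOST } state_t;

typedef struct entry {
	state_t		e_state;		/* stands in for b_state */
	unsigned long	e_access;		/* stands in for b_arc_access */
	unsigned long	e_mru_hits, e_mru_ghost_hits;
	unsigned long	e_mfu_hits, e_mfu_ghost_hits;
} entry_t;

static void
access_entry(entry_t *e, unsigned long now)
{
	switch (e->e_state) {
	case ANON:		/* first access: fresh data starts out on MRU */
		e->e_state = MRU;
		break;
	case MRU:		/* used again while recent: promote to frequent */
		e->e_mru_hits++;
		e->e_state = MFU;
		break;
	case MRU_GHOST:		/* evicted recently but wanted again */
		e->e_mru_ghost_hits++;
		e->e_state = MFU;
		break;
	case MFU:		/* already frequent: just refresh */
		e->e_mfu_hits++;
		break;
	case MFU_GHOST:		/* evicted from the frequent list, bring it back */
		e->e_mfu_ghost_hits++;
		e->e_state = MFU;
		break;
	}
	e->e_access = now;	/* every path records when it was last touched */
}
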
5494 if (hdr->b_l1hdr.b_state == arc_anon || HDR_EMPTY(hdr)) in arc_buf_access()
5500 if (hdr->b_l1hdr.b_state == arc_anon || HDR_EMPTY(hdr)) { in arc_buf_access()
5506 ASSERT(hdr->b_l1hdr.b_state == arc_mru || in arc_buf_access()
5507 hdr->b_l1hdr.b_state == arc_mfu || in arc_buf_access()
5508 hdr->b_l1hdr.b_state == arc_uncached); in arc_buf_access()
5629 hdr->b_l1hdr.b_byteswap = DMU_BSWAP_UINT64; in arc_read_done()
5631 hdr->b_l1hdr.b_byteswap = in arc_read_done()
5635 hdr->b_l1hdr.b_byteswap = DMU_BSWAP_NUMFUNCS; in arc_read_done()
5646 callback_list = hdr->b_l1hdr.b_acb; in arc_read_done()
5648 hdr->b_l1hdr.b_acb = NULL; in arc_read_done()
5730 if (hdr->b_l1hdr.b_state != arc_anon) in arc_read_done()
5798 arc_state_t *state = hdr->b_l1hdr.b_state; in arc_cached()
5900 (hdr->b_l1hdr.b_pabd != NULL && !encrypted_read))) { in arc_read()
5923 zio_t *head_zio = hdr->b_l1hdr.b_acb->acb_zio_head; in arc_read()
5980 acb->acb_next = hdr->b_l1hdr.b_acb; in arc_read()
5981 hdr->b_l1hdr.b_acb->acb_prev = acb; in arc_read()
5982 hdr->b_l1hdr.b_acb = acb; in arc_read()
6005 ASSERT(hdr->b_l1hdr.b_state == arc_mru || in arc_read()
6006 hdr->b_l1hdr.b_state == arc_mfu || in arc_read()
6007 hdr->b_l1hdr.b_state == arc_uncached); in arc_read()
6132 if (GHOST_STATE(hdr->b_l1hdr.b_state)) { in arc_read()
6133 ASSERT3P(hdr->b_l1hdr.b_pabd, ==, NULL); in arc_read()
6137 &hdr->b_l1hdr.b_refcnt)); in arc_read()
6138 ASSERT3P(hdr->b_l1hdr.b_buf, ==, NULL); in arc_read()
6140 ASSERT3P(hdr->b_l1hdr.b_freeze_cksum, ==, NULL); in arc_read()
6160 hdr->b_l1hdr.b_acb->acb_zio_head; in arc_read()
6161 acb->acb_next = hdr->b_l1hdr.b_acb; in arc_read()
6162 hdr->b_l1hdr.b_acb->acb_prev = acb; in arc_read()
6163 hdr->b_l1hdr.b_acb = acb; in arc_read()
6200 ASSERT3P(hdr->b_l1hdr.b_pabd, !=, NULL); in arc_read()
6202 hdr_abd = hdr->b_l1hdr.b_pabd; in arc_read()
6223 ASSERT(!GHOST_STATE(hdr->b_l1hdr.b_state)); in arc_read()
6234 ASSERT3P(hdr->b_l1hdr.b_acb, ==, NULL); in arc_read()
6235 hdr->b_l1hdr.b_acb = acb; in arc_read()
6565 zfs_refcount_is_zero(&hdr->b_l1hdr.b_refcnt)) { in arc_freed()
6599 if (hdr->b_l1hdr.b_state == arc_anon) { in arc_release()
6604 ASSERT3P(hdr->b_l1hdr.b_buf, ==, buf); in arc_release()
6606 ASSERT3S(zfs_refcount_count(&hdr->b_l1hdr.b_refcnt), ==, 1); in arc_release()
6607 ASSERT(!multilist_link_active(&hdr->b_l1hdr.b_arc_node)); in arc_release()
6609 hdr->b_l1hdr.b_arc_access = 0; in arc_release()
6629 arc_state_t *state = hdr->b_l1hdr.b_state; in arc_release()
6635 ASSERT3S(zfs_refcount_count(&hdr->b_l1hdr.b_refcnt), >, 0); in arc_release()
6640 if (hdr->b_l1hdr.b_buf != buf || !ARC_BUF_LAST(buf)) { in arc_release()
6650 ASSERT3P(hdr->b_l1hdr.b_buf, !=, buf); in arc_release()
6685 abd_copy_from_buf(hdr->b_l1hdr.b_pabd, in arc_release()
6702 ASSERT(hdr->b_l1hdr.b_pabd != NULL || HDR_HAS_RABD(hdr)); in arc_release()
6718 ASSERT3P(nhdr->b_l1hdr.b_buf, ==, NULL); in arc_release()
6719 ASSERT0(zfs_refcount_count(&nhdr->b_l1hdr.b_refcnt)); in arc_release()
6723 nhdr->b_l1hdr.b_buf = buf; in arc_release()
6724 (void) zfs_refcount_add(&nhdr->b_l1hdr.b_refcnt, tag); in arc_release()
6730 ASSERT(zfs_refcount_count(&hdr->b_l1hdr.b_refcnt) == 1); in arc_release()
6732 ASSERT(!multilist_link_active(&hdr->b_l1hdr.b_arc_node)); in arc_release()
6743 hdr->b_l1hdr.b_mru_hits = 0; in arc_release()
6744 hdr->b_l1hdr.b_mru_ghost_hits = 0; in arc_release()
6745 hdr->b_l1hdr.b_mfu_hits = 0; in arc_release()
6746 hdr->b_l1hdr.b_mfu_ghost_hits = 0; in arc_release()
6748 hdr->b_l1hdr.b_arc_access = 0; in arc_release()
6760 buf->b_hdr->b_l1hdr.b_state == arc_anon); in arc_released()
6767 return (zfs_refcount_count(&buf->b_hdr->b_l1hdr.b_refcnt)); in arc_referenced()
6782 ASSERT(!zfs_refcount_is_zero(&buf->b_hdr->b_l1hdr.b_refcnt)); in arc_write_ready()
6783 ASSERT3P(hdr->b_l1hdr.b_buf, !=, NULL); in arc_write_ready()
6793 if (hdr->b_l1hdr.b_pabd != NULL) { in arc_write_ready()
6805 ASSERT3P(hdr->b_l1hdr.b_pabd, ==, NULL); in arc_write_ready()
6825 hdr->b_l1hdr.b_byteswap = DMU_BSWAP_UINT64; in arc_write_ready()
6827 hdr->b_l1hdr.b_byteswap = in arc_write_ready()
6831 hdr->b_l1hdr.b_byteswap = DMU_BSWAP_NUMFUNCS; in arc_write_ready()
6912 abd_copy(hdr->b_l1hdr.b_pabd, zio->io_abd, psize); in arc_write_ready()
6916 abd_copy_from_buf(hdr->b_l1hdr.b_pabd, buf->b_data, in arc_write_ready()
6922 ASSERT3P(hdr->b_l1hdr.b_buf, ==, buf); in arc_write_ready()
6949 ASSERT3P(hdr->b_l1hdr.b_acb, ==, NULL); in arc_write_done()
6990 &exists->b_l1hdr.b_refcnt)); in arc_write_done()
7004 ASSERT3P(hdr->b_l1hdr.b_buf, !=, NULL); in arc_write_done()
7005 ASSERT(ARC_BUF_LAST(hdr->b_l1hdr.b_buf)); in arc_write_done()
7006 ASSERT(hdr->b_l1hdr.b_state == arc_anon); in arc_write_done()
7014 if (exists == NULL && hdr->b_l1hdr.b_state == arc_anon) in arc_write_done()
7045 ASSERT3P(hdr->b_l1hdr.b_acb, ==, NULL); in arc_write()
7046 ASSERT3P(hdr->b_l1hdr.b_buf, !=, NULL); in arc_write()
7058 (hdr->b_l1hdr.b_byteswap == DMU_BSWAP_NUMFUNCS) ? in arc_write()
7091 if (hdr->b_l1hdr.b_pabd != NULL) { in arc_write()
7114 ASSERT3P(hdr->b_l1hdr.b_pabd, ==, NULL); in arc_write()
7618 offsetof(arc_buf_hdr_t, b_l1hdr.b_arc_node), index_func); in arc_state_multilist_init()
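
arc_state_multilist_init() above hands multilist_create() the offset of b_l1hdr.b_arc_node inside arc_buf_hdr_t, the usual intrusive-list arrangement: the link lives inside the object, and the list translates between object and link with that offset. A minimal sketch of offset-based intrusive linkage (generic types, not the multilist API):

#include <stddef.h>

typedef struct list_node {
	struct list_node	*ln_next;
	struct list_node	*ln_prev;
} list_node_t;

typedef struct list {
	list_node_t	l_head;		/* circular sentinel */
	size_t		l_offset;	/* where the node sits inside each object */
} list_t;

static void
list_create(list_t *l, size_t offset)
{
	l->l_head.ln_next = l->l_head.ln_prev = &l->l_head;
	l->l_offset = offset;
}

/* Object -> embedded node, using the offset given at list_create() time. */
static list_node_t *
obj_to_node(list_t *l, void *obj)
{
	return ((list_node_t *)((char *)obj + l->l_offset));
}

/* Embedded node -> containing object (the container_of direction). */
static void *
node_to_obj(list_t *l, list_node_t *ln)
{
	return ((char *)ln - l->l_offset);
}

static void
list_insert_head(list_t *l, void *obj)
{
	list_node_t *ln = obj_to_node(l, obj);

	ln->ln_next = l->l_head.ln_next;
	ln->ln_prev = &l->l_head;
	l->l_head.ln_next->ln_prev = ln;
	l->l_head.ln_next = ln;
}

This is also why the multilist_link_init()/multilist_link_active() calls elsewhere in this listing take &hdr->b_l1hdr.b_arc_node directly: list membership is a property of the embedded node, with no separate allocation or lookup.
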
8819 ASSERT3P(hdr->b_l1hdr.b_pabd, !=, NULL); in l2arc_untransform()
8837 hdr->b_l1hdr.b_pabd, &no_crypt); in l2arc_untransform()
8849 arc_free_data_abd(hdr, hdr->b_l1hdr.b_pabd, in l2arc_untransform()
8851 hdr->b_l1hdr.b_pabd = eabd; in l2arc_untransform()
8869 hdr->b_l1hdr.b_pabd, cabd, HDR_GET_PSIZE(hdr), in l2arc_untransform()
8876 arc_free_data_abd(hdr, hdr->b_l1hdr.b_pabd, in l2arc_untransform()
8878 hdr->b_l1hdr.b_pabd = cabd; in l2arc_untransform()
8929 abd_copy(hdr->b_l1hdr.b_pabd, in l2arc_read_done()
8953 ASSERT3P(hdr->b_l1hdr.b_pabd, !=, NULL); in l2arc_read_done()
8954 zio->io_abd = zio->io_orig_abd = hdr->b_l1hdr.b_pabd; in l2arc_read_done()
8963 ASSERT(zio->io_abd == hdr->b_l1hdr.b_pabd || in l2arc_read_done()
9005 hdr->b_crypt_hdr.b_rabd : hdr->b_l1hdr.b_pabd; in l2arc_read_done()
9019 for (struct arc_callback *acb = hdr->b_l1hdr.b_acb; in l2arc_read_done()
9281 ASSERT(hdr->b_l1hdr.b_state != arc_l2c_only); in l2arc_evict()
9337 abd_t *cabd = NULL, *eabd = NULL, *to_write = hdr->b_l1hdr.b_pabd; in l2arc_apply_transforms()
9342 boolean_t bswap = (hdr->b_l1hdr.b_byteswap != DMU_BSWAP_NUMFUNCS); in l2arc_apply_transforms()
9369 abd_copy(to_write, hdr->b_l1hdr.b_pabd, size); in l2arc_apply_transforms()
9431 ASSERT3P(to_write, !=, hdr->b_l1hdr.b_pabd); in l2arc_apply_transforms()
9556 ASSERT(hdr->b_l1hdr.b_pabd != NULL || in l2arc_write_buffers()
9608 to_write = hdr->b_l1hdr.b_pabd; in l2arc_write_buffers()
9629 hdr->b_l1hdr.b_state->arcs_state; in l2arc_write_buffers()