Lines Matching defs:hdr
789 #define HDR_IN_HASH_TABLE(hdr) ((hdr)->b_flags & ARC_FLAG_IN_HASH_TABLE)
790 #define HDR_IO_IN_PROGRESS(hdr) ((hdr)->b_flags & ARC_FLAG_IO_IN_PROGRESS)
791 #define HDR_IO_ERROR(hdr) ((hdr)->b_flags & ARC_FLAG_IO_ERROR)
792 #define HDR_PREFETCH(hdr) ((hdr)->b_flags & ARC_FLAG_PREFETCH)
793 #define HDR_FREED_IN_READ(hdr) ((hdr)->b_flags & ARC_FLAG_FREED_IN_READ)
794 #define HDR_BUF_AVAILABLE(hdr) ((hdr)->b_flags & ARC_FLAG_BUF_AVAILABLE)
796 #define HDR_L2CACHE(hdr) ((hdr)->b_flags & ARC_FLAG_L2CACHE)
797 #define HDR_L2COMPRESS(hdr) ((hdr)->b_flags & ARC_FLAG_L2COMPRESS)
798 #define HDR_L2_READING(hdr) \
799 (((hdr)->b_flags & ARC_FLAG_IO_IN_PROGRESS) && \
800 ((hdr)->b_flags & ARC_FLAG_HAS_L2HDR))
801 #define HDR_L2_WRITING(hdr) ((hdr)->b_flags & ARC_FLAG_L2_WRITING)
802 #define HDR_L2_EVICTED(hdr) ((hdr)->b_flags & ARC_FLAG_L2_EVICTED)
803 #define HDR_L2_WRITE_HEAD(hdr) ((hdr)->b_flags & ARC_FLAG_L2_WRITE_HEAD)
805 #define HDR_ISTYPE_METADATA(hdr) \
806 ((hdr)->b_flags & ARC_FLAG_BUFC_METADATA)
807 #define HDR_ISTYPE_DATA(hdr) (!HDR_ISTYPE_METADATA(hdr))
809 #define HDR_HAS_L1HDR(hdr) ((hdr)->b_flags & ARC_FLAG_HAS_L1HDR)
810 #define HDR_HAS_L2HDR(hdr) ((hdr)->b_flags & ARC_FLAG_HAS_L2HDR)
845 #define HDR_LOCK(hdr) \
846 (BUF_HASH_LOCK(BUF_HASH_INDEX(hdr->b_spa, &hdr->b_dva, hdr->b_birth)))
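Every HDR_*() macro above follows the same pattern: it masks b_flags with one ARC_FLAG_* bit (or, for HDR_L2_READING(), a combination of bits), so callers test header state without touching the flag word directly. Below is a minimal, self-contained sketch of that pattern; the struct, flag values, and macro names are illustrative stand-ins, not the arc.c definitions.

#include <stdint.h>
#include <stdio.h>

/* Illustrative sketch; not from arc.c. These flag bits are invented. */
#define EX_FLAG_IN_HASH_TABLE   (1u << 0)
#define EX_FLAG_IO_IN_PROGRESS  (1u << 1)
#define EX_FLAG_HAS_L2HDR       (1u << 2)

typedef struct ex_hdr {
    uint32_t b_flags;
} ex_hdr_t;

/* Single-bit test, mirroring HDR_IN_HASH_TABLE(). */
#define EX_IN_HASH_TABLE(hdr)   ((hdr)->b_flags & EX_FLAG_IN_HASH_TABLE)
/* Compound test, mirroring HDR_L2_READING(): both bits must be set. */
#define EX_L2_READING(hdr) \
    (((hdr)->b_flags & EX_FLAG_IO_IN_PROGRESS) && \
    ((hdr)->b_flags & EX_FLAG_HAS_L2HDR))

int
main(void)
{
    ex_hdr_t hdr = { .b_flags = EX_FLAG_IO_IN_PROGRESS | EX_FLAG_HAS_L2HDR };

    printf("in hash table: %s\n", EX_IN_HASH_TABLE(&hdr) ? "yes" : "no");
    printf("l2 reading:    %s\n", EX_L2_READING(&hdr) ? "yes" : "no");
    return (0);
}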
982 buf_discard_identity(arc_buf_hdr_t *hdr)
984 hdr->b_dva.dva_word[0] = 0;
985 hdr->b_dva.dva_word[1] = 0;
986 hdr->b_birth = 0;
996 arc_buf_hdr_t *hdr;
999 for (hdr = buf_hash_table.ht_table[idx]; hdr != NULL;
1000 hdr = hdr->b_hash_next) {
1001 if (BUF_EQUAL(spa, dva, birth, hdr)) {
1003 return (hdr);
1019 buf_hash_insert(arc_buf_hdr_t *hdr, kmutex_t **lockp)
1021 uint64_t idx = BUF_HASH_INDEX(hdr->b_spa, &hdr->b_dva, hdr->b_birth);
1026 ASSERT(!DVA_IS_EMPTY(&hdr->b_dva));
1027 ASSERT(hdr->b_birth != 0);
1028 ASSERT(!HDR_IN_HASH_TABLE(hdr));
1039 if (BUF_EQUAL(hdr->b_spa, &hdr->b_dva, hdr->b_birth, fhdr))
1043 hdr->b_hash_next = buf_hash_table.ht_table[idx];
1044 buf_hash_table.ht_table[idx] = hdr;
1045 hdr->b_flags |= ARC_FLAG_IN_HASH_TABLE;
1063 buf_hash_remove(arc_buf_hdr_t *hdr)
1066 uint64_t idx = BUF_HASH_INDEX(hdr->b_spa, &hdr->b_dva, hdr->b_birth);
1069 ASSERT(HDR_IN_HASH_TABLE(hdr));
1072 while ((fhdr = *hdrp) != hdr) {
1076 *hdrp = hdr->b_hash_next;
1077 hdr->b_hash_next = NULL;
1078 hdr->b_flags &= ~ARC_FLAG_IN_HASH_TABLE;
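buf_hash_insert() (lines 1019-1045) pushes the header onto the front of its bucket's singly linked chain and sets ARC_FLAG_IN_HASH_TABLE; buf_hash_remove() (lines 1063-1078) walks the chain through a pointer-to-pointer until it reaches the header, then splices it out. The stand-alone sketch below shows just that chain manipulation; the types, hash function, and table size are illustrative, and the per-bucket locking and collision check of the real code are omitted.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* Illustrative sketch; not from arc.c. */
#define EX_HT_SIZE 8    /* power of two, so the index can be masked */

typedef struct ex_hdr {
    uint64_t      h_key;
    struct ex_hdr *h_hash_next;
} ex_hdr_t;

static ex_hdr_t *ex_table[EX_HT_SIZE];

static uint64_t
ex_hash_index(uint64_t key)
{
    return (key & (EX_HT_SIZE - 1));
}

/* Push onto the head of the bucket chain, as buf_hash_insert() does. */
static void
ex_hash_insert(ex_hdr_t *hdr)
{
    uint64_t idx = ex_hash_index(hdr->h_key);

    hdr->h_hash_next = ex_table[idx];
    ex_table[idx] = hdr;
}

/* Unlink via a pointer-to-pointer walk, as buf_hash_remove() does. */
static void
ex_hash_remove(ex_hdr_t *hdr)
{
    ex_hdr_t **hdrp = &ex_table[ex_hash_index(hdr->h_key)];

    while (*hdrp != hdr)
        hdrp = &(*hdrp)->h_hash_next;
    *hdrp = hdr->h_hash_next;
    hdr->h_hash_next = NULL;
}

int
main(void)
{
    ex_hdr_t a = { .h_key = 1 }, b = { .h_key = 9 };    /* same bucket */

    ex_hash_insert(&a);
    ex_hash_insert(&b);
    ex_hash_remove(&a);
    printf("bucket head key: %llu\n",
        (unsigned long long)ex_table[1]->h_key);
    return (0);
}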
1117 arc_buf_hdr_t *hdr = vbuf;
1119 bzero(hdr, HDR_FULL_SIZE);
1120 cv_init(&hdr->b_l1hdr.b_cv, NULL, CV_DEFAULT, NULL);
1121 refcount_create(&hdr->b_l1hdr.b_refcnt);
1122 mutex_init(&hdr->b_l1hdr.b_freeze_lock, NULL, MUTEX_DEFAULT, NULL);
1123 multilist_link_init(&hdr->b_l1hdr.b_arc_node);
1133 arc_buf_hdr_t *hdr = vbuf;
1135 bzero(hdr, HDR_L2ONLY_SIZE);
1162 arc_buf_hdr_t *hdr = vbuf;
1164 ASSERT(BUF_EMPTY(hdr));
1165 cv_destroy(&hdr->b_l1hdr.b_cv);
1166 refcount_destroy(&hdr->b_l1hdr.b_refcnt);
1167 mutex_destroy(&hdr->b_l1hdr.b_freeze_lock);
1168 ASSERT(!multilist_link_active(&hdr->b_l1hdr.b_arc_node));
1176 arc_buf_hdr_t *hdr = vbuf;
1178 ASSERT(BUF_EMPTY(hdr));
1259 arc_hdr_realloc(arc_buf_hdr_t *hdr, kmem_cache_t *old, kmem_cache_t *new)
1261 ASSERT(HDR_HAS_L2HDR(hdr));
1264 l2arc_dev_t *dev = hdr->b_l2hdr.b_dev;
1271 ASSERT(MUTEX_HELD(HDR_LOCK(hdr)));
1272 buf_hash_remove(hdr);
1274 bcopy(hdr, nhdr, HDR_L2ONLY_SIZE);
1288 ASSERT(hdr->b_l1hdr.b_buf == NULL);
1289 ASSERT0(hdr->b_l1hdr.b_datacnt);
1298 ASSERT(!multilist_link_active(&hdr->b_l1hdr.b_arc_node));
1306 VERIFY(!HDR_L2_WRITING(hdr));
1307 VERIFY3P(hdr->b_l1hdr.b_tmp_cdata, ==, NULL);
1310 if (hdr->b_l1hdr.b_thawed != NULL) {
1311 kmem_free(hdr->b_l1hdr.b_thawed, 1);
1312 hdr->b_l1hdr.b_thawed = NULL;
1324 ASSERT(list_link_active(&hdr->b_l2hdr.b_l2node));
1334 list_insert_after(&dev->l2ad_buflist, hdr, nhdr);
1335 list_remove(&dev->l2ad_buflist, hdr);
1348 hdr->b_l2hdr.b_asize, hdr);
1353 buf_discard_identity(hdr);
1354 hdr->b_freeze_cksum = NULL;
1355 kmem_cache_free(old, hdr);
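arc_hdr_realloc() (lines 1259-1355) moves a header between the full and the L2-only kmem caches: it allocates from the new cache, copies only the shared L2-only prefix (bcopy of HDR_L2ONLY_SIZE), swaps the old header for the new one on the device's buffer list, and frees the old one. Below is a reduced stand-alone sketch of the copy-the-common-prefix idea; the struct layout and helper names are hypothetical, and plain calloc/free stand in for the kmem caches.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/*
 * Illustrative sketch; not from arc.c. The "L2-only" fields form a prefix
 * of the full header, so a compact allocation is just the first part of
 * the struct.
 */
typedef struct ex_l1 {
    uint64_t l1_refcnt;
    void     *l1_buf;
} ex_l1_t;

typedef struct ex_hdr {
    uint64_t h_key;     /* shared (L2-only) fields ... */
    uint32_t h_flags;
    ex_l1_t  h_l1;      /* ... followed by L1-only fields */
} ex_hdr_t;

#define EX_L2ONLY_SIZE  offsetof(ex_hdr_t, h_l1)
#define EX_FULL_SIZE    sizeof (ex_hdr_t)

/* Shrink a full header to an L2-only one, keeping the shared prefix. */
static ex_hdr_t *
ex_hdr_to_l2only(ex_hdr_t *hdr)
{
    /* The real code allocates from a smaller, L2-only kmem cache. */
    ex_hdr_t *nhdr = calloc(1, EX_FULL_SIZE);

    memcpy(nhdr, hdr, EX_L2ONLY_SIZE);  /* like bcopy(hdr, nhdr, HDR_L2ONLY_SIZE) */
    free(hdr);                          /* like kmem_cache_free(old, hdr) */
    return (nhdr);
}

int
main(void)
{
    ex_hdr_t *hdr = calloc(1, EX_FULL_SIZE);

    hdr->h_key = 42;
    hdr->h_l1.l1_refcnt = 3;    /* L1 state is dropped by the shrink */
    hdr = ex_hdr_to_l2only(hdr);
    printf("key %llu, refcnt %llu\n",
        (unsigned long long)hdr->h_key,
        (unsigned long long)hdr->h_l1.l1_refcnt);
    free(hdr);
    return (0);
}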
1458 arc_buf_type(arc_buf_hdr_t *hdr)
1460 if (HDR_ISTYPE_METADATA(hdr)) {
1532 add_reference(arc_buf_hdr_t *hdr, kmutex_t *hash_lock, void *tag)
1534 ASSERT(HDR_HAS_L1HDR(hdr));
1536 arc_state_t *state = hdr->b_l1hdr.b_state;
1538 if ((refcount_add(&hdr->b_l1hdr.b_refcnt, tag) == 1) &&
1542 arc_buf_contents_t type = arc_buf_type(hdr);
1543 uint64_t delta = hdr->b_size * hdr->b_l1hdr.b_datacnt;
1547 multilist_remove(list, hdr);
1550 ASSERT0(hdr->b_l1hdr.b_datacnt);
1551 ASSERT3P(hdr->b_l1hdr.b_buf, ==, NULL);
1552 delta = hdr->b_size;
1559 hdr->b_flags &= ~ARC_FLAG_PREFETCH;
1564 remove_reference(arc_buf_hdr_t *hdr, kmutex_t *hash_lock, void *tag)
1567 arc_state_t *state = hdr->b_l1hdr.b_state;
1569 ASSERT(HDR_HAS_L1HDR(hdr));
1577 if (((cnt = refcount_remove(&hdr->b_l1hdr.b_refcnt, tag)) == 0) &&
1579 arc_buf_contents_t type = arc_buf_type(hdr);
1583 multilist_insert(list, hdr);
1585 ASSERT(hdr->b_l1hdr.b_datacnt > 0);
1586 atomic_add_64(size, hdr->b_size *
1587 hdr->b_l1hdr.b_datacnt);
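add_reference() (lines 1532-1559) pulls the header off its state's evictable multilist and shrinks the evictable-size counter when the refcount goes from zero to one; remove_reference() (lines 1564-1587) reverses that when the last hold is dropped, making the buffers evictable again. A minimal sketch of that bookkeeping, with an invented flag standing in for multilist membership and a single counter standing in for arcs_lsize[]:

#include <stdint.h>
#include <stdio.h>

/* Illustrative sketch; not from arc.c. */
typedef struct ex_hdr {
    uint64_t h_size;
    uint64_t h_refcnt;
    int      h_on_list;     /* stands in for multilist membership */
} ex_hdr_t;

static uint64_t ex_evictable_size;  /* like arcs_lsize[] for one state */

static void
ex_add_reference(ex_hdr_t *hdr)
{
    if (hdr->h_refcnt++ == 0 && hdr->h_on_list) {
        /* first hold: pull it off the evictable list */
        hdr->h_on_list = 0;
        ex_evictable_size -= hdr->h_size;
    }
}

static void
ex_remove_reference(ex_hdr_t *hdr)
{
    if (--hdr->h_refcnt == 0) {
        /* last hold dropped: it becomes evictable again */
        hdr->h_on_list = 1;
        ex_evictable_size += hdr->h_size;
    }
}

int
main(void)
{
    ex_hdr_t hdr = { .h_size = 4096, .h_on_list = 1 };

    ex_evictable_size = hdr.h_size;
    ex_add_reference(&hdr);
    printf("evictable after hold: %llu\n",
        (unsigned long long)ex_evictable_size);
    ex_remove_reference(&hdr);
    printf("evictable after release: %llu\n",
        (unsigned long long)ex_evictable_size);
    return (0);
}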
1597 arc_change_state(arc_state_t *new_state, arc_buf_hdr_t *hdr,
1604 arc_buf_contents_t buftype = arc_buf_type(hdr);
1607 * We almost always have an L1 hdr here, since we call arc_hdr_realloc()
1609 * L1 hdr doesn't always exist when we change state to arc_anon before
1610 * destroying a header, in which case reallocating to add the L1 hdr is
1613 if (HDR_HAS_L1HDR(hdr)) {
1614 old_state = hdr->b_l1hdr.b_state;
1615 refcnt = refcount_count(&hdr->b_l1hdr.b_refcnt);
1616 datacnt = hdr->b_l1hdr.b_datacnt;
1629 from_delta = to_delta = datacnt * hdr->b_size;
1639 ASSERT(HDR_HAS_L1HDR(hdr));
1640 multilist_remove(&old_state->arcs_list[buftype], hdr);
1648 ASSERT(hdr->b_l1hdr.b_buf == NULL);
1649 from_delta = hdr->b_size;
1663 ASSERT(HDR_HAS_L1HDR(hdr));
1664 multilist_insert(&new_state->arcs_list[buftype], hdr);
1669 ASSERT(hdr->b_l1hdr.b_buf == NULL);
1670 to_delta = hdr->b_size;
1676 ASSERT(!BUF_EMPTY(hdr));
1677 if (new_state == arc_anon && HDR_IN_HASH_TABLE(hdr))
1678 buf_hash_remove(hdr);
1683 ASSERT(HDR_HAS_L1HDR(hdr));
1695 hdr->b_size, hdr);
1704 for (arc_buf_t *buf = hdr->b_l1hdr.b_buf; buf != NULL;
1707 hdr->b_size, buf);
1713 ASSERT(HDR_HAS_L1HDR(hdr));
1731 hdr->b_size, hdr);
1740 for (arc_buf_t *buf = hdr->b_l1hdr.b_buf; buf != NULL;
1743 &old_state->arcs_size, hdr->b_size, buf);
1748 if (HDR_HAS_L1HDR(hdr))
1749 hdr->b_l1hdr.b_state = new_state;
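arc_change_state() (lines 1597-1749) moves a header between ARC states: it removes the header from the old state's list, inserts it on the new state's list, and shifts the accounted bytes from the old state's size counters to the new state's before updating b_l1hdr.b_state. A compact sketch of the move-the-bytes step, with illustrative state objects and none of the refcount, ghost-state, or datacnt special cases:

#include <stdint.h>
#include <stdio.h>

/* Illustrative sketch; not from arc.c. */
typedef struct ex_state {
    const char *s_name;
    uint64_t   s_size;      /* like arcs_size */
} ex_state_t;

typedef struct ex_hdr {
    uint64_t   h_size;
    ex_state_t *h_state;
} ex_hdr_t;

static void
ex_change_state(ex_state_t *new_state, ex_hdr_t *hdr)
{
    ex_state_t *old_state = hdr->h_state;

    if (old_state != new_state) {
        old_state->s_size -= hdr->h_size;   /* leave the old state */
        new_state->s_size += hdr->h_size;   /* arrive in the new state */
        hdr->h_state = new_state;
    }
}

int
main(void)
{
    ex_state_t mru = { "mru", 0 }, mfu = { "mfu", 0 };
    ex_hdr_t hdr = { .h_size = 8192, .h_state = &mru };

    mru.s_size = hdr.h_size;
    ex_change_state(&mfu, &hdr);
    printf("%s=%llu %s=%llu\n", mru.s_name,
        (unsigned long long)mru.s_size, mfu.s_name,
        (unsigned long long)mfu.s_size);
    return (0);
}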
1825 arc_buf_hdr_t *hdr;
1829 hdr = kmem_cache_alloc(hdr_full_cache, KM_PUSHPAGE);
1830 ASSERT(BUF_EMPTY(hdr));
1831 ASSERT3P(hdr->b_freeze_cksum, ==, NULL);
1832 hdr->b_size = size;
1833 hdr->b_spa = spa_load_guid(spa);
1836 buf->b_hdr = hdr;
1842 hdr->b_flags = arc_bufc_to_flags(type);
1843 hdr->b_flags |= ARC_FLAG_HAS_L1HDR;
1845 hdr->b_l1hdr.b_buf = buf;
1846 hdr->b_l1hdr.b_state = arc_anon;
1847 hdr->b_l1hdr.b_arc_access = 0;
1848 hdr->b_l1hdr.b_datacnt = 1;
1849 hdr->b_l1hdr.b_tmp_cdata = NULL;
1852 ASSERT(refcount_is_zero(&hdr->b_l1hdr.b_refcnt));
1853 (void) refcount_add(&hdr->b_l1hdr.b_refcnt, tag);
1883 arc_buf_hdr_t *hdr = buf->b_hdr;
1886 ASSERT(HDR_HAS_L1HDR(hdr));
1887 (void) refcount_add(&hdr->b_l1hdr.b_refcnt, tag);
1888 (void) refcount_remove(&hdr->b_l1hdr.b_refcnt, arc_onloan_tag);
1890 atomic_add_64(&arc_loaned_bytes, -hdr->b_size);
1897 arc_buf_hdr_t *hdr = buf->b_hdr;
1900 ASSERT(HDR_HAS_L1HDR(hdr));
1901 (void) refcount_add(&hdr->b_l1hdr.b_refcnt, arc_onloan_tag);
1902 (void) refcount_remove(&hdr->b_l1hdr.b_refcnt, tag);
1906 atomic_add_64(&arc_loaned_bytes, hdr->b_size);
1913 arc_buf_hdr_t *hdr = from->b_hdr;
1914 uint64_t size = hdr->b_size;
1916 ASSERT(HDR_HAS_L1HDR(hdr));
1917 ASSERT(hdr->b_l1hdr.b_state != arc_anon);
1920 buf->b_hdr = hdr;
1924 buf->b_next = hdr->b_l1hdr.b_buf;
1925 hdr->b_l1hdr.b_buf = buf;
1935 if (HDR_ISTYPE_DATA(hdr)) {
1939 hdr->b_l1hdr.b_datacnt += 1;
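arc_buf_alloc() and arc_buf_clone() (around lines 1825-1939) attach each arc_buf_t to its header by pushing the buf onto the singly linked b_l1hdr.b_buf list through b_next and bumping b_l1hdr.b_datacnt, so several bufs can share one header and identity. A stand-alone sketch of that push-and-count pattern with invented types:

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Illustrative sketch; not from arc.c. */
typedef struct ex_buf {
    struct ex_hdr *b_hdr;
    struct ex_buf *b_next;
} ex_buf_t;

typedef struct ex_hdr {
    ex_buf_t *h_buf;        /* like b_l1hdr.b_buf */
    uint32_t h_datacnt;     /* like b_l1hdr.b_datacnt */
} ex_hdr_t;

/* Attach a new buf to the header, as arc_buf_clone() does. */
static ex_buf_t *
ex_buf_attach(ex_hdr_t *hdr)
{
    ex_buf_t *buf = calloc(1, sizeof (*buf));

    buf->b_hdr = hdr;
    buf->b_next = hdr->h_buf;   /* push onto the buf list */
    hdr->h_buf = buf;
    hdr->h_datacnt += 1;
    return (buf);
}

int
main(void)
{
    ex_hdr_t hdr = { 0 };
    ex_buf_t *a = ex_buf_attach(&hdr);
    ex_buf_t *b = ex_buf_attach(&hdr);

    printf("datacnt=%u, head is %s\n", hdr.h_datacnt,
        hdr.h_buf == b ? "the newest buf" : "the oldest buf");
    free(a);
    free(b);
    return (0);
}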
1946 arc_buf_hdr_t *hdr;
1961 hdr = buf->b_hdr;
1962 ASSERT(HDR_HAS_L1HDR(hdr));
1963 ASSERT3P(hash_lock, ==, HDR_LOCK(hdr));
1966 ASSERT(hdr->b_l1hdr.b_state == arc_mru ||
1967 hdr->b_l1hdr.b_state == arc_mfu);
1969 add_reference(hdr, hash_lock, tag);
1970 DTRACE_PROBE1(arc__hit, arc_buf_hdr_t *, hdr);
1971 arc_access(hdr, hash_lock);
1974 ARCSTAT_CONDSTAT(!HDR_PREFETCH(hdr),
1975 demand, prefetch, !HDR_ISTYPE_METADATA(hdr),
2001 arc_buf_hdr_t *hdr = buf->b_hdr;
2003 if (HDR_L2_WRITING(hdr)) {
2004 arc_buf_free_on_write(buf->b_data, hdr->b_size, free_func);
2007 free_func(buf->b_data, hdr->b_size);
2012 arc_buf_l2_cdata_free(arc_buf_hdr_t *hdr)
2014 ASSERT(HDR_HAS_L2HDR(hdr));
2015 ASSERT(MUTEX_HELD(&hdr->b_l2hdr.b_dev->l2ad_mtx));
2022 if (!HDR_HAS_L1HDR(hdr))
2029 if (!HDR_L2_WRITING(hdr)) {
2030 ASSERT3P(hdr->b_l1hdr.b_tmp_cdata, ==, NULL);
2043 if (hdr->b_l2hdr.b_compress == ZIO_COMPRESS_OFF) {
2044 hdr->b_l1hdr.b_tmp_cdata = NULL;
2052 if (hdr->b_l2hdr.b_compress == ZIO_COMPRESS_EMPTY) {
2053 ASSERT3P(hdr->b_l1hdr.b_tmp_cdata, ==, NULL);
2057 ASSERT(L2ARC_IS_VALID_COMPRESS(hdr->b_l2hdr.b_compress));
2059 arc_buf_free_on_write(hdr->b_l1hdr.b_tmp_cdata,
2060 hdr->b_size, zio_data_buf_free);
2063 hdr->b_l1hdr.b_tmp_cdata = NULL;
2125 /* remove the buf from the hdr list */
2140 arc_hdr_l2hdr_destroy(arc_buf_hdr_t *hdr)
2142 l2arc_buf_hdr_t *l2hdr = &hdr->b_l2hdr;
2146 ASSERT(HDR_HAS_L2HDR(hdr));
2148 list_remove(&dev->l2ad_buflist, hdr);
2154 arc_buf_l2_cdata_free(hdr);
2166 IMPLY(l2hdr->b_daddr == L2ARC_ADDR_UNSET, HDR_L2_WRITING(hdr));
2178 ARCSTAT_INCR(arcstat_l2_size, -hdr->b_size);
2184 l2hdr->b_asize, hdr);
2187 hdr->b_flags &= ~ARC_FLAG_HAS_L2HDR;
2191 arc_hdr_destroy(arc_buf_hdr_t *hdr)
2193 if (HDR_HAS_L1HDR(hdr)) {
2194 ASSERT(hdr->b_l1hdr.b_buf == NULL ||
2195 hdr->b_l1hdr.b_datacnt > 0);
2196 ASSERT(refcount_is_zero(&hdr->b_l1hdr.b_refcnt));
2197 ASSERT3P(hdr->b_l1hdr.b_state, ==, arc_anon);
2199 ASSERT(!HDR_IO_IN_PROGRESS(hdr));
2200 ASSERT(!HDR_IN_HASH_TABLE(hdr));
2202 if (HDR_HAS_L2HDR(hdr)) {
2203 l2arc_dev_t *dev = hdr->b_l2hdr.b_dev;
2218 if (HDR_HAS_L2HDR(hdr))
2219 arc_hdr_l2hdr_destroy(hdr);
2225 if (!BUF_EMPTY(hdr))
2226 buf_discard_identity(hdr);
2228 if (hdr->b_freeze_cksum != NULL) {
2229 kmem_free(hdr->b_freeze_cksum, sizeof (zio_cksum_t));
2230 hdr->b_freeze_cksum = NULL;
2233 if (HDR_HAS_L1HDR(hdr)) {
2234 while (hdr->b_l1hdr.b_buf) {
2235 arc_buf_t *buf = hdr->b_l1hdr.b_buf;
2241 arc_buf_destroy(hdr->b_l1hdr.b_buf, FALSE);
2242 hdr->b_l1hdr.b_buf = buf->b_next;
2250 arc_buf_destroy(hdr->b_l1hdr.b_buf, TRUE);
2254 if (hdr->b_l1hdr.b_thawed != NULL) {
2255 kmem_free(hdr->b_l1hdr.b_thawed, 1);
2256 hdr->b_l1hdr.b_thawed = NULL;
2261 ASSERT3P(hdr->b_hash_next, ==, NULL);
2262 if (HDR_HAS_L1HDR(hdr)) {
2263 ASSERT(!multilist_link_active(&hdr->b_l1hdr.b_arc_node));
2264 ASSERT3P(hdr->b_l1hdr.b_acb, ==, NULL);
2265 kmem_cache_free(hdr_full_cache, hdr);
2267 kmem_cache_free(hdr_l2only_cache, hdr);
2274 arc_buf_hdr_t *hdr = buf->b_hdr;
2275 int hashed = hdr->b_l1hdr.b_state != arc_anon;
2281 kmutex_t *hash_lock = HDR_LOCK(hdr);
2284 hdr = buf->b_hdr;
2285 ASSERT3P(hash_lock, ==, HDR_LOCK(hdr));
2287 (void) remove_reference(hdr, hash_lock, tag);
2288 if (hdr->b_l1hdr.b_datacnt > 1) {
2291 ASSERT(buf == hdr->b_l1hdr.b_buf);
2293 hdr->b_flags |= ARC_FLAG_BUF_AVAILABLE;
2296 } else if (HDR_IO_IN_PROGRESS(hdr)) {
2304 (void) remove_reference(hdr, NULL, tag);
2305 ASSERT(refcount_is_zero(&hdr->b_l1hdr.b_refcnt));
2306 destroy_hdr = !HDR_IO_IN_PROGRESS(hdr);
2309 arc_hdr_destroy(hdr);
2311 if (remove_reference(hdr, NULL, tag) > 0)
2314 arc_hdr_destroy(hdr);
2321 arc_buf_hdr_t *hdr = buf->b_hdr;
2322 kmutex_t *hash_lock = HDR_LOCK(hdr);
2325 if (hdr->b_l1hdr.b_state == arc_anon) {
2326 ASSERT(hdr->b_l1hdr.b_datacnt == 1);
2332 hdr = buf->b_hdr;
2333 ASSERT(hdr->b_l1hdr.b_datacnt > 0);
2334 ASSERT3P(hash_lock, ==, HDR_LOCK(hdr));
2335 ASSERT(hdr->b_l1hdr.b_state != arc_anon);
2338 (void) remove_reference(hdr, hash_lock, tag);
2339 if (hdr->b_l1hdr.b_datacnt > 1) {
2343 ASSERT(hdr->b_l1hdr.b_buf == buf && buf->b_next == NULL);
2345 hdr->b_flags |= ARC_FLAG_BUF_AVAILABLE;
2347 ASSERT(no_callback || hdr->b_l1hdr.b_datacnt > 1 ||
2348 refcount_is_zero(&hdr->b_l1hdr.b_refcnt));
2368 arc_buf_hdr_t *hdr;
2375 hdr = buf->b_hdr;
2376 if (hdr == NULL) {
2389 ASSERT3P(hdr, ==, &arc_eviction_hdr);
2394 if (hdr->b_l1hdr.b_datacnt > 1 && HDR_ISTYPE_DATA(hdr))
2414 arc_evict_hdr(arc_buf_hdr_t *hdr, kmutex_t *hash_lock)
2420 ASSERT(HDR_HAS_L1HDR(hdr));
2422 state = hdr->b_l1hdr.b_state;
2424 ASSERT(!HDR_IO_IN_PROGRESS(hdr));
2425 ASSERT(hdr->b_l1hdr.b_buf == NULL);
2434 if (HDR_HAS_L2HDR(hdr) && HDR_L2_WRITING(hdr)) {
2440 bytes_evicted += hdr->b_size;
2442 DTRACE_PROBE1(arc__delete, arc_buf_hdr_t *, hdr);
2444 if (HDR_HAS_L2HDR(hdr)) {
2449 arc_change_state(arc_l2c_only, hdr, hash_lock);
2454 hdr = arc_hdr_realloc(hdr, hdr_full_cache,
2457 arc_change_state(arc_anon, hdr, hash_lock);
2458 arc_hdr_destroy(hdr);
2467 if (HDR_IO_IN_PROGRESS(hdr) ||
2468 ((hdr->b_flags & (ARC_FLAG_PREFETCH | ARC_FLAG_INDIRECT)) &&
2469 ddi_get_lbolt() - hdr->b_l1hdr.b_arc_access <
2475 ASSERT0(refcount_count(&hdr->b_l1hdr.b_refcnt));
2476 ASSERT3U(hdr->b_l1hdr.b_datacnt, >, 0);
2477 while (hdr->b_l1hdr.b_buf) {
2478 arc_buf_t *buf = hdr->b_l1hdr.b_buf;
2484 bytes_evicted += hdr->b_size;
2488 hdr->b_l1hdr.b_buf = buf->b_next;
2501 if (HDR_HAS_L2HDR(hdr)) {
2502 ARCSTAT_INCR(arcstat_evict_l2_cached, hdr->b_size);
2504 if (l2arc_write_eligible(hdr->b_spa, UINT64_MAX, hdr))
2505 ARCSTAT_INCR(arcstat_evict_l2_eligible, hdr->b_size);
2507 ARCSTAT_INCR(arcstat_evict_l2_ineligible, hdr->b_size);
2510 if (hdr->b_l1hdr.b_datacnt == 0) {
2511 arc_change_state(evicted_state, hdr, hash_lock);
2512 ASSERT(HDR_IN_HASH_TABLE(hdr));
2513 hdr->b_flags |= ARC_FLAG_IN_HASH_TABLE;
2514 hdr->b_flags &= ~ARC_FLAG_BUF_AVAILABLE;
2515 DTRACE_PROBE1(arc__evict, arc_buf_hdr_t *, hdr);
2527 arc_buf_hdr_t *hdr;
2536 for (hdr = multilist_sublist_prev(mls, marker); hdr != NULL;
2537 hdr = multilist_sublist_prev(mls, marker)) {
2544 * forward. Since we're not holding hdr's hash lock, we
2545 * must be very careful and not remove 'hdr' from the
2547 * 'hdr' as not being on a sublist when they call the
2564 if (hdr->b_spa == 0)
2568 if (spa != 0 && hdr->b_spa != spa) {
2573 hash_lock = HDR_LOCK(hdr);
2587 uint64_t evicted = arc_evict_hdr(hdr, hash_lock);
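The eviction walk above (lines 2527-2587) saves its next position with multilist_sublist_prev() before handing the current header to arc_evict_hdr(), because eviction may unlink that header from the sublist; l2arc_write_done() and l2arc_evict() later in this listing use the same save-before-visit idiom with hdr_prev. A generic stand-alone sketch of the idiom on a simple singly linked list (illustrative types; the real code walks a multilist under per-sublist locks):

#include <stdio.h>
#include <stdlib.h>

/* Illustrative sketch; not from arc.c. */
typedef struct ex_node {
    int            n_val;
    struct ex_node *n_next;
} ex_node_t;

/*
 * Remove nodes with odd values. The successor is captured before the node
 * is examined, because unlinking and freeing the node invalidates it; this
 * mirrors the hdr_prev = list_prev(...) capture in the ARC loops.
 */
static void
ex_evict_odd(ex_node_t **headp)
{
    ex_node_t **linkp = headp;
    ex_node_t *node, *node_next;

    for (node = *headp; node != NULL; node = node_next) {
        node_next = node->n_next;   /* save before possible removal */
        if (node->n_val & 1) {
            *linkp = node_next;     /* unlink */
            free(node);
        } else {
            linkp = &node->n_next;
        }
    }
}

static ex_node_t *
ex_push(ex_node_t *head, int val)
{
    ex_node_t *n = malloc(sizeof (*n));

    n->n_val = val;
    n->n_next = head;
    return (n);
}

int
main(void)
{
    ex_node_t *head = NULL;

    for (int i = 1; i <= 5; i++)
        head = ex_push(head, i);
    ex_evict_odd(&head);
    for (ex_node_t *n = head; n != NULL; n = n->n_next)
        printf("%d ", n->n_val);
    printf("\n");
    return (0);
}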
3633 arc_buf_hdr_t *hdr = buf->b_hdr;
3634 arc_state_t *state = hdr->b_l1hdr.b_state;
3647 if (multilist_link_active(&hdr->b_l1hdr.b_arc_node)) {
3648 ASSERT(refcount_is_zero(&hdr->b_l1hdr.b_refcnt));
3649 atomic_add_64(&hdr->b_l1hdr.b_state->arcs_lsize[type],
3656 if (arc_size < arc_c && hdr->b_l1hdr.b_state == arc_anon &&
3668 arc_access(arc_buf_hdr_t *hdr, kmutex_t *hash_lock)
3673 ASSERT(HDR_HAS_L1HDR(hdr));
3675 if (hdr->b_l1hdr.b_state == arc_anon) {
3682 ASSERT0(hdr->b_l1hdr.b_arc_access);
3683 hdr->b_l1hdr.b_arc_access = ddi_get_lbolt();
3684 DTRACE_PROBE1(new_state__mru, arc_buf_hdr_t *, hdr);
3685 arc_change_state(arc_mru, hdr, hash_lock);
3687 } else if (hdr->b_l1hdr.b_state == arc_mru) {
3698 if (HDR_PREFETCH(hdr)) {
3699 if (refcount_count(&hdr->b_l1hdr.b_refcnt) == 0) {
3702 &hdr->b_l1hdr.b_arc_node));
3704 hdr->b_flags &= ~ARC_FLAG_PREFETCH;
3707 hdr->b_l1hdr.b_arc_access = now;
3716 if (now > hdr->b_l1hdr.b_arc_access + ARC_MINTIME) {
3722 hdr->b_l1hdr.b_arc_access = now;
3723 DTRACE_PROBE1(new_state__mfu, arc_buf_hdr_t *, hdr);
3724 arc_change_state(arc_mfu, hdr, hash_lock);
3727 } else if (hdr->b_l1hdr.b_state == arc_mru_ghost) {
3735 if (HDR_PREFETCH(hdr)) {
3737 if (refcount_count(&hdr->b_l1hdr.b_refcnt) > 0)
3738 hdr->b_flags &= ~ARC_FLAG_PREFETCH;
3739 DTRACE_PROBE1(new_state__mru, arc_buf_hdr_t *, hdr);
3742 DTRACE_PROBE1(new_state__mfu, arc_buf_hdr_t *, hdr);
3745 hdr->b_l1hdr.b_arc_access = ddi_get_lbolt();
3746 arc_change_state(new_state, hdr, hash_lock);
3749 } else if (hdr->b_l1hdr.b_state == arc_mfu) {
3759 if ((HDR_PREFETCH(hdr)) != 0) {
3760 ASSERT(refcount_is_zero(&hdr->b_l1hdr.b_refcnt));
3762 ASSERT(multilist_link_active(&hdr->b_l1hdr.b_arc_node));
3765 hdr->b_l1hdr.b_arc_access = ddi_get_lbolt();
3766 } else if (hdr->b_l1hdr.b_state == arc_mfu_ghost) {
3774 if (HDR_PREFETCH(hdr)) {
3779 ASSERT0(refcount_count(&hdr->b_l1hdr.b_refcnt));
3783 hdr->b_l1hdr.b_arc_access = ddi_get_lbolt();
3784 DTRACE_PROBE1(new_state__mfu, arc_buf_hdr_t *, hdr);
3785 arc_change_state(new_state, hdr, hash_lock);
3788 } else if (hdr->b_l1hdr.b_state == arc_l2c_only) {
3793 hdr->b_l1hdr.b_arc_access = ddi_get_lbolt();
3794 DTRACE_PROBE1(new_state__mfu, arc_buf_hdr_t *, hdr);
3795 arc_change_state(arc_mfu, hdr, hash_lock);
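arc_access() (lines 3668-3795) is the ARC state machine: an anonymous header is cached in MRU; a second demand access to an MRU buffer more than ARC_MINTIME after the first promotes it to MFU; ghost-list hits come back as MRU or MFU; and b_arc_access is refreshed on each visit. The sketch below reduces this to the promotion decisions only, with no prefetch handling, no ghost-size feedback, and invented names and timing values:

#include <stdint.h>
#include <stdio.h>

/* Illustrative sketch; not from arc.c. */
typedef enum { EX_ANON, EX_MRU, EX_MRU_GHOST, EX_MFU, EX_MFU_GHOST } ex_state_t;
#define EX_MINTIME 62   /* stands in for ARC_MINTIME (clock ticks) */

typedef struct ex_hdr {
    ex_state_t h_state;
    uint64_t   h_access;    /* like b_l1hdr.b_arc_access */
} ex_hdr_t;

static void
ex_access(ex_hdr_t *hdr, uint64_t now)
{
    switch (hdr->h_state) {
    case EX_ANON:           /* first sight: cache in MRU */
        hdr->h_state = EX_MRU;
        break;
    case EX_MRU:            /* re-referenced: promote if not too soon */
        if (now > hdr->h_access + EX_MINTIME)
            hdr->h_state = EX_MFU;
        break;
    case EX_MRU_GHOST:      /* ghost hits are reinstated as MFU here */
    case EX_MFU_GHOST:
        hdr->h_state = EX_MFU;
        break;
    case EX_MFU:            /* already frequent: stay put */
        break;
    }
    hdr->h_access = now;
}

int
main(void)
{
    ex_hdr_t hdr = { EX_ANON, 0 };

    ex_access(&hdr, 100);   /* anon -> mru */
    ex_access(&hdr, 120);   /* too soon: stays mru */
    ex_access(&hdr, 400);   /* promoted: mru -> mfu */
    printf("final state: %d (EX_MFU is %d)\n", hdr.h_state, EX_MFU);
    return (0);
}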
3828 arc_buf_hdr_t *hdr;
3836 hdr = buf->b_hdr;
3839 * The hdr was inserted into hash-table and removed from lists
3846 if (HDR_IN_HASH_TABLE(hdr)) {
3847 ASSERT3U(hdr->b_birth, ==, BP_PHYSICAL_BIRTH(zio->io_bp));
3848 ASSERT3U(hdr->b_dva.dva_word[0], ==,
3850 ASSERT3U(hdr->b_dva.dva_word[1], ==,
3853 arc_buf_hdr_t *found = buf_hash_find(hdr->b_spa, zio->io_bp,
3856 ASSERT((found == NULL && HDR_FREED_IN_READ(hdr) &&
3858 (found == hdr &&
3859 DVA_EQUAL(&hdr->b_dva, BP_IDENTITY(zio->io_bp))) ||
3860 (found == hdr && HDR_L2_READING(hdr)));
3863 hdr->b_flags &= ~ARC_FLAG_L2_EVICTED;
3864 if (l2arc_noprefetch && HDR_PREFETCH(hdr))
3865 hdr->b_flags &= ~ARC_FLAG_L2CACHE;
3868 callback_list = hdr->b_l1hdr.b_acb;
3876 func(buf->b_data, hdr->b_size);
3883 hdr->b_l1hdr.b_state == arc_anon) {
3890 arc_access(hdr, hash_lock);
3905 hdr->b_l1hdr.b_acb = NULL;
3906 hdr->b_flags &= ~ARC_FLAG_IO_IN_PROGRESS;
3907 ASSERT(!HDR_BUF_AVAILABLE(hdr));
3910 ASSERT(hdr->b_l1hdr.b_datacnt == 1);
3911 hdr->b_flags |= ARC_FLAG_BUF_AVAILABLE;
3914 ASSERT(refcount_is_zero(&hdr->b_l1hdr.b_refcnt) ||
3918 hdr->b_flags |= ARC_FLAG_IO_ERROR;
3919 if (hdr->b_l1hdr.b_state != arc_anon)
3920 arc_change_state(arc_anon, hdr, hash_lock);
3921 if (HDR_IN_HASH_TABLE(hdr))
3922 buf_hash_remove(hdr);
3923 freeable = refcount_is_zero(&hdr->b_l1hdr.b_refcnt);
3928 * that the hdr (and hence the cv) might be freed before we get to
3931 cv_broadcast(&hdr->b_l1hdr.b_cv);
3942 ASSERT3P(hdr->b_l1hdr.b_state, ==, arc_anon);
3943 freeable = refcount_is_zero(&hdr->b_l1hdr.b_refcnt);
3961 arc_hdr_destroy(hdr);
3987 arc_buf_hdr_t *hdr = NULL;
4002 hdr = buf_hash_find(guid, bp, &hash_lock);
4005 if (hdr != NULL && HDR_HAS_L1HDR(hdr) && hdr->b_l1hdr.b_datacnt > 0) {
4009 if (HDR_IO_IN_PROGRESS(hdr)) {
4011 if ((hdr->b_flags & ARC_FLAG_PRIO_ASYNC_READ) &&
4034 arc_buf_hdr_t *, hdr);
4037 if (hdr->b_flags & ARC_FLAG_PREDICTIVE_PREFETCH) {
4038 hdr->b_flags &= ~ARC_FLAG_PREDICTIVE_PREFETCH;
4042 cv_wait(&hdr->b_l1hdr.b_cv, hash_lock);
4060 acb->acb_next = hdr->b_l1hdr.b_acb;
4061 hdr->b_l1hdr.b_acb = acb;
4062 add_reference(hdr, hash_lock, private);
4070 ASSERT(hdr->b_l1hdr.b_state == arc_mru ||
4071 hdr->b_l1hdr.b_state == arc_mfu);
4074 if (hdr->b_flags & ARC_FLAG_PREDICTIVE_PREFETCH) {
4082 arc_buf_hdr_t *, hdr);
4085 hdr->b_flags &= ~ARC_FLAG_PREDICTIVE_PREFETCH;
4087 add_reference(hdr, hash_lock, private);
4093 buf = hdr->b_l1hdr.b_buf;
4096 if (HDR_BUF_AVAILABLE(hdr)) {
4098 hdr->b_flags &= ~ARC_FLAG_BUF_AVAILABLE;
4104 refcount_count(&hdr->b_l1hdr.b_refcnt) == 0) {
4105 hdr->b_flags |= ARC_FLAG_PREFETCH;
4107 DTRACE_PROBE1(arc__hit, arc_buf_hdr_t *, hdr);
4108 arc_access(hdr, hash_lock);
4110 hdr->b_flags |= ARC_FLAG_L2CACHE;
4112 hdr->b_flags |= ARC_FLAG_L2COMPRESS;
4115 ARCSTAT_CONDSTAT(!HDR_PREFETCH(hdr),
4116 demand, prefetch, !HDR_ISTYPE_METADATA(hdr),
4130 if (hdr == NULL) {
4135 hdr = buf->b_hdr;
4137 hdr->b_dva = *BP_IDENTITY(bp);
4138 hdr->b_birth = BP_PHYSICAL_BIRTH(bp);
4139 exists = buf_hash_insert(hdr, &hash_lock);
4144 buf_discard_identity(hdr);
4154 (void) remove_reference(hdr, hash_lock,
4158 hdr->b_flags |= ARC_FLAG_PREFETCH;
4160 hdr->b_flags |= ARC_FLAG_L2CACHE;
4162 hdr->b_flags |= ARC_FLAG_L2COMPRESS;
4164 hdr->b_flags |= ARC_FLAG_INDIRECT;
4168 * (and thus didn't have an L1 hdr), we realloc the
4169 * header to add an L1 hdr.
4171 if (!HDR_HAS_L1HDR(hdr)) {
4172 hdr = arc_hdr_realloc(hdr, hdr_l2only_cache,
4176 ASSERT(GHOST_STATE(hdr->b_l1hdr.b_state));
4177 ASSERT(!HDR_IO_IN_PROGRESS(hdr));
4178 ASSERT(refcount_is_zero(&hdr->b_l1hdr.b_refcnt));
4179 ASSERT3P(hdr->b_l1hdr.b_buf, ==, NULL);
4185 add_reference(hdr, hash_lock, private);
4187 hdr->b_flags |= ARC_FLAG_PREFETCH;
4189 hdr->b_flags |= ARC_FLAG_L2CACHE;
4191 hdr->b_flags |= ARC_FLAG_L2COMPRESS;
4193 buf->b_hdr = hdr;
4198 hdr->b_l1hdr.b_buf = buf;
4199 ASSERT0(hdr->b_l1hdr.b_datacnt);
4200 hdr->b_l1hdr.b_datacnt = 1;
4202 arc_access(hdr, hash_lock);
4206 hdr->b_flags |= ARC_FLAG_PREDICTIVE_PREFETCH;
4207 ASSERT(!GHOST_STATE(hdr->b_l1hdr.b_state));
4213 ASSERT(hdr->b_l1hdr.b_acb == NULL);
4214 hdr->b_l1hdr.b_acb = acb;
4215 hdr->b_flags |= ARC_FLAG_IO_IN_PROGRESS;
4217 if (HDR_HAS_L2HDR(hdr) &&
4218 (vd = hdr->b_l2hdr.b_dev->l2ad_vdev) != NULL) {
4219 devw = hdr->b_l2hdr.b_dev->l2ad_writing;
4220 addr = hdr->b_l2hdr.b_daddr;
4221 b_compress = hdr->b_l2hdr.b_compress;
4222 b_asize = hdr->b_l2hdr.b_asize;
4238 ASSERT3U(hdr->b_size, ==, size);
4239 DTRACE_PROBE4(arc__miss, arc_buf_hdr_t *, hdr, blkptr_t *, bp,
4242 ARCSTAT_CONDSTAT(!HDR_PREFETCH(hdr),
4243 demand, prefetch, !HDR_ISTYPE_METADATA(hdr),
4247 hdr->b_flags |= ARC_FLAG_PRIO_ASYNC_READ;
4249 hdr->b_flags &= ~ARC_FLAG_PRIO_ASYNC_READ;
4261 if (HDR_HAS_L2HDR(hdr) &&
4262 !HDR_L2_WRITING(hdr) && !HDR_L2_EVICTED(hdr) &&
4263 !(l2arc_noprefetch && HDR_PREFETCH(hdr))) {
4266 DTRACE_PROBE1(l2arc__hit, arc_buf_hdr_t *, hdr);
4321 arc_buf_hdr_t *, hdr);
4323 if (HDR_L2_WRITING(hdr))
4332 arc_buf_hdr_t *, hdr);
4369 arc_buf_hdr_t *hdr;
4375 hdr = buf_hash_find(guid, bp, &hash_lock);
4376 if (hdr == NULL)
4378 if (HDR_BUF_AVAILABLE(hdr)) {
4379 arc_buf_t *buf = hdr->b_l1hdr.b_buf;
4380 add_reference(hdr, hash_lock, FTAG);
4381 hdr->b_flags &= ~ARC_FLAG_BUF_AVAILABLE;
4408 arc_buf_hdr_t *hdr;
4414 hdr = buf->b_hdr;
4415 if (hdr == NULL) {
4432 hash_lock = HDR_LOCK(hdr);
4434 hdr = buf->b_hdr;
4435 ASSERT3P(hash_lock, ==, HDR_LOCK(hdr));
4437 ASSERT3U(refcount_count(&hdr->b_l1hdr.b_refcnt), <,
4438 hdr->b_l1hdr.b_datacnt);
4439 ASSERT(hdr->b_l1hdr.b_state == arc_mru ||
4440 hdr->b_l1hdr.b_state == arc_mfu);
4445 if (hdr->b_l1hdr.b_datacnt > 1) {
4449 ASSERT(buf == hdr->b_l1hdr.b_buf);
4450 hdr->b_flags |= ARC_FLAG_BUF_AVAILABLE;
4463 * a new hdr for the buffer.
4468 arc_buf_hdr_t *hdr = buf->b_hdr;
4478 ASSERT(HDR_HAS_L1HDR(hdr));
4485 if (hdr->b_l1hdr.b_state == arc_anon) {
4487 ASSERT(!HDR_IO_IN_PROGRESS(hdr));
4488 ASSERT(!HDR_IN_HASH_TABLE(hdr));
4489 ASSERT(!HDR_HAS_L2HDR(hdr));
4490 ASSERT(BUF_EMPTY(hdr));
4492 ASSERT3U(hdr->b_l1hdr.b_datacnt, ==, 1);
4493 ASSERT3S(refcount_count(&hdr->b_l1hdr.b_refcnt), ==, 1);
4494 ASSERT(!list_link_active(&hdr->b_l1hdr.b_arc_node));
4499 hdr->b_l1hdr.b_arc_access = 0;
4505 kmutex_t *hash_lock = HDR_LOCK(hdr);
4513 arc_state_t *state = hdr->b_l1hdr.b_state;
4514 ASSERT3P(hash_lock, ==, HDR_LOCK(hdr));
4518 ASSERT(refcount_count(&hdr->b_l1hdr.b_refcnt) > 0);
4520 if (HDR_HAS_L2HDR(hdr)) {
4521 mutex_enter(&hdr->b_l2hdr.b_dev->l2ad_mtx);
4531 if (HDR_HAS_L2HDR(hdr))
4532 arc_hdr_l2hdr_destroy(hdr);
4534 mutex_exit(&hdr->b_l2hdr.b_dev->l2ad_mtx);
4540 if (hdr->b_l1hdr.b_datacnt > 1) {
4543 uint64_t blksz = hdr->b_size;
4544 uint64_t spa = hdr->b_spa;
4545 arc_buf_contents_t type = arc_buf_type(hdr);
4546 uint32_t flags = hdr->b_flags;
4548 ASSERT(hdr->b_l1hdr.b_buf != buf || buf->b_next != NULL);
4550 * Pull the data off of this hdr and attach it to
4551 * a new anonymous hdr.
4553 (void) remove_reference(hdr, hash_lock, tag);
4554 bufp = &hdr->b_l1hdr.b_buf;
4563 &state->arcs_size, hdr->b_size, buf);
4565 if (refcount_is_zero(&hdr->b_l1hdr.b_refcnt)) {
4568 ASSERT3U(*size, >=, hdr->b_size);
4569 atomic_add_64(size, -hdr->b_size);
4576 if (HDR_ISTYPE_DATA(hdr)) {
4579 -hdr->b_size);
4581 hdr->b_l1hdr.b_datacnt -= 1;
4608 ASSERT(refcount_count(&hdr->b_l1hdr.b_refcnt) == 1);
4609 /* protected by hash lock, or hdr is on arc_anon */
4610 ASSERT(!multilist_link_active(&hdr->b_l1hdr.b_arc_node));
4611 ASSERT(!HDR_IO_IN_PROGRESS(hdr));
4612 arc_change_state(arc_anon, hdr, hash_lock);
4613 hdr->b_l1hdr.b_arc_access = 0;
4616 buf_discard_identity(hdr);
4653 arc_buf_hdr_t *hdr = buf->b_hdr;
4655 ASSERT(HDR_HAS_L1HDR(hdr));
4657 ASSERT(hdr->b_l1hdr.b_datacnt > 0);
4666 if (HDR_IO_IN_PROGRESS(hdr)) {
4667 mutex_enter(&hdr->b_l1hdr.b_freeze_lock);
4668 if (hdr->b_freeze_cksum != NULL) {
4669 kmem_free(hdr->b_freeze_cksum, sizeof (zio_cksum_t));
4670 hdr->b_freeze_cksum = NULL;
4672 mutex_exit(&hdr->b_l1hdr.b_freeze_lock);
4675 hdr->b_flags |= ARC_FLAG_IO_IN_PROGRESS;
4695 arc_buf_hdr_t *hdr = buf->b_hdr;
4697 ASSERT(hdr->b_l1hdr.b_acb == NULL);
4701 buf_discard_identity(hdr);
4703 hdr->b_dva = *BP_IDENTITY(zio->io_bp);
4704 hdr->b_birth = BP_PHYSICAL_BIRTH(zio->io_bp);
4707 ASSERT(BUF_EMPTY(hdr));
4716 if (!BUF_EMPTY(hdr)) {
4724 exists = buf_hash_insert(hdr, &hash_lock);
4733 panic("bad overwrite, hdr=%p exists=%p",
4734 (void *)hdr, (void *)exists);
4740 exists = buf_hash_insert(hdr, &hash_lock);
4746 panic("bad nopwrite, hdr=%p exists=%p",
4747 (void *)hdr, (void *)exists);
4750 ASSERT(hdr->b_l1hdr.b_datacnt == 1);
4751 ASSERT(hdr->b_l1hdr.b_state == arc_anon);
4756 hdr->b_flags &= ~ARC_FLAG_IO_IN_PROGRESS;
4758 if (exists == NULL && hdr->b_l1hdr.b_state == arc_anon)
4759 arc_access(hdr, hash_lock);
4762 hdr->b_flags &= ~ARC_FLAG_IO_IN_PROGRESS;
4765 ASSERT(!refcount_is_zero(&hdr->b_l1hdr.b_refcnt));
4778 arc_buf_hdr_t *hdr = buf->b_hdr;
4784 ASSERT(!HDR_IO_ERROR(hdr));
4785 ASSERT(!HDR_IO_IN_PROGRESS(hdr));
4786 ASSERT(hdr->b_l1hdr.b_acb == NULL);
4787 ASSERT(hdr->b_l1hdr.b_datacnt > 0);
4789 hdr->b_flags |= ARC_FLAG_L2CACHE;
4791 hdr->b_flags |= ARC_FLAG_L2COMPRESS;
4799 zio = zio_write(pio, spa, txg, bp, buf->b_data, hdr->b_size, zp,
4956 arc_buf_hdr_t *hdr = obj;
4963 ASSERT(!BUF_EMPTY(hdr));
4977 return (buf_hash(hdr->b_spa, &hdr->b_dva, hdr->b_birth) %
5396 l2arc_write_eligible(uint64_t spa_guid, uint64_t sync_txg, arc_buf_hdr_t *hdr)
5406 if (hdr->b_spa != spa_guid || HDR_HAS_L2HDR(hdr) ||
5407 HDR_IO_IN_PROGRESS(hdr) || !HDR_L2CACHE(hdr) ||
5408 hdr->b_birth >= sync_txg)
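l2arc_write_eligible() (lines 5396-5408) is a simple gate: a header may be written to the L2ARC only if it belongs to the pool being fed, is not already backed by an L2 header, has no I/O in flight, is flagged for secondary caching, and was born before the currently syncing txg. A stand-alone sketch of the same shape of predicate, with illustrative fields in place of the flag macros:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Illustrative sketch; not from arc.c. */
typedef struct ex_hdr {
    uint64_t h_spa;             /* pool guid the header belongs to */
    uint64_t h_birth;           /* txg the block was born in */
    bool     h_has_l2hdr;       /* already cached on the L2 device */
    bool     h_io_in_progress;
    bool     h_l2cache;         /* caller asked for secondary caching */
} ex_hdr_t;

static bool
ex_l2_write_eligible(uint64_t spa_guid, uint64_t sync_txg, const ex_hdr_t *hdr)
{
    if (hdr->h_spa != spa_guid || hdr->h_has_l2hdr ||
        hdr->h_io_in_progress || !hdr->h_l2cache ||
        hdr->h_birth >= sync_txg)
        return (false);
    return (true);
}

int
main(void)
{
    ex_hdr_t hdr = {
        .h_spa = 7, .h_birth = 90, .h_has_l2hdr = false,
        .h_io_in_progress = false, .h_l2cache = true,
    };

    printf("eligible: %s\n",
        ex_l2_write_eligible(7, 100, &hdr) ? "yes" : "no");
    return (0);
}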
5555 arc_buf_hdr_t *head, *hdr, *hdr_prev;
5578 for (hdr = list_prev(buflist, head); hdr; hdr = hdr_prev) {
5579 hdr_prev = list_prev(buflist, hdr);
5581 hash_lock = HDR_LOCK(hdr);
5602 list_insert_after(buflist, hdr, head);
5622 ASSERT(HDR_HAS_L1HDR(hdr));
5628 l2arc_release_cdata_buf(hdr);
5634 list_remove(buflist, hdr);
5635 hdr->b_flags &= ~ARC_FLAG_HAS_L2HDR;
5637 ARCSTAT_INCR(arcstat_l2_asize, -hdr->b_l2hdr.b_asize);
5638 ARCSTAT_INCR(arcstat_l2_size, -hdr->b_size);
5640 bytes_dropped += hdr->b_l2hdr.b_asize;
5642 hdr->b_l2hdr.b_asize, hdr);
5649 hdr->b_flags &= ~ARC_FLAG_L2_WRITING;
5675 arc_buf_hdr_t *hdr;
5692 hdr = buf->b_hdr;
5693 ASSERT3P(hash_lock, ==, HDR_LOCK(hdr));
5699 l2arc_decompress_zio(zio, hdr, cb->l2rcb_compress);
5701 ASSERT3U(zio->io_size, ==, hdr->b_size);
5702 ASSERT3U(BP_GET_LSIZE(&cb->l2rcb_bp), ==, hdr->b_size);
5708 if (equal && zio->io_error == 0 && !HDR_L2_EVICTED(hdr)) {
5739 buf->b_data, hdr->b_size, arc_read_done, buf,
5800 arc_buf_hdr_t *hdr, *hdr_prev;
5828 for (hdr = list_tail(buflist); hdr; hdr = hdr_prev) {
5829 hdr_prev = list_prev(buflist, hdr);
5831 hash_lock = HDR_LOCK(hdr);
5849 if (HDR_L2_WRITE_HEAD(hdr)) {
5854 list_remove(buflist, hdr);
5859 if (!all && HDR_HAS_L2HDR(hdr) &&
5860 (hdr->b_l2hdr.b_daddr > taddr ||
5861 hdr->b_l2hdr.b_daddr < dev->l2ad_hand)) {
5870 ASSERT(HDR_HAS_L2HDR(hdr));
5871 if (!HDR_HAS_L1HDR(hdr)) {
5872 ASSERT(!HDR_L2_READING(hdr));
5878 arc_change_state(arc_anon, hdr, hash_lock);
5879 arc_hdr_destroy(hdr);
5881 ASSERT(hdr->b_l1hdr.b_state != arc_l2c_only);
5888 if (HDR_L2_READING(hdr)) {
5890 hdr->b_flags |= ARC_FLAG_L2_EVICTED;
5894 ASSERT(!HDR_L2_WRITING(hdr));
5895 ASSERT3P(hdr->b_l1hdr.b_tmp_cdata, ==, NULL);
5897 arc_hdr_l2hdr_destroy(hdr);
5919 arc_buf_hdr_t *hdr, *hdr_prev, *head;
5962 hdr = multilist_sublist_head(mls);
5964 hdr = multilist_sublist_tail(mls);
5970 for (; hdr; hdr = hdr_prev) {
5976 hdr_prev = multilist_sublist_next(mls, hdr);
5978 hdr_prev = multilist_sublist_prev(mls, hdr);
5980 hash_lock = HDR_LOCK(hdr);
5988 passed_sz += hdr->b_size;
5997 if (!l2arc_write_eligible(guid, sync_txg, hdr)) {
6007 buf_sz = hdr->b_size;
6037 hdr->b_l2hdr.b_dev = dev;
6038 hdr->b_flags |= ARC_FLAG_L2_WRITING;
6047 hdr->b_l2hdr.b_compress = ZIO_COMPRESS_OFF;
6048 hdr->b_l2hdr.b_asize = hdr->b_size;
6049 hdr->b_l1hdr.b_tmp_cdata = hdr->b_l1hdr.b_buf->b_data;
6072 hdr->b_l2hdr.b_daddr = L2ARC_ADDR_UNSET;
6074 hdr->b_flags |= ARC_FLAG_HAS_L2HDR;
6077 list_insert_head(&dev->l2ad_buflist, hdr);
6084 arc_cksum_verify(hdr->b_l1hdr.b_buf);
6085 arc_cksum_compute(hdr->b_l1hdr.b_buf, B_TRUE);
6127 for (hdr = list_prev(&dev->l2ad_buflist, head); hdr;
6128 hdr = list_prev(&dev->l2ad_buflist, hdr)) {
6137 ASSERT(HDR_HAS_L1HDR(hdr));
6143 * particular, hdr->b_l1hdr.b_buf may be invalid by now due to
6146 hdr->b_l2hdr.b_daddr = dev->l2ad_hand;
6148 if ((HDR_L2COMPRESS(hdr)) &&
6149 hdr->b_l2hdr.b_asize >= buf_compress_minsz) {
6150 if (l2arc_compress_buf(hdr)) {
6163 buf_data = hdr->b_l1hdr.b_tmp_cdata;
6164 buf_sz = hdr->b_l2hdr.b_asize;
6171 (void) refcount_add_many(&dev->l2ad_alloc, buf_sz, hdr);
6242 l2arc_compress_buf(arc_buf_hdr_t *hdr)
6246 ASSERT(HDR_HAS_L2HDR(hdr));
6247 l2arc_buf_hdr_t *l2hdr = &hdr->b_l2hdr;
6249 ASSERT(HDR_HAS_L1HDR(hdr));
6251 ASSERT(hdr->b_l1hdr.b_tmp_cdata != NULL);
6256 csize = zio_compress_data(ZIO_COMPRESS_LZ4, hdr->b_l1hdr.b_tmp_cdata,
6270 hdr->b_l1hdr.b_tmp_cdata = NULL;
6280 hdr->b_l1hdr.b_tmp_cdata = cdata;
6306 l2arc_decompress_zio(zio_t *zio, arc_buf_hdr_t *hdr, enum zio_compress c)
6315 zio->io_orig_size = zio->io_size = hdr->b_size;
6325 ASSERT(hdr->b_l1hdr.b_buf != NULL);
6326 bzero(hdr->b_l1hdr.b_buf->b_data, hdr->b_size);
6327 zio->io_data = zio->io_orig_data = hdr->b_l1hdr.b_buf->b_data;
6347 hdr->b_size) != 0)
6353 zio->io_orig_size = zio->io_size = hdr->b_size;
6363 l2arc_release_cdata_buf(arc_buf_hdr_t *hdr)
6365 ASSERT(HDR_HAS_L2HDR(hdr));
6366 enum zio_compress comp = hdr->b_l2hdr.b_compress;
6368 ASSERT(HDR_HAS_L1HDR(hdr));
6377 hdr->b_l1hdr.b_tmp_cdata = NULL;
6384 ASSERT3P(hdr->b_l1hdr.b_tmp_cdata, ==, NULL);
6390 ASSERT(hdr->b_l1hdr.b_tmp_cdata != NULL);
6391 zio_data_buf_free(hdr->b_l1hdr.b_tmp_cdata,
6392 hdr->b_size);
6393 hdr->b_l1hdr.b_tmp_cdata = NULL;
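l2arc_compress_buf() (lines 6242-6280) compresses the buffer into b_tmp_cdata and keeps the copy only if the result is actually smaller (all-zero buffers become ZIO_COMPRESS_EMPTY, incompressible ones stay ZIO_COMPRESS_OFF); l2arc_release_cdata_buf() (lines 6363-6393) frees that temporary copy once the L2 write is done. The sketch below shows just the keep-only-if-smaller decision; a toy run-length encoder stands in for zio_compress_data(), and all names are illustrative.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/*
 * Illustrative sketch; not from arc.c. Toy run-length encoder standing in
 * for zio_compress_data(); returns the compressed length, or the original
 * size if the output would not shrink.
 */
static size_t
ex_rle_compress(const uint8_t *src, size_t len, uint8_t *dst)
{
    size_t out = 0;

    for (size_t i = 0; i < len; ) {
        size_t run = 1;

        while (i + run < len && src[i + run] == src[i] && run < 255)
            run++;
        if (out + 2 > len)
            return (len);   /* would not shrink: give up */
        dst[out++] = (uint8_t)run;
        dst[out++] = src[i];
        i += run;
    }
    return (out);
}

typedef struct ex_hdr {
    size_t  h_size;         /* logical size, like b_size */
    size_t  h_asize;        /* on-device size, like b_l2hdr.b_asize */
    uint8_t *h_tmp_cdata;   /* like b_l1hdr.b_tmp_cdata */
    int     h_compressed;
} ex_hdr_t;

/* Keep the compressed copy only if it is actually smaller. */
static void
ex_compress_buf(ex_hdr_t *hdr, const uint8_t *data)
{
    uint8_t *cdata = malloc(hdr->h_size);
    size_t csize = ex_rle_compress(data, hdr->h_size, cdata);

    if (csize < hdr->h_size) {
        hdr->h_tmp_cdata = cdata;   /* this copy goes to the L2 device */
        hdr->h_asize = csize;
        hdr->h_compressed = 1;
    } else {
        free(cdata);                /* not worth it: write the raw data */
        hdr->h_asize = hdr->h_size;
        hdr->h_compressed = 0;
    }
}

int
main(void)
{
    uint8_t data[64];
    ex_hdr_t hdr = { .h_size = sizeof (data) };

    memset(data, 0xaa, sizeof (data));  /* highly compressible */
    ex_compress_buf(&hdr, data);
    printf("compressed=%d asize=%zu\n", hdr.h_compressed, hdr.h_asize);
    free(hdr.h_tmp_cdata);
    return (0);
}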