Lines Matching full:bytes
42 u64 bytes; member
52 u64 *bytes, bool for_alloc);
57 u64 bytes, bool update_stats);
580 static int io_ctl_add_entry(struct btrfs_io_ctl *io_ctl, u64 offset, u64 bytes, in io_ctl_add_entry() argument
590 put_unaligned_le64(bytes, &entry->bytes); in io_ctl_add_entry()
664 entry->bytes = get_unaligned_le64(&e->bytes); in io_ctl_read_entry()
727 * bytes we can have, or whatever is less than that. in recalculate_thresholds()
828 if (!e->bytes) { in __load_free_space_cache()
912 const u64 bytes = info->bytes; in copy_free_space_cache() local
917 ret = btrfs_add_free_space(block_group, offset, bytes); in copy_free_space_cache()
921 u64 bytes = ctl->unit; in copy_free_space_cache() local
923 ret = search_bitmap(ctl, info, &offset, &bytes, false); in copy_free_space_cache()
925 bitmap_clear_bits(ctl, info, offset, bytes, true); in copy_free_space_cache()
928 bytes); in copy_free_space_cache()
1105 ret = io_ctl_add_entry(io_ctl, e->offset, e->bytes, in write_cache_extent_entries()
1135 trim_entry->bytes, NULL); in write_cache_extent_entries()
1574 static inline unsigned long bytes_to_bits(u64 bytes, u32 unit) in bytes_to_bits() argument
1576 return (unsigned long)(div_u64(bytes, unit)); in bytes_to_bits()
1664 * we've found already if it's larger, or we want to use ->bytes.
1666 * This matters because find_free_space() will skip entries whose ->bytes is
1667 * less than the required bytes. So if we didn't search down this bitmap, we
1670 * ->max_extent_size set to 4K and ->bytes set to 1M. A second entry hasn't set
1671 * ->max_extent_size yet, has ->bytes set to 8K and it's contiguous. We will
1687 return entry->bytes; in get_max_extent_size()
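The comment fragments above describe why a bitmap entry caches ->max_extent_size: a failed search records the largest contiguous run it saw, so later callers do not overestimate a bitmap by its total ->bytes. A minimal standalone sketch of that idea (the struct and function names here are simplified stand-ins, not the kernel's types):

#include <stdbool.h>
#include <stdint.h>

/* Simplified stand-in for a free-space entry (illustration only). */
struct free_space_entry {
	uint64_t bytes;            /* total free bytes tracked by the entry */
	uint64_t max_extent_size;  /* largest contiguous run seen; 0 = unknown */
	bool     bitmap;           /* true if the entry is a bitmap */
};

/*
 * Prefer the cached largest run for a bitmap; fall back to ->bytes,
 * which may be far larger than any single contiguous chunk.
 */
static uint64_t sketch_get_max_extent_size(const struct free_space_entry *e)
{
	if (e->bitmap && e->max_extent_size)
		return e->max_extent_size;
	return e->bytes;
}

In the scenario the comment walks through, the 1M bitmap whose largest run is only 4K reports 4K here, so the contiguous 8K entry wins.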
1707 * want a section that is at least 'bytes' in size and comes at or after the given
1764 prev->offset + prev->bytes > offset) in tree_search_offset()
1796 prev->offset + prev->bytes > offset) in tree_search_offset()
1801 } else if (entry->offset + entry->bytes > offset) in tree_search_offset()
1817 if (entry->offset + entry->bytes > offset) in tree_search_offset()
1836 ctl->discardable_bytes[BTRFS_STAT_CURR] -= info->bytes; in unlink_free_space()
1840 ctl->free_space -= info->bytes; in unlink_free_space()
1850 ASSERT(info->bytes || info->bitmap); in link_free_space()
1859 ctl->discardable_bytes[BTRFS_STAT_CURR] += info->bytes; in link_free_space()
1862 ctl->free_space += info->bytes; in link_free_space()
1874 * want to re-link it into our ctl bytes index. in relink_bitmap_entry()
1887 u64 offset, u64 bytes, bool update_stat) in bitmap_clear_bits() argument
1893 count = bytes_to_bits(bytes, ctl->unit); in bitmap_clear_bits()
1899 info->bytes -= bytes; in bitmap_clear_bits()
1914 ctl->discardable_bytes[BTRFS_STAT_CURR] -= bytes; in bitmap_clear_bits()
1918 ctl->free_space -= bytes; in bitmap_clear_bits()
1923 u64 bytes) in btrfs_bitmap_set_bits() argument
1929 count = bytes_to_bits(bytes, ctl->unit); in btrfs_bitmap_set_bits()
1936 * We set some bytes, so we have no idea what the max extent size is in btrfs_bitmap_set_bits()
1940 info->bytes += bytes; in btrfs_bitmap_set_bits()
1941 ctl->free_space += bytes; in btrfs_bitmap_set_bits()
1954 ctl->discardable_bytes[BTRFS_STAT_CURR] += bytes; in btrfs_bitmap_set_bits()
1959 * If we cannot find a suitable extent, we will use bytes to record
1964 u64 *bytes, bool for_alloc) in search_bitmap() argument
1978 bitmap_info->max_extent_size < *bytes) { in search_bitmap()
1979 *bytes = bitmap_info->max_extent_size; in search_bitmap()
1985 bits = bytes_to_bits(*bytes, ctl->unit); in search_bitmap()
2006 *bytes = (u64)(found_bits) * ctl->unit; in search_bitmap()
2010 *bytes = (u64)(max_bits) * ctl->unit; in search_bitmap()
2011 bitmap_info->max_extent_size = *bytes; in search_bitmap()
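A self-contained sketch (plain C over a flat bit array, not the kernel's bitmap helpers) of the scan the search_bitmap() excerpts describe: look for a run of at least the requested number of set bits and, when no run is long enough, report the longest one found so the caller can cache it as max_extent_size.

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

/*
 * Find 'want' consecutive set bits in 'map' (nbits long). On success
 * store the first bit of the run in *start and return true; on failure
 * store the longest run seen in *max_run and return false.
 */
static bool find_bit_run(const uint64_t *map, size_t nbits, size_t want,
			 size_t *start, size_t *max_run)
{
	size_t run = 0, run_start = 0;

	*max_run = 0;
	for (size_t i = 0; i < nbits; i++) {
		bool set = map[i / 64] & (1ULL << (i % 64));

		if (!set) {
			run = 0;
			continue;
		}
		if (run == 0)
			run_start = i;
		run++;
		if (run > *max_run)
			*max_run = run;
		if (run >= want) {
			*start = run_start;
			return true;
		}
	}
	return false;
}

In the excerpts both the request and the result are expressed in ctl->unit-sized blocks, so a run of N bits corresponds to N * ctl->unit bytes.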
2016 /* Cache the size of the max extent in bytes */
2018 find_free_space(struct btrfs_free_space_ctl *ctl, u64 *offset, u64 *bytes, in find_free_space() argument
2049 * If we are using the bytes index then all subsequent entries in find_free_space()
2050 * in this tree are going to be < bytes, so simply set the max in find_free_space()
2056 if (entry->bytes < *bytes) { in find_free_space()
2067 if (*bytes >= align) { in find_free_space()
2078 * We don't break here if we're using the bytes index because we in find_free_space()
2084 if (entry->bytes < *bytes + align_off) { in find_free_space()
2092 u64 size = *bytes; in find_free_space()
2097 *bytes = size; in find_free_space()
2117 *bytes = entry->bytes - align_off; in find_free_space()
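The alignment handling in the find_free_space() excerpts can be illustrated with a small standalone helper (hypothetical names, power-of-two alignment assumed): round the extent's start up to the requested alignment and accept it only if enough bytes remain past that point.

#include <stdbool.h>
#include <stdint.h>

/* Round x up to the next multiple of align (align must be a power of two). */
static uint64_t align_up(uint64_t x, uint64_t align)
{
	return (x + align - 1) & ~(align - 1);
}

/*
 * Can an extent at 'offset' with 'size' bytes satisfy an aligned
 * allocation of 'needed' bytes? Mirrors the "entry->bytes < *bytes +
 * align_off" rejection and the "*bytes = entry->bytes - align_off"
 * result in the excerpts above.
 */
static bool fits_aligned(uint64_t offset, uint64_t size, uint64_t needed,
			 uint64_t align, uint64_t *alloc_offset, uint64_t *usable)
{
	uint64_t aligned = align_up(offset, align);
	uint64_t align_off = aligned - offset;

	if (size < needed + align_off)
		return false;
	*alloc_offset = aligned;
	*usable = size - align_off;
	return true;
}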
2128 info->bytes = 0; in add_new_bitmap()
2145 if (bitmap_info->bytes && !btrfs_free_space_trimmed(bitmap_info)) { in free_bitmap()
2148 ctl->discardable_bytes[BTRFS_STAT_CURR] -= bitmap_info->bytes; in free_bitmap()
2160 u64 *offset, u64 *bytes) in remove_from_bitmap() argument
2184 search_bytes = min(search_bytes, *bytes); in remove_from_bitmap()
2191 *bytes -= search_bytes; in remove_from_bitmap()
2193 if (*bytes) { in remove_from_bitmap()
2195 if (!bitmap_info->bytes) in remove_from_bitmap()
2199 * no entry after this bitmap, but we still have bytes to in remove_from_bitmap()
2229 } else if (!bitmap_info->bytes) in remove_from_bitmap()
2237 u64 bytes, enum btrfs_trim_state trim_state) in add_bytes_to_bitmap() argument
2250 ctl->discardable_bytes[BTRFS_STAT_CURR] += info->bytes; in add_bytes_to_bitmap()
2257 bytes_to_set = min(end - offset, bytes); in add_bytes_to_bitmap()
2278 if (!forced && info->bytes >= FORCE_EXTENT_THRESHOLD) in use_bitmap()
2293 if (info->bytes <= fs_info->sectorsize * 8) { in use_bitmap()
2325 u64 bytes, offset, bytes_added; in insert_into_bitmap() local
2329 bytes = info->bytes; in insert_into_bitmap()
2367 bytes, trim_state); in insert_into_bitmap()
2368 bytes -= bytes_added; in insert_into_bitmap()
2372 if (!bytes) { in insert_into_bitmap()
2386 bytes_added = add_bytes_to_bitmap(ctl, bitmap_info, offset, bytes, in insert_into_bitmap()
2388 bytes -= bytes_added; in insert_into_bitmap()
2392 if (!bytes) { in insert_into_bitmap()
2464 u64 bytes = info->bytes; in try_merge_free_space() local
2473 right_info = tree_search_offset(ctl, offset + bytes, 0, 0); in try_merge_free_space()
2486 info->bytes += right_info->bytes; in try_merge_free_space()
2493 left_info->offset + left_info->bytes == offset && in try_merge_free_space()
2497 info->bytes += left_info->bytes; in try_merge_free_space()
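A compact sketch (simplified structs, no rbtree, names are illustrative) of the coalescing rule in the try_merge_free_space() excerpts: a neighbour that starts exactly at offset + bytes is absorbed on the right, and one that ends exactly at offset is absorbed on the left.

#include <stdbool.h>
#include <stdint.h>

struct span { uint64_t offset, bytes; bool used; };

/*
 * Merge 'info' with optional left/right neighbours. A consumed
 * neighbour is only marked unused here; the kernel unlinks the entry
 * from the free-space tree and frees it.
 */
static void merge_neighbours(struct span *info, struct span *left,
			     struct span *right)
{
	if (right && right->used &&
	    right->offset == info->offset + info->bytes) {
		info->bytes += right->bytes;
		right->used = false;
	}
	if (left && left->used &&
	    left->offset + left->bytes == info->offset) {
		info->offset = left->offset;
		info->bytes += left->bytes;
		left->used = false;
	}
}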
2512 const u64 end = info->offset + info->bytes; in steal_from_bitmap_to_end()
2514 u64 bytes; in steal_from_bitmap_to_end() local
2524 bytes = (j - i) * ctl->unit; in steal_from_bitmap_to_end()
2525 info->bytes += bytes; in steal_from_bitmap_to_end()
2531 bitmap_clear_bits(ctl, bitmap, end, bytes, update_stat); in steal_from_bitmap_to_end()
2533 if (!bitmap->bytes) in steal_from_bitmap_to_end()
2548 u64 bytes; in steal_from_bitmap_to_front() local
2574 bytes = (i + 1) * ctl->unit; in steal_from_bitmap_to_front()
2576 bytes = (i - prev_j) * ctl->unit; in steal_from_bitmap_to_front()
2578 info->offset -= bytes; in steal_from_bitmap_to_front()
2579 info->bytes += bytes; in steal_from_bitmap_to_front()
2585 bitmap_clear_bits(ctl, bitmap, info->offset, bytes, update_stat); in steal_from_bitmap_to_front()
2587 if (!bitmap->bytes) in steal_from_bitmap_to_front()
2630 u64 offset, u64 bytes, in __btrfs_add_free_space() argument
2637 u64 filter_bytes = bytes; in __btrfs_add_free_space()
2646 info->bytes = bytes; in __btrfs_add_free_space()
2677 filter_bytes = max(filter_bytes, info->bytes); in __btrfs_add_free_space()
2807 u64 offset, u64 bytes) in btrfs_remove_free_space() argument
2827 offset + bytes) { in btrfs_remove_free_space()
2829 offset + bytes - block_group->start; in btrfs_remove_free_space()
2838 if (!bytes) in btrfs_remove_free_space()
2864 u64 to_free = min(bytes, info->bytes); in btrfs_remove_free_space()
2866 info->bytes -= to_free; in btrfs_remove_free_space()
2868 if (info->bytes) { in btrfs_remove_free_space()
2876 bytes -= to_free; in btrfs_remove_free_space()
2879 u64 old_end = info->bytes + info->offset; in btrfs_remove_free_space()
2881 info->bytes = offset - info->offset; in btrfs_remove_free_space()
2887 /* Not enough bytes in this entry to satisfy us */ in btrfs_remove_free_space()
2888 if (old_end < offset + bytes) { in btrfs_remove_free_space()
2889 bytes -= old_end - offset; in btrfs_remove_free_space()
2892 } else if (old_end == offset + bytes) { in btrfs_remove_free_space()
2899 offset + bytes, in btrfs_remove_free_space()
2900 old_end - (offset + bytes), in btrfs_remove_free_space()
2907 ret = remove_from_bitmap(ctl, info, &offset, &bytes); in btrfs_remove_free_space()
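The cases in the btrfs_remove_free_space() excerpts reduce to removing a range from an extent and keeping whatever sticks out on either side: trim the front, trim the tail, or split into two pieces and re-add the tail. A standalone sketch for a single extent (hypothetical helper, assumes the ranges overlap):

#include <stdint.h>

struct piece { uint64_t offset, bytes; };

/*
 * Remove [rm_off, rm_off + rm_bytes) from the extent
 * [ext_off, ext_off + ext_bytes) and return how many pieces survive
 * (0, 1 or 2). Two surviving pieces correspond to the "re-add the
 * old_end tail" case in the excerpts above.
 */
static int remove_range(uint64_t ext_off, uint64_t ext_bytes,
			uint64_t rm_off, uint64_t rm_bytes,
			struct piece out[2])
{
	uint64_t ext_end = ext_off + ext_bytes;
	uint64_t rm_end = rm_off + rm_bytes;
	int n = 0;

	if (rm_off > ext_off) {		/* head survives: info->bytes = offset - info->offset */
		out[n].offset = ext_off;
		out[n].bytes = rm_off - ext_off;
		n++;
	}
	if (rm_end < ext_end) {		/* tail survives and is re-added */
		out[n].offset = rm_end;
		out[n].bytes = ext_end - rm_end;
		n++;
	}
	return n;
}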
2920 u64 bytes) in btrfs_dump_free_space() argument
2943 if (info->bytes >= bytes && !block_group->ro) in btrfs_dump_free_space()
2945 btrfs_crit(fs_info, "entry offset %llu, bytes %llu, bitmap %s", in btrfs_dump_free_space()
2946 info->offset, info->bytes, str_yes_no(info->bitmap)); in btrfs_dump_free_space()
2952 "%d free space entries at or bigger than %llu bytes", in btrfs_dump_free_space()
2953 count, bytes); in btrfs_dump_free_space()
3017 entry->bytes; in __btrfs_return_cluster_to_free_space()
3027 entry->bytes; in __btrfs_return_cluster_to_free_space()
3091 u64 offset, u64 bytes, u64 empty_size, in btrfs_find_space_for_alloc() argument
3098 u64 bytes_search = bytes + empty_size; in btrfs_find_space_for_alloc()
3116 bitmap_clear_bits(ctl, entry, offset, bytes, true); in btrfs_find_space_for_alloc()
3119 atomic64_add(bytes, &discard_ctl->discard_bytes_saved); in btrfs_find_space_for_alloc()
3121 if (!entry->bytes) in btrfs_find_space_for_alloc()
3130 atomic64_add(bytes, &discard_ctl->discard_bytes_saved); in btrfs_find_space_for_alloc()
3132 entry->offset = offset + bytes; in btrfs_find_space_for_alloc()
3133 WARN_ON(entry->bytes < bytes + align_gap_len); in btrfs_find_space_for_alloc()
3135 entry->bytes -= bytes + align_gap_len; in btrfs_find_space_for_alloc()
3136 if (!entry->bytes) in btrfs_find_space_for_alloc()
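The bookkeeping in the btrfs_find_space_for_alloc() excerpts carves the allocation, plus any alignment gap in front of it, off the start of the entry. A standalone sketch with a simplified entry type (illustrative names):

#include <stdbool.h>
#include <stdint.h>

struct fs_entry { uint64_t offset, bytes; };

/*
 * Carve an allocation of 'bytes' off the front of 'e', skipping
 * 'align_gap_len' unaligned bytes first. The kernel re-adds that
 * alignment gap as free space after the carve.
 */
static bool carve_front(struct fs_entry *e, uint64_t bytes,
			uint64_t align_gap_len, uint64_t *alloc_offset)
{
	if (e->bytes < bytes + align_gap_len)
		return false;
	*alloc_offset = e->offset + align_gap_len;
	e->offset = *alloc_offset + bytes;	/* entry->offset = offset + bytes */
	e->bytes -= bytes + align_gap_len;	/* entry->bytes -= bytes + align_gap_len */
	return true;
}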
3197 u64 bytes, u64 min_start, in btrfs_alloc_from_bitmap() argument
3203 u64 search_bytes = bytes; in btrfs_alloc_from_bitmap()
3207 search_bytes = bytes; in btrfs_alloc_from_bitmap()
3217 bitmap_clear_bits(ctl, entry, ret, bytes, false); in btrfs_alloc_from_bitmap()
3223 * given a cluster, try to allocate 'bytes' from it, returns 0
3228 struct btrfs_free_cluster *cluster, u64 bytes, in btrfs_alloc_from_cluster() argument
3241 if (bytes > cluster->max_size) in btrfs_alloc_from_cluster()
3253 if (entry->bytes < bytes) in btrfs_alloc_from_cluster()
3257 if (entry->bytes < bytes || in btrfs_alloc_from_cluster()
3269 cluster, entry, bytes, in btrfs_alloc_from_cluster()
3280 cluster->window_start += bytes; in btrfs_alloc_from_cluster()
3284 entry->offset += bytes; in btrfs_alloc_from_cluster()
3285 entry->bytes -= bytes; in btrfs_alloc_from_cluster()
3299 atomic64_add(bytes, &discard_ctl->discard_bytes_saved); in btrfs_alloc_from_cluster()
3301 ctl->free_space -= bytes; in btrfs_alloc_from_cluster()
3303 ctl->discardable_bytes[BTRFS_STAT_CURR] -= bytes; in btrfs_alloc_from_cluster()
3306 if (entry->bytes == 0) { in btrfs_alloc_from_cluster()
3329 u64 offset, u64 bytes, in btrfs_bitmap_cluster() argument
3347 want_bits = bytes_to_bits(bytes, ctl->unit); in btrfs_bitmap_cluster()
3416 * Try to find a cluster with at least 'bytes' total bytes, at least one in setup_cluster_no_bitmap()
3422 struct list_head *bitmaps, u64 offset, u64 bytes, in setup_cluster_no_bitmap() argument
3444 while (entry->bitmap || entry->bytes < min_bytes) { in setup_cluster_no_bitmap()
3453 window_free = entry->bytes; in setup_cluster_no_bitmap()
3454 max_extent = entry->bytes; in setup_cluster_no_bitmap()
3468 if (entry->bytes < min_bytes) in setup_cluster_no_bitmap()
3472 window_free += entry->bytes; in setup_cluster_no_bitmap()
3473 if (entry->bytes > max_extent) in setup_cluster_no_bitmap()
3474 max_extent = entry->bytes; in setup_cluster_no_bitmap()
3477 if (window_free < bytes || max_extent < cont1_bytes) in setup_cluster_no_bitmap()
3493 if (entry->bitmap || entry->bytes < min_bytes) in setup_cluster_no_bitmap()
3499 total_size += entry->bytes; in setup_cluster_no_bitmap()
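The admission check in the setup_cluster_no_bitmap() excerpts boils down to two accumulated quantities: the window's total free bytes and its largest single extent. A minimal sketch over a plain array of extent sizes (not the rbtree walk the kernel performs):

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

/*
 * Accept a candidate window if its usable extents together cover
 * 'want' bytes and at least one extent offers 'cont1' contiguous
 * bytes, mirroring the window_free / max_extent check above.
 */
static bool window_acceptable(const uint64_t *sizes, size_t n,
			      uint64_t min_bytes, uint64_t want,
			      uint64_t cont1)
{
	uint64_t window_free = 0, max_extent = 0;

	for (size_t i = 0; i < n; i++) {
		if (sizes[i] < min_bytes)	/* too small for the window */
			continue;
		window_free += sizes[i];
		if (sizes[i] > max_extent)
			max_extent = sizes[i];
	}
	return window_free >= want && max_extent >= cont1;
}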
3515 struct list_head *bitmaps, u64 offset, u64 bytes, in setup_cluster_bitmap() argument
3540 if (entry->bytes < bytes) in setup_cluster_bitmap()
3543 bytes, cont1_bytes, min_bytes); in setup_cluster_bitmap()
3557 * is to find at least bytes+empty_size.
3565 u64 offset, u64 bytes, u64 empty_size) in btrfs_find_space_cluster() argument
3582 cont1_bytes = bytes + empty_size; in btrfs_find_space_cluster()
3585 cont1_bytes = bytes; in btrfs_find_space_cluster()
3588 cont1_bytes = max(bytes, (bytes + empty_size) >> 2); in btrfs_find_space_cluster()
3598 if (ctl->free_space < bytes) { in btrfs_find_space_cluster()
3611 trace_btrfs_find_cluster(block_group, offset, bytes, empty_size, in btrfs_find_space_cluster()
3615 bytes + empty_size, in btrfs_find_space_cluster()
3619 offset, bytes + empty_size, in btrfs_find_space_cluster()
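The btrfs_find_space_cluster() excerpts show three ways cont1_bytes (the single contiguous chunk the cluster must contain) is chosen; the conditions selecting between them are not part of the matched lines, so the enum below is purely illustrative:

#include <stdint.h>

enum cont1_policy { CONT1_FULL, CONT1_EXACT, CONT1_RELAXED };

static uint64_t pick_cont1_bytes(enum cont1_policy p, uint64_t bytes,
				 uint64_t empty_size)
{
	switch (p) {
	case CONT1_FULL:	/* cont1_bytes = bytes + empty_size */
		return bytes + empty_size;
	case CONT1_EXACT:	/* cont1_bytes = bytes */
		return bytes;
	default:		/* cont1_bytes = max(bytes, (bytes + empty_size) >> 2) */
		return bytes > ((bytes + empty_size) >> 2) ?
			bytes : ((bytes + empty_size) >> 2);
	}
}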
3656 u64 *total_trimmed, u64 start, u64 bytes, in do_trimming() argument
3666 const u64 end = start + bytes; in do_trimming()
3681 ret = btrfs_discard_extent(fs_info, start, bytes, &trimmed); in do_trimming()
3695 __btrfs_add_free_space(block_group, start, bytes, trim_state); in do_trimming()
3729 u64 bytes; in trim_no_bitmap() local
3759 extent_bytes = entry->bytes; in trim_no_bitmap()
3763 bytes = entry->bytes; in trim_no_bitmap()
3764 if (bytes < minlen) { in trim_no_bitmap()
3771 * Let bytes = BTRFS_MAX_DISCARD_SIZE + X. in trim_no_bitmap()
3776 bytes >= (max_discard_size + in trim_no_bitmap()
3778 bytes = max_discard_size; in trim_no_bitmap()
3781 entry->bytes -= max_discard_size; in trim_no_bitmap()
3788 bytes = min(extent_start + extent_bytes, end) - start; in trim_no_bitmap()
3789 if (bytes < minlen) { in trim_no_bitmap()
3801 trim_entry.bytes = extent_bytes; in trim_no_bitmap()
3805 ret = do_trimming(block_group, total_trimmed, start, bytes, in trim_no_bitmap()
3809 block_group->discard_cursor = start + bytes; in trim_no_bitmap()
3813 start += bytes; in trim_no_bitmap()
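The capping rule in the trim_no_bitmap() excerpts (trim_bitmaps() applies the same test) only splits a discard when the leftover would itself still be worth trimming. A small helper sketch with illustrative names:

#include <stdint.h>

/*
 * Cap one discard at 'max_discard'. Split only when the remainder
 * would be at least 'min_len' ("Let bytes = BTRFS_MAX_DISCARD_SIZE + X":
 * splitting is pointless unless X >= minlen).
 */
static uint64_t cap_discard(uint64_t bytes, uint64_t max_discard,
			    uint64_t min_len, uint64_t *leftover)
{
	if (max_discard && bytes >= max_discard + min_len) {
		*leftover = bytes - max_discard;
		return max_discard;
	}
	*leftover = 0;
	return bytes;
}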
3860 ctl->discardable_bytes[BTRFS_STAT_CURR] += entry->bytes; in reset_trimming_bitmap()
3875 ctl->discardable_bytes[BTRFS_STAT_CURR] -= entry->bytes; in end_trimming_bitmap()
3892 u64 bytes; in trim_bitmaps() local
3937 bytes = minlen; in trim_bitmaps()
3938 ret2 = search_bitmap(ctl, entry, &start, &bytes, false); in trim_bitmaps()
3964 bytes = min(bytes, end - start); in trim_bitmaps()
3965 if (bytes < minlen || (async && maxlen && bytes > maxlen)) { in trim_bitmaps()
3972 * Let bytes = BTRFS_MAX_DISCARD_SIZE + X. in trim_bitmaps()
3979 bytes > (max_discard_size + minlen)) in trim_bitmaps()
3980 bytes = max_discard_size; in trim_bitmaps()
3982 bitmap_clear_bits(ctl, entry, start, bytes, true); in trim_bitmaps()
3983 if (entry->bytes == 0) in trim_bitmaps()
3988 trim_entry.bytes = bytes; in trim_bitmaps()
3992 ret = do_trimming(block_group, total_trimmed, start, bytes, in trim_bitmaps()
3993 start, bytes, 0, &trim_entry); in trim_bitmaps()
4005 start += bytes; in trim_bitmaps()
4196 u64 offset, u64 bytes, bool bitmap) in test_add_free_space_entry() argument
4215 info->bytes = bytes; in test_add_free_space_entry()
4243 bytes_added = add_bytes_to_bitmap(ctl, bitmap_info, offset, bytes, in test_add_free_space_entry()
4246 bytes -= bytes_added; in test_add_free_space_entry()
4250 if (bytes) in test_add_free_space_entry()
4266 u64 offset, u64 bytes) in test_check_exists() argument
4295 offset + bytes > bit_off) { in test_check_exists()
4305 if (tmp->offset + tmp->bytes < offset) in test_check_exists()
4307 if (offset + bytes < tmp->offset) { in test_check_exists()
4319 if (offset + bytes < tmp->offset) in test_check_exists()
4321 if (tmp->offset + tmp->bytes < offset) { in test_check_exists()
4338 if (offset > info->offset && offset < info->offset + info->bytes) in test_check_exists()