Lines Matching full:start

47 "state leak: start %llu end %llu state %u in tree %d refs %d", in btrfs_extent_state_leak_debug_check()
48 state->start, state->end, state->state, in btrfs_extent_state_leak_debug_check()
57 #define btrfs_debug_check_extent_io_range(tree, start, end) \ argument
58 __btrfs_debug_check_extent_io_range(__func__, (tree), (start), (end))
61 u64 start, u64 end) in __btrfs_debug_check_extent_io_range() argument
73 caller, btrfs_ino(inode), isize, start, end); in __btrfs_debug_check_extent_io_range()
198 changeset->bytes_changed += state->end - state->start + 1; in add_extent_changeset()
199 ret = ulist_add(&changeset->range_changed, state->start, state->end, in add_extent_changeset()
251 if (offset < entry->start) in tree_search_for_insert()
303 if (offset < entry->start) in tree_search_prev_next()
317 while (entry && offset < entry->start) in tree_search_prev_next()
338 "extent io tree error on %s state start %llu end %llu", in extent_io_tree_panic()
339 opname, state->start, state->end); in extent_io_tree_panic()
347 if (prev && prev->end == state->start - 1 && prev->state == state->state) { in merge_prev_state()
350 state->start = prev->start; in merge_prev_state()
362 if (next && next->start == state->end + 1 && next->state == state->state) { in merge_next_state()
427 const u64 start = state->start - 1; in insert_state() local
440 if (state->end < entry->start) { in insert_state()
441 if (try_merge && end == entry->start && in insert_state()
446 entry->start = state->start; in insert_state()
453 if (try_merge && entry->end == start && in insert_state()
495 * the tree has 'orig' at [orig->start, orig->end]. After calling, there
497 * prealloc: [orig->start, split - 1]
512 prealloc->start = orig->start; in split_state()
515 orig->start = split; in split_state()
525 if (prealloc->end < entry->start) { in split_state()
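The split_state() fragments above (source lines 495-527) describe carving one inclusive extent state in two at 'split': the preallocated state takes [orig->start, split - 1] and the original keeps [split, orig->end]. Below is a minimal userspace sketch of that arithmetic only; the 'range' struct and split_range() name are illustrative stand-ins, not the kernel's extent_state code.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

struct range { uint64_t start, end; };  /* both ends inclusive */

/* 'lo' gets [orig->start, split - 1]; 'orig' keeps [split, orig->end]. */
static void split_range(struct range *orig, struct range *lo, uint64_t split)
{
    assert(split > orig->start && split <= orig->end);
    lo->start = orig->start;
    lo->end = split - 1;
    orig->start = split;
}

int main(void)
{
    struct range orig = { 0, 4095 }, lo;

    split_range(&orig, &lo, 2048);
    printf("lo [%llu, %llu], orig [%llu, %llu]\n",
           (unsigned long long)lo.start, (unsigned long long)lo.end,
           (unsigned long long)orig.start, (unsigned long long)orig.end);
    /* prints: lo [0, 2047], orig [2048, 4095] */
    return 0;
}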
608 * The range [start, end] is inclusive.
612 int btrfs_clear_extent_bit_changeset(struct extent_io_tree *tree, u64 start, u64 end, in btrfs_clear_extent_bit_changeset() argument
627 btrfs_debug_check_extent_io_range(tree, start, end); in btrfs_clear_extent_bit_changeset()
628 trace_btrfs_clear_extent_bit(tree, start, end - start + 1, bits); in btrfs_clear_extent_bit_changeset()
660 cached->start <= start && cached->end > start) { in btrfs_clear_extent_bit_changeset()
671 state = tree_search(tree, start); in btrfs_clear_extent_bit_changeset()
675 if (state->start > end) in btrfs_clear_extent_bit_changeset()
677 WARN_ON(state->end < start); in btrfs_clear_extent_bit_changeset()
701 if (state->start < start) { in btrfs_clear_extent_bit_changeset()
705 ret = split_state(tree, state, prealloc, start); in btrfs_clear_extent_bit_changeset()
721 * in non-atomic mode and start the search again. in btrfs_clear_extent_bit_changeset()
729 if (state->start <= end && state->end > end) { in btrfs_clear_extent_bit_changeset()
753 start = last_end + 1; in btrfs_clear_extent_bit_changeset()
773 * The range [start, end] is inclusive.
776 static void wait_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, in wait_extent_bit() argument
781 btrfs_debug_check_extent_io_range(tree, start, end); in wait_extent_bit()
792 state->start <= start && start < state->end) in wait_extent_bit()
800 state = tree_search(tree, start); in wait_extent_bit()
804 if (state->start > end) in wait_extent_bit()
810 start = state->start; in wait_extent_bit()
820 start = state->end + 1; in wait_extent_bit()
822 if (start > end) in wait_extent_bit()
859 * Find the first state struct with 'bits' set after 'start', and return it.
861 * 'start'.
864 u64 start, u32 bits) in find_first_extent_bit_state() argument
872 state = tree_search(tree, start); in find_first_extent_bit_state()
889 bool btrfs_find_first_extent_bit(struct extent_io_tree *tree, u64 start, in btrfs_find_first_extent_bit() argument
899 if (state->end == start - 1 && extent_state_in_tree(state)) { in btrfs_find_first_extent_bit()
921 state = find_first_extent_bit_state(tree, start, bits); in btrfs_find_first_extent_bit()
925 *start_ret = state->start; in btrfs_find_first_extent_bit()
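The fragments from find_first_extent_bit_state() and btrfs_find_first_extent_bit() above (source lines 859-925) note that the first matching state may begin before 'start'. The sketch below is a simplified userspace analog over a sorted array of flagged inclusive ranges; struct frange and find_first_flagged() are hypothetical names, and the kernel walks an rbtree with a cached-state fast path rather than an array.

#include <stddef.h>
#include <stdint.h>

struct frange { uint64_t start, end; uint32_t flags; };  /* inclusive, sorted, non-overlapping */

/* Return the first entry whose end is at or after 'start' and whose flags
 * intersect 'bits'.  The match may begin before 'start'. */
static const struct frange *find_first_flagged(const struct frange *r, size_t n,
                                               uint64_t start, uint32_t bits)
{
    for (size_t i = 0; i < n; i++) {
        if (r[i].end < start)
            continue;       /* ends before the search offset */
        if (r[i].flags & bits)
            return &r[i];
    }
    return NULL;
}

For example, with a single entry [0, 8191] carrying the searched bit, a lookup from offset 4096 still returns that entry, so the reported range start would be 0.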
938 * @start: offset to start the search from
953 bool btrfs_find_contiguous_extent_bit(struct extent_io_tree *tree, u64 start, in btrfs_find_contiguous_extent_bit() argument
962 state = find_first_extent_bit_state(tree, start, bits); in btrfs_find_contiguous_extent_bit()
964 *start_ret = state->start; in btrfs_find_contiguous_extent_bit()
967 if (state->start > (*end_ret + 1)) in btrfs_find_contiguous_extent_bit()
979 * than 'max_bytes'. start and end are used to return the range,
983 bool btrfs_find_delalloc_range(struct extent_io_tree *tree, u64 *start, in btrfs_find_delalloc_range() argument
988 u64 cur_start = *start; in btrfs_find_delalloc_range()
1005 if (found && (state->start != cur_start || in btrfs_find_delalloc_range()
1015 *start = state->start; in btrfs_find_delalloc_range()
1022 total_bytes += state->end - state->start + 1; in btrfs_find_delalloc_range()
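btrfs_find_delalloc_range() above (source lines 979-1022) accumulates the bytes of the first contiguous run at or after the search offset, stopping at a gap or once 'max_bytes' worth has been seen. The sketch below mirrors only that accumulation over inclusive ranges; the names are illustrative and the delalloc bit checks are omitted for brevity.

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

struct drange { uint64_t start, end; };  /* inclusive, sorted, non-overlapping */

/* Walk the first contiguous run at or after *start, capped at max_bytes.
 * On success, *start/*end describe the run that was accumulated. */
static bool find_contig_run(const struct drange *r, size_t n,
                            uint64_t *start, uint64_t *end, uint64_t max_bytes)
{
    uint64_t total = 0;
    bool found = false;

    for (size_t i = 0; i < n; i++) {
        if (r[i].end < *start)
            continue;
        if (found && r[i].start != *end + 1)
            break;                          /* gap: the run is over */
        if (!found) {
            *start = r[i].start;
            found = true;
        }
        *end = r[i].end;
        total += r[i].end - r[i].start + 1; /* inclusive length */
        if (total >= max_bytes)
            break;
    }
    return found;
}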
1039 * existing range is returned in failed_state in this case, and the start of the
1044 [start, end] is inclusive. This takes the tree lock. in set_extent_bit()
1046 static int set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, in set_extent_bit() argument
1063 btrfs_debug_check_extent_io_range(tree, start, end); in set_extent_bit()
1064 trace_btrfs_set_extent_bit(tree, start, end - start + 1, bits); in set_extent_bit()
1088 if (state->start <= start && state->end > start && in set_extent_bit()
1096 state = tree_search_for_insert(tree, start, &p, &parent); in set_extent_bit()
1101 prealloc->start = start; in set_extent_bit()
1109 last_start = state->start; in set_extent_bit()
1118 if (state->start == start && state->end <= end) { in set_extent_bit()
1120 *failed_start = state->start; in set_extent_bit()
1131 start = last_end + 1; in set_extent_bit()
1133 if (state && state->start == start && !need_resched()) in set_extent_bit()
1153 if (state->start < start) { in set_extent_bit()
1155 *failed_start = start; in set_extent_bit()
1166 start = state->end + 1; in set_extent_bit()
1174 ret = split_state(tree, state, prealloc, start); in set_extent_bit()
1187 start = last_end + 1; in set_extent_bit()
1189 if (state && state->start == start && !need_resched()) in set_extent_bit()
1201 if (state->start > start) { in set_extent_bit()
1212 prealloc->start = start; in set_extent_bit()
1228 start = inserted_state->end + 1; in set_extent_bit()
1231 if (start > end) in set_extent_bit()
1240 * need to unlock and start search again. If it's not contiguous in set_extent_bit()
1253 if (state->start <= end && state->end > end) { in set_extent_bit()
1255 *failed_start = start; in set_extent_bit()
1279 if (start > end) in set_extent_bit()
1294 int btrfs_set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, in btrfs_set_extent_bit() argument
1297 return set_extent_bit(tree, start, end, bits, NULL, NULL, cached_state, NULL); in btrfs_set_extent_bit()
1304 * @start: the start offset in bytes
1318 int btrfs_convert_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, in btrfs_convert_extent_bit() argument
1331 btrfs_debug_check_extent_io_range(tree, start, end); in btrfs_convert_extent_bit()
1332 trace_btrfs_convert_extent_bit(tree, start, end - start + 1, bits, in btrfs_convert_extent_bit()
1352 if (state->start <= start && state->end > start && in btrfs_convert_extent_bit()
1361 state = tree_search_for_insert(tree, start, &p, &parent); in btrfs_convert_extent_bit()
1368 prealloc->start = start; in btrfs_convert_extent_bit()
1376 last_start = state->start; in btrfs_convert_extent_bit()
1385 if (state->start == start && state->end <= end) { in btrfs_convert_extent_bit()
1391 start = last_end + 1; in btrfs_convert_extent_bit()
1392 if (state && state->start == start && !need_resched()) in btrfs_convert_extent_bit()
1412 if (state->start < start) { in btrfs_convert_extent_bit()
1418 ret = split_state(tree, state, prealloc, start); in btrfs_convert_extent_bit()
1430 start = last_end + 1; in btrfs_convert_extent_bit()
1431 if (state && state->start == start && !need_resched()) in btrfs_convert_extent_bit()
1443 if (state->start > start) { in btrfs_convert_extent_bit()
1456 prealloc->start = start; in btrfs_convert_extent_bit()
1471 start = inserted_state->end + 1; in btrfs_convert_extent_bit()
1474 if (start > end) in btrfs_convert_extent_bit()
1483 * need to unlock and start search again. If it's not contiguous in btrfs_convert_extent_bit()
1496 if (state->start <= end && state->end > end) { in btrfs_convert_extent_bit()
1518 if (start > end) in btrfs_convert_extent_bit()
1533 * Find the first range that has @bits not set. This range could start before
1534 * @start.
1537 * @start: offset at/after which the found extent should start
1547 void btrfs_find_first_clear_extent_bit(struct extent_io_tree *tree, u64 start, in btrfs_find_first_clear_extent_bit() argument
1557 state = tree_search_prev_next(tree, start, &prev, &next); in btrfs_find_first_clear_extent_bit()
1568 * We are past the last allocated chunk, set start at in btrfs_find_first_clear_extent_bit()
1579 * At this point 'state' either contains 'start' or start is in btrfs_find_first_clear_extent_bit()
1582 if (in_range(start, state->start, state->end - state->start + 1)) { in btrfs_find_first_clear_extent_bit()
1587 * start in btrfs_find_first_clear_extent_bit()
1589 start = state->end + 1; in btrfs_find_first_clear_extent_bit()
1592 * 'start' falls within a range that doesn't in btrfs_find_first_clear_extent_bit()
1593 * have the bits set, so take its start as the in btrfs_find_first_clear_extent_bit()
1598 * start in btrfs_find_first_clear_extent_bit()
1600 *start_ret = state->start; in btrfs_find_first_clear_extent_bit()
1607 * start in btrfs_find_first_clear_extent_bit()
1613 * start in btrfs_find_first_clear_extent_bit()
1624 * Find the longest stretch from start until an entry which has the in btrfs_find_first_clear_extent_bit()
1628 if (state->end >= start && !(state->state & bits)) { in btrfs_find_first_clear_extent_bit()
1631 *end_ret = state->start - 1; in btrfs_find_first_clear_extent_bit()
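btrfs_find_first_clear_extent_bit() above (source lines 1533-1631) returns the region where the bits are not set that contains or follows 'start'; as the comment fragment notes, that region may begin before 'start'. Below is a simplified analog over a sorted array of set ranges, with hypothetical names; the kernel does the same case analysis on rbtree neighbours.

#include <stddef.h>
#include <stdint.h>

struct brange { uint64_t start, end; };  /* inclusive ranges where the bits ARE set; sorted */

/* Find the clear region containing or following 'start'; it may begin before 'start'. */
static void find_first_clear(const struct brange *r, size_t n, uint64_t start,
                             uint64_t *start_ret, uint64_t *end_ret)
{
    *start_ret = 0;
    *end_ret = UINT64_MAX;

    for (size_t i = 0; i < n; i++) {
        if (r[i].end < start) {
            *start_ret = r[i].end + 1;   /* clear space begins past this set range */
            continue;
        }
        if (r[i].start <= start) {
            /* 'start' sits inside a set range: look past it. */
            start = r[i].end + 1;
            *start_ret = start;
            continue;
        }
        /* This set range begins after 'start': the clear region ends just before it. */
        *end_ret = r[i].start - 1;
        return;
    }
    /* Nothing set beyond 'start': the clear region runs to the end of the space. */
}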
1645 * @start: The start offset of the range. This value is updated to the
1658 * called only once or if each call does not start where the
1663 * then @start is updated with the offset of the first byte with the bits set.
1666 u64 *start, u64 search_end, u64 max_bytes, in btrfs_count_range_bits() argument
1672 u64 cur_start = *start; in btrfs_count_range_bits()
1690 if (cached->start <= cur_start && cur_start <= cached->end) { in btrfs_count_range_bits()
1692 } else if (cached->start > cur_start) { in btrfs_count_range_bits()
1696 * The cached state starts after our search range's start. Check in btrfs_count_range_bits()
1700 * no previous state record, we can start from our cached state. in btrfs_count_range_bits()
1705 else if (prev->start <= cur_start && cur_start <= prev->end) in btrfs_count_range_bits()
1718 if (state->start > search_end) in btrfs_count_range_bits()
1720 if (contig && found && state->start > last + 1) in btrfs_count_range_bits()
1724 max(cur_start, state->start); in btrfs_count_range_bits()
1728 *start = max(cur_start, state->start); in btrfs_count_range_bits()
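btrfs_count_range_bits() above (source lines 1645-1728) sums, for each state intersecting the search window, the overlap clamped to max(cur_start, state->start) .. min(search_end, state->end). That inclusive overlap arithmetic in isolation, as a small sketch with illustrative names:

#include <stdint.h>

static uint64_t min_u64(uint64_t a, uint64_t b) { return a < b ? a : b; }
static uint64_t max_u64(uint64_t a, uint64_t b) { return a > b ? a : b; }

/* Bytes shared by inclusive [cur_start, search_end] and inclusive
 * [state_start, state_end]; 0 when they do not intersect. */
static uint64_t overlap_bytes(uint64_t cur_start, uint64_t search_end,
                              uint64_t state_start, uint64_t state_end)
{
    uint64_t lo = max_u64(cur_start, state_start);
    uint64_t hi = min_u64(search_end, state_end);

    return hi >= lo ? hi - lo + 1 : 0;
}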
1753 bool btrfs_test_range_bit_exists(struct extent_io_tree *tree, u64 start, u64 end, u32 bit) in btrfs_test_range_bit_exists() argument
1761 state = tree_search(tree, start); in btrfs_test_range_bit_exists()
1763 if (state->start > end) in btrfs_test_range_bit_exists()
1779 void btrfs_get_range_bits(struct extent_io_tree *tree, u64 start, u64 end, u32 *bits, in btrfs_get_range_bits() argument
1785 * The cached state is currently mandatory and not used to start the in btrfs_get_range_bits()
1794 state = tree_search(tree, start); in btrfs_get_range_bits()
1795 if (state && state->start < end) { in btrfs_get_range_bits()
1800 if (state->start > end) in btrfs_get_range_bits()
1814 * Check if the whole range [@start, @end] contains the single @bit set. in btrfs_test_range_bit()
1816 bool btrfs_test_range_bit(struct extent_io_tree *tree, u64 start, u64 end, u32 bit, in btrfs_test_range_bit() argument
1823 ASSERT(start < end); in btrfs_test_range_bit()
1826 if (cached && extent_state_in_tree(cached) && cached->start <= start && in btrfs_test_range_bit()
1827 cached->end > start) in btrfs_test_range_bit()
1830 state = tree_search(tree, start); in btrfs_test_range_bit()
1832 if (state->start > start) { in btrfs_test_range_bit()
1845 /* Next state must start where this one ends. */ in btrfs_test_range_bit()
1846 start = state->end + 1; in btrfs_test_range_bit()
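btrfs_test_range_bit() above (source lines 1814-1846) only succeeds if states carrying the bit cover every byte of the inclusive range with no holes, each successor starting exactly at the previous end + 1. A userspace sketch of that coverage walk over a sorted array, with hypothetical names (the kernel walks rbtree successors under the tree lock):

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

struct srange { uint64_t start, end; uint32_t flags; };  /* inclusive, sorted, non-overlapping */

/* True only if every byte of inclusive [start, end] is covered by ranges that
 * all carry 'bit', with no holes between them. */
static bool range_fully_flagged(const struct srange *r, size_t n,
                                uint64_t start, uint64_t end, uint32_t bit)
{
    for (size_t i = 0; i < n && start <= end; i++) {
        if (r[i].end < start)
            continue;
        if (r[i].start > start || !(r[i].flags & bit))
            return false;        /* hole before this range, or bit missing */
        start = r[i].end + 1;    /* next range must start exactly here */
    }
    return start > end;
}

This contrasts with btrfs_test_range_bit_exists() above (source lines 1753-1763), which succeeds as soon as any overlapping state has the bit.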
1858 int btrfs_set_record_extent_bits(struct extent_io_tree *tree, u64 start, u64 end, in btrfs_set_record_extent_bits() argument
1868 return set_extent_bit(tree, start, end, bits, NULL, NULL, NULL, changeset); in btrfs_set_record_extent_bits()
1871 int btrfs_clear_record_extent_bits(struct extent_io_tree *tree, u64 start, u64 end, in btrfs_clear_record_extent_bits() argument
1880 return btrfs_clear_extent_bit_changeset(tree, start, end, bits, NULL, changeset); in btrfs_clear_record_extent_bits()
1883 bool btrfs_try_lock_extent_bits(struct extent_io_tree *tree, u64 start, u64 end, in btrfs_try_lock_extent_bits() argument
1889 ret = set_extent_bit(tree, start, end, bits, &failed_start, NULL, cached, NULL); in btrfs_try_lock_extent_bits()
1891 if (failed_start > start) in btrfs_try_lock_extent_bits()
1892 btrfs_clear_extent_bit(tree, start, failed_start - 1, in btrfs_try_lock_extent_bits()
1900 * Either insert or lock state struct between start and end. Use mask to tell in btrfs_lock_extent_bits()
1903 int btrfs_lock_extent_bits(struct extent_io_tree *tree, u64 start, u64 end, u32 bits, in btrfs_lock_extent_bits() argument
1910 ret = set_extent_bit(tree, start, end, bits, &failed_start, in btrfs_lock_extent_bits()
1913 if (failed_start != start) in btrfs_lock_extent_bits()
1914 btrfs_clear_extent_bit(tree, start, failed_start - 1, in btrfs_lock_extent_bits()
1918 ret = set_extent_bit(tree, start, end, bits, &failed_start, in btrfs_lock_extent_bits()