/linux/drivers/gpu/drm/i915/i915_scatterlist.c
      86  u64 block_size, offset, prev_end;  in i915_rsgt_from_mm_node() local
     114  prev_end = (resource_size_t)-1;  in i915_rsgt_from_mm_node()
     121  if (offset != prev_end || sg->length >= max_segment) {  in i915_rsgt_from_mm_node()
     140  prev_end = offset;  in i915_rsgt_from_mm_node()
     176  resource_size_t prev_end;  in i915_rsgt_from_buddy_resource() local
     200  prev_end = (resource_size_t)-1;  in i915_rsgt_from_buddy_resource()
     211  if (offset != prev_end || sg->length >= max_segment) {  in i915_rsgt_from_buddy_resource()
     230  prev_end = offset;  in i915_rsgt_from_buddy_resource()
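The i915_rsgt_from_mm_node() hits above show a common coalescing idiom: prev_end starts at an impossible value, and each block either extends the current scatterlist segment or opens a new one when it is not adjacent to prev_end or the segment has reached max_segment. A minimal userspace sketch of that idiom, with hypothetical block/segment structs standing in for the real drm_mm node and sg_table types:

    #include <stdint.h>
    #include <stdio.h>

    struct block   { uint64_t offset; uint64_t len; };
    struct segment { uint64_t start;  uint64_t len; };

    /* Walk blocks in address order and coalesce adjacent ones into segments,
     * capping each segment at max_segment bytes. */
    static size_t coalesce(const struct block *blk, size_t nblk,
                           struct segment *seg, uint64_t max_segment)
    {
            /* The kernel seeds prev_end with (resource_size_t)-1 so the first
             * block can never look contiguous; the explicit nseg == 0 test
             * below plays the same role in this sketch. */
            uint64_t prev_end = (uint64_t)-1;
            size_t nseg = 0;

            for (size_t i = 0; i < nblk; i++) {
                    if (nseg == 0 || blk[i].offset != prev_end ||
                        seg[nseg - 1].len >= max_segment) {
                            /* Not adjacent, or segment full: open a new one. */
                            seg[nseg].start = blk[i].offset;
                            seg[nseg].len = blk[i].len;
                            nseg++;
                    } else {
                            /* Physically contiguous: grow the segment in place. */
                            seg[nseg - 1].len += blk[i].len;
                    }
                    prev_end = blk[i].offset + blk[i].len;
            }
            return nseg;
    }

    int main(void)
    {
            struct block blk[] = { { 0, 4096 }, { 4096, 4096 }, { 16384, 4096 } };
            struct segment seg[3];
            size_t n = coalesce(blk, 3, seg, 1 << 20);

            /* Expect two segments: [0, 8192) and [16384, 20480). */
            for (size_t i = 0; i < n; i++)
                    printf("segment %zu: start=%llu len=%llu\n", i,
                           (unsigned long long)seg[i].start,
                           (unsigned long long)seg[i].len);
            return 0;
    }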
/linux/drivers/parisc/iommu-helpers.h
     132  unsigned long prev_end, sg_start;  in iommu_coalesce_chunks() local
     134  prev_end = (unsigned long)sg_virt(startsg) +  in iommu_coalesce_chunks()
     159  if (unlikely((prev_end != sg_start) ||  in iommu_coalesce_chunks()
     160      ((prev_end | sg_start) & ~PAGE_MASK)))  in iommu_coalesce_chunks()
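The iommu_coalesce_chunks() lines express the merge test in the negative: a new DMA chunk is started when the previous entry's virtual end does not meet the next entry's start, or when that meeting point is not page aligned. A small sketch of the same predicate, phrased positively, with a locally defined PAGE_SIZE rather than the kernel's:

    #include <assert.h>
    #include <stdbool.h>
    #include <stdint.h>

    #define PAGE_SIZE 4096UL
    #define PAGE_MASK (~(PAGE_SIZE - 1))

    /* Two chunks may share one IOMMU mapping only if the first ends exactly
     * where the second begins and that joint sits on a page boundary. */
    static bool can_coalesce(uintptr_t prev_end, uintptr_t sg_start)
    {
            if (prev_end != sg_start)
                    return false;           /* not virtually contiguous */
            if ((prev_end | sg_start) & ~PAGE_MASK)
                    return false;           /* joint is not page aligned */
            return true;
    }

    int main(void)
    {
            assert(can_coalesce(0x2000, 0x2000));   /* contiguous, on a page boundary */
            assert(!can_coalesce(0x2000, 0x3000));  /* gap between the chunks */
            assert(!can_coalesce(0x2100, 0x2100));  /* contiguous but mid-page */
            return 0;
    }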
/linux/mm/numa_memblks.c
     490  u64 prev_end;  in numa_fill_memblks() local
     520  prev_end = blk[0]->end;  in numa_fill_memblks()
     524  if (prev_end >= curr->start) {  in numa_fill_memblks()
     525  if (prev_end < curr->end)  in numa_fill_memblks()
     526  prev_end = curr->end;  in numa_fill_memblks()
     528  curr->start = prev_end;  in numa_fill_memblks()
     529  prev_end = curr->end;  in numa_fill_memblks()
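numa_fill_memblks() walks the blocks in start order with a running prev_end: overlapping or touching blocks only advance prev_end, while a hole causes the next block's start to be pulled back to prev_end so the covered range becomes contiguous. A userspace sketch of that walk, with a hypothetical memblk struct standing in for numa_memblk:

    #include <stdint.h>
    #include <stdio.h>

    struct memblk { uint64_t start, end; };

    static void fill_gaps(struct memblk *blk, size_t n)
    {
            uint64_t prev_end;

            if (n == 0)
                    return;
            prev_end = blk[0].end;
            for (size_t i = 1; i < n; i++) {
                    if (prev_end >= blk[i].start) {
                            /* Overlapping or touching: only advance the running end. */
                            if (prev_end < blk[i].end)
                                    prev_end = blk[i].end;
                    } else {
                            /* Hole: stretch this block backwards to close it. */
                            blk[i].start = prev_end;
                            prev_end = blk[i].end;
                    }
            }
    }

    int main(void)
    {
            struct memblk blk[] = { { 0, 0x1000 }, { 0x3000, 0x4000 } };

            fill_gaps(blk, 2);
            /* The hole [0x1000, 0x3000) is absorbed by the second block. */
            printf("second block now starts at 0x%llx\n",
                   (unsigned long long)blk[1].start);
            return 0;
    }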
/linux/mm/memblock.c
    2068  unsigned long start, end, prev_end = 0;  in free_unused_memmap() local
    2085  start = min(start, ALIGN(prev_end, PAGES_PER_SECTION));  in free_unused_memmap()
    2098  if (prev_end && prev_end < start)  in free_unused_memmap()
    2099  free_memmap(prev_end, start);  in free_unused_memmap()
    2106  prev_end = pageblock_align(end);  in free_unused_memmap()
    2110  if (!IS_ALIGNED(prev_end, PAGES_PER_SECTION)) {  in free_unused_memmap()
    2111  prev_end = pageblock_align(end);  in free_unused_memmap()
    2112  free_memmap(prev_end, ALIGN(prev_end, PAGES_PER_SECTIO  in free_unused_memmap()
    [all ...]
/linux/arch/x86/virt/vmx/tdx/tdx.c
     760  u64 prev_end;  in tdmr_populate_rsvd_holes() local
     767  prev_end = tdmr->base;  in tdmr_populate_rsvd_holes()
     786  if (start <= prev_end) {  in tdmr_populate_rsvd_holes()
     787  prev_end = end;  in tdmr_populate_rsvd_holes()
     792  ret = tdmr_add_rsvd_area(tdmr, rsvd_idx, prev_end,  in tdmr_populate_rsvd_holes()
     793      start - prev_end,  in tdmr_populate_rsvd_holes()
     798  prev_end = end;  in tdmr_populate_rsvd_holes()
     802  if (prev_end < tdmr_end(tdmr)) {  in tdmr_populate_rsvd_holes()
     803  ret = tdmr_add_rsvd_area(tdmr, rsvd_idx, prev_end,  in tdmr_populate_rsvd_holes()
     804      tdmr_end(tdmr) - prev_end,  in tdmr_populate_rsvd_holes()
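tdmr_populate_rsvd_holes() starts prev_end at the TDMR base, skips memory regions that begin at or before it, records a reserved area for every gap it finds, and finally covers the tail up to tdmr_end(). A sketch of that hole enumeration over a container range, assuming the sub-ranges are sorted and non-overlapping; add_rsvd() merely prints where the kernel would call tdmr_add_rsvd_area():

    #include <stdint.h>
    #include <stdio.h>

    struct range { uint64_t start, end; };     /* [start, end) */

    static void add_rsvd(uint64_t addr, uint64_t size)
    {
            printf("reserved hole: [%#llx, %#llx)\n",
                   (unsigned long long)addr, (unsigned long long)(addr + size));
    }

    static void populate_holes(uint64_t base, uint64_t limit,
                               const struct range *r, size_t n)
    {
            uint64_t prev_end = base;

            for (size_t i = 0; i < n; i++) {
                    /* Region starts at or before the covered point: no hole. */
                    if (r[i].start <= prev_end) {
                            prev_end = r[i].end;
                            continue;
                    }
                    /* Gap between the covered point and this region. */
                    add_rsvd(prev_end, r[i].start - prev_end);
                    prev_end = r[i].end;
            }
            /* Trailing gap up to the end of the container. */
            if (prev_end < limit)
                    add_rsvd(prev_end, limit - prev_end);
    }

    int main(void)
    {
            struct range r[] = { { 0x0, 0x100000 }, { 0x200000, 0x300000 } };

            /* Prints holes at [0x100000, 0x200000) and [0x300000, 0x400000). */
            populate_holes(0x0, 0x400000, r, 2);
            return 0;
    }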
/linux/drivers/iio/imu/bno055/bno055.c
    1443  int xfer_start, start, end, prev_end;  in bno055_trigger_handler() local
    1482  (prev_end <= BNO055_SCAN_QUATERNION)) ? 3 : 0;  in bno055_trigger_handler()
    1485  thr_hit = (start - prev_end + quat_extra_len) >  in bno055_trigger_handler()
    1496  prev_end - xfer_start,  in bno055_trigger_handler()
    1504  prev_end = end;  in bno055_trigger_handler()
    1513  prev_end - xfer_start,  in bno055_trigger_handler()
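bno055_trigger_handler() reads the enabled channels in bursts: a gap between the previous channel's last register (prev_end) and the next channel's first register is tolerated up to a threshold, otherwise the transfer is split. A sketch of that decision with made-up register windows and threshold; the quaternion length adjustment visible at line 1482 is left out:

    #include <stdio.h>

    #define GAP_THRESHOLD 4    /* max unused registers worth fetching in one go */

    struct window { int start, end; };    /* [start, end) register range */

    /* Decide whether to end the current burst before the next window. */
    static int split_transfer(int prev_end, int next_start)
    {
            return (next_start - prev_end) > GAP_THRESHOLD;
    }

    int main(void)
    {
            struct window win[] = { { 0x08, 0x0e }, { 0x0e, 0x14 }, { 0x20, 0x28 } };
            int xfer_start = win[0].start, prev_end = win[0].end;

            for (size_t i = 1; i < sizeof(win) / sizeof(win[0]); i++) {
                    if (split_transfer(prev_end, win[i].start)) {
                            printf("burst read [%#x, %#x)\n", xfer_start, prev_end);
                            xfer_start = win[i].start;
                    }
                    prev_end = win[i].end;
            }
            printf("burst read [%#x, %#x)\n", xfer_start, prev_end);
            return 0;    /* two bursts: [0x08, 0x14) and [0x20, 0x28) */
    }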
/linux/fs/btrfs/tree-checker.c
     330  u64 prev_end;  in check_extent_data_item() local
     334  prev_end = file_extent_end(leaf, prev_key, prev_fi);  in check_extent_data_item()
     335  if (unlikely(prev_end > key->offset)) {  in check_extent_data_item()
     338      prev_end, key->offset);  in check_extent_data_item()
    1599  u64 prev_end = prev_key->objectid;  in check_extent_item() local
    1602  prev_end += fs_info->nodesize;  in check_extent_item()
    1604  prev_end += prev_key->offset;  in check_extent_item()
    1606  if (unlikely(prev_end > key->objectid)) {  in check_extent_item()
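Both tree-checker hits implement the same sanity rule: items in a leaf are sorted, so the end computed for the previous item must not run past the start of the current one, otherwise the tree is corrupted. A stand-alone sketch of that overlap check over a sorted array; struct item and the error report are illustrative, not btrfs types:

    #include <stdint.h>
    #include <stdio.h>

    struct item { uint64_t start, len; };

    static uint64_t item_end(const struct item *it)
    {
            return it->start + it->len;
    }

    /* Return 0 if the sorted items never overlap, -1 on the first violation. */
    static int check_no_overlap(const struct item *it, size_t n)
    {
            for (size_t i = 1; i < n; i++) {
                    uint64_t prev_end = item_end(&it[i - 1]);

                    if (prev_end > it[i].start) {
                            fprintf(stderr,
                                    "item %zu overlaps: prev_end=%llu start=%llu\n",
                                    i, (unsigned long long)prev_end,
                                    (unsigned long long)it[i].start);
                            return -1;
                    }
            }
            return 0;
    }

    int main(void)
    {
            /* The third item starts inside the second, so the check fails. */
            struct item items[] = { { 0, 4096 }, { 4096, 8192 }, { 8192, 4096 } };

            return check_no_overlap(items, 3) ? 1 : 0;
    }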
/linux/fs/nfs/pagelist.c
    1047  size_t prev_end = prev->wb_pgbase + prev->wb_bytes;  in nfs_page_is_contiguous() local
    1052  return prev_end == nfs_page_max_length(prev);  in nfs_page_is_contiguous()
    1053  if (req->wb_pgbase == prev_end) {  in nfs_page_is_contiguous()
/linux/drivers/accel/habanalabs/common/memory.c
     601  u64 tmp_hint_addr, valid_start, valid_size, prev_start, prev_end,  in get_va_block() local
     710  prev_end = reserved_valid_start - 1;  in get_va_block()
     727  rc = add_va_block_locked(hdev, &va_range->list, prev_start, prev_end);  in get_va_block()
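In get_va_block(), carving a reservation out of a free virtual-address block leaves an unused head whose inclusive end is reserved_valid_start - 1; that remainder is handed back to the free list via add_va_block_locked(). A sketch of that split covering both head and tail remainders; add_free_block() is a hypothetical placeholder for the list insertion:

    #include <stdint.h>
    #include <stdio.h>

    static void add_free_block(uint64_t start, uint64_t end)
    {
            printf("free block [%#llx, %#llx]\n",
                   (unsigned long long)start, (unsigned long long)end);
    }

    /* Reserve [rsv_start, rsv_end] out of the free block [blk_start, blk_end]
     * (all bounds inclusive, matching the driver's convention). */
    static void carve_reservation(uint64_t blk_start, uint64_t blk_end,
                                  uint64_t rsv_start, uint64_t rsv_end)
    {
            if (rsv_start > blk_start) {
                    uint64_t prev_start = blk_start;
                    uint64_t prev_end = rsv_start - 1;  /* head left before the reservation */

                    add_free_block(prev_start, prev_end);
            }
            if (rsv_end < blk_end)
                    add_free_block(rsv_end + 1, blk_end);  /* tail left after it */
    }

    int main(void)
    {
            /* Prints the remainders [0x1000, 0x3fff] and [0x8000, 0xffff]. */
            carve_reservation(0x1000, 0xffff, 0x4000, 0x7fff);
            return 0;
    }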