/linux/mm/
vma_exec.c
    36  unsigned long new_start = old_start - shift;    in relocate_vma_down()  (local)
    38  VMA_ITERATOR(vmi, mm, new_start);    in relocate_vma_down()
    39  VMG_STATE(vmg, mm, &vmi, new_start, old_end, 0, vma->vm_pgoff);    in relocate_vma_down()
    42  PAGETABLE_MOVE(pmc, vma, vma, old_start, new_start, length);    in relocate_vma_down()
    44  BUG_ON(new_start > new_end);    in relocate_vma_down()
    55  * cover the whole range: [new_start, old_end)    in relocate_vma_down()
    91  return vma_shrink(&vmi, vma, new_start, new_end, vma->vm_pgoff);    in relocate_vma_down()
readahead.c
   740  * @new_start: The revised start
   742  readahead_expand(struct readahead_control *ractl, loff_t new_start, size_t new_len)    readahead_expand()  (argument)
   756  loff_t new_start, size_t new_len)    in readahead_expand()
   765  new_index = new_start / PAGE_SIZE;    in readahead_expand()
   798  new_len += new_start - readahead_pos(ractl);    in readahead_expand()
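The readahead.c hits belong to readahead_expand(), which widens an in-flight readahead window so it also covers a caller-supplied byte range. Below is a minimal user-space sketch of just the window arithmetic those lines show, assuming the window is tracked as a page index plus a page count; struct window, window_expand() and the 4 KiB PAGE_SIZE are illustrative stand-ins, not the kernel implementation.

```c
/*
 * Hypothetical sketch of the window arithmetic in readahead_expand(): grow a
 * page-aligned window [index, index + nr_pages) so it also covers the byte
 * range [new_start, new_start + new_len).  Not the kernel code.
 */
#include <stdio.h>
#include <stdint.h>

#define PAGE_SIZE 4096ULL
#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

struct window {
	uint64_t index;     /* first page in the window */
	uint64_t nr_pages;  /* number of pages in the window */
};

static void window_expand(struct window *w, uint64_t new_start, uint64_t new_len)
{
	uint64_t new_index = new_start / PAGE_SIZE;
	uint64_t new_nr_pages;

	/* Expand the leading edge downwards, page by page. */
	while (w->index > new_index) {
		w->index--;
		w->nr_pages++;
	}

	/*
	 * The requested length was measured from new_start, but the window now
	 * begins at w->index * PAGE_SIZE, which may be earlier; account for the
	 * extra span before rounding up to whole pages.
	 */
	new_len += new_start - w->index * PAGE_SIZE;
	new_nr_pages = DIV_ROUND_UP(new_len, PAGE_SIZE);

	/* Expand the trailing edge upwards if the request reaches past it. */
	if (new_nr_pages > w->nr_pages)
		w->nr_pages = new_nr_pages;
}

int main(void)
{
	struct window w = { .index = 10, .nr_pages = 4 };

	window_expand(&w, 8 * PAGE_SIZE + 123, 9 * PAGE_SIZE);
	printf("window: pages [%llu, %llu)\n",
	       (unsigned long long)w.index,
	       (unsigned long long)(w.index + w.nr_pages));
	return 0;
}
```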
vma.c
  2949  unsigned long new_start;    in acct_stack_growth()  (local)
  2964  new_start = (vma->vm_flags & VM_GROWSUP) ? vma->vm_start :    in acct_stack_growth()
  2966  if (is_hugepage_only_range(vma->vm_mm, new_start, size))    in acct_stack_growth()
/linux/arch/arm/mm/
cache-l2x0.c
  1609  unsigned long new_start, new_end;    in bcm_inv_range()  (local)
  1616  new_start = bcm_l2_phys_addr(start);    in bcm_inv_range()
  1621  l2c210_inv_range(new_start, new_end);    in bcm_inv_range()
  1628  l2c210_inv_range(new_start,    in bcm_inv_range()
  1636  unsigned long new_start, new_end;    in bcm_clean_range()  (local)
  1643  new_start = bcm_l2_phys_addr(start);    in bcm_clean_range()
  1648  l2c210_clean_range(new_start, new_end);    in bcm_clean_range()
  1655  l2c210_clean_range(new_start,    in bcm_clean_range()
  1663  unsigned long new_start, new_end;    in bcm_flush_range()  (local)
  1675  new_start = bcm_l2_phys_addr(start);    in bcm_flush_range()
  [all …]
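The bcm_*_range() hits all follow the same shape: translate both endpoints of the range, then either issue a single maintenance call or, if the range crosses a fixed section boundary, split it there and issue one call per segment. A hypothetical stand-alone sketch of that shape follows; translate(), do_range(), maintain_range() and BOUNDARY are invented for illustration and do not correspond to bcm_l2_phys_addr() or the l2c210 helpers.

```c
/*
 * Hypothetical sketch of the bcm_*_range() pattern: translate, then split a
 * range at a boundary it crosses before issuing per-segment operations.
 */
#include <stdio.h>

#define BOUNDARY 0x40000000UL	/* illustrative section boundary */

static unsigned long translate(unsigned long addr)
{
	return addr + 0x80000000UL;	/* pretend alias -> physical offset */
}

static void do_range(unsigned long start, unsigned long end)
{
	printf("op on [0x%lx, 0x%lx)\n", start, end);
}

static void maintain_range(unsigned long start, unsigned long end)
{
	unsigned long new_start = translate(start);
	unsigned long new_end = translate(end);

	/* Common case: the range stays on one side of the boundary. */
	if (start >= BOUNDARY || end <= BOUNDARY) {
		do_range(new_start, new_end);
		return;
	}

	/* The range crosses the boundary: issue one call per segment. */
	do_range(new_start, translate(BOUNDARY));
	do_range(translate(BOUNDARY), new_end);
}

int main(void)
{
	maintain_range(0x3fffe000UL, 0x40002000UL);
	return 0;
}
```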
/linux/drivers/nvdimm/
badrange.c
   150  u64 new_start = clr_end + 1;    in badrange_forget()  (local)
   151  u64 new_len = bre_end - new_start + 1;    in badrange_forget()
   154  alloc_and_append_badrange_entry(badrange, new_start,    in badrange_forget()
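These badrange_forget() lines compute the right-hand remainder that survives when a cleared span falls in the middle of an existing entry: it starts at clr_end + 1 and runs to the old end. A small sketch of that inclusive-range split, with struct range and split_range() as illustrative names rather than the nvdimm API:

```c
/*
 * Hypothetical sketch of the split in badrange_forget(): clearing the
 * inclusive span [clr_start, clr_end] out of the middle of an entry covering
 * [start, start + len - 1] leaves a left remainder and a right remainder.
 */
#include <stdio.h>
#include <stdint.h>

struct range {
	uint64_t start;
	uint64_t len;	/* 0 means "no remainder" */
};

static void split_range(uint64_t start, uint64_t len,
			uint64_t clr_start, uint64_t clr_end,
			struct range *left, struct range *right)
{
	uint64_t end = start + len - 1;
	uint64_t new_start = clr_end + 1;

	/* Left remainder: everything before the cleared span. */
	left->start = start;
	left->len = clr_start > start ? clr_start - start : 0;

	/* Right remainder: everything after the cleared span. */
	right->start = new_start;
	right->len = end >= new_start ? end - new_start + 1 : 0;
}

int main(void)
{
	struct range left, right;

	/* Clear [120, 130] out of the entry covering [100, 199]. */
	split_range(100, 100, 120, 130, &left, &right);
	printf("left  [%llu, len %llu]\n",
	       (unsigned long long)left.start, (unsigned long long)left.len);
	printf("right [%llu, len %llu]\n",
	       (unsigned long long)right.start, (unsigned long long)right.len);
	return 0;
}
```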
/linux/kernel/bpf/
log.c
    93  u64 new_end, new_start;    in bpf_verifier_vlog()  (local)
    98  new_start = new_end - log->len_total;    in bpf_verifier_vlog()
   100  new_start = log->start_pos;    in bpf_verifier_vlog()
   102  log->start_pos = new_start;    in bpf_verifier_vlog()
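In bpf_verifier_vlog() the start_pos/end_pos pair indexes an ever-growing output stream, and when appending would exceed the buffer capacity the window start is advanced so only the newest bytes stay resident. A simplified user-space sketch of that bookkeeping (it ignores the verifier's NUL terminator and truncation flags); struct rotlog and rotlog_append() are hypothetical names:

```c
/*
 * Hypothetical sketch of the rotating-log window in bpf_verifier_vlog():
 * stream positions grow forever, the fixed buffer keeps only the newest
 * CAPACITY bytes, and start_pos slides forward when the window would overflow.
 */
#include <stdio.h>
#include <stdint.h>
#include <stddef.h>

#define CAPACITY 16

struct rotlog {
	char buf[CAPACITY];
	uint64_t start_pos;	/* stream position of the oldest retained byte */
	uint64_t end_pos;	/* stream position one past the newest byte */
};

static void rotlog_append(struct rotlog *log, const char *data, size_t n)
{
	uint64_t new_end = log->end_pos + n;
	uint64_t new_start;
	size_t i;

	/* Keep at most CAPACITY bytes: slide the window start forward. */
	if (new_end - log->start_pos > CAPACITY)
		new_start = new_end - CAPACITY;
	else
		new_start = log->start_pos;
	log->start_pos = new_start;

	/* Store each byte at its stream position modulo the buffer size. */
	for (i = 0; i < n; i++)
		log->buf[(log->end_pos + i) % CAPACITY] = data[i];
	log->end_pos = new_end;
}

int main(void)
{
	struct rotlog log = { .start_pos = 0, .end_pos = 0 };
	uint64_t pos;

	rotlog_append(&log, "hello, ", 7);
	rotlog_append(&log, "rotating verifier log\n", 22);

	/* Dump whatever is still retained, oldest byte first. */
	for (pos = log.start_pos; pos < log.end_pos; pos++)
		putchar(log.buf[pos % CAPACITY]);
	return 0;
}
```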
/linux/fs/bcachefs/
btree_update.c
   175  struct bpos new_start = bkey_start_pos(new.k);    in bch2_trans_update_extent_overwrite()  (local)
   176  unsigned front_split = bkey_lt(bkey_start_pos(old.k), new_start);    in bch2_trans_update_extent_overwrite()
   197  bch2_cut_back(new_start, update);    in bch2_trans_update_extent_overwrite()
   213  bch2_cut_front(new_start, update);    in bch2_trans_update_extent_overwrite()
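The btree_update.c hits trim the overwritten extent around new_start: if the old key starts earlier, a front remainder is kept by cutting a copy back to new_start, and the overlapping part is obtained by cutting the front at new_start. A toy sketch of those two cuts on half-open extents; struct extent, cut_front() and cut_back() are illustrative, not bch2_cut_front()/bch2_cut_back():

```c
/*
 * Hypothetical sketch of front/back trimming when a new extent overwrites
 * part of an old one, as in bch2_trans_update_extent_overwrite().
 */
#include <stdio.h>
#include <stdint.h>

struct extent {
	uint64_t start;	/* inclusive */
	uint64_t end;	/* exclusive */
};

/* Drop everything before pos, keeping [pos, end). */
static void cut_front(uint64_t pos, struct extent *e)
{
	if (pos > e->start)
		e->start = pos < e->end ? pos : e->end;
}

/* Drop everything from pos onward, keeping [start, pos). */
static void cut_back(uint64_t pos, struct extent *e)
{
	if (pos < e->end)
		e->end = pos > e->start ? pos : e->start;
}

int main(void)
{
	struct extent old = { 10, 50 };
	struct extent new_ext = { 30, 70 };
	uint64_t new_start = new_ext.start;

	/* The old extent starts earlier, so a front remainder survives. */
	if (old.start < new_start) {
		struct extent front = old;

		cut_back(new_start, &front);	/* keeps [10, 30) */
		printf("front remainder: [%llu, %llu)\n",
		       (unsigned long long)front.start,
		       (unsigned long long)front.end);
	}

	/* The rest of the old extent is overwritten from new_start onward. */
	cut_front(new_start, &old);		/* old becomes [30, 50) */
	printf("overwritten part: [%llu, %llu)\n",
	       (unsigned long long)old.start, (unsigned long long)old.end);
	return 0;
}
```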
reflink.c
   225  struct bpos new_start = bkey_start_pos(&new->k);    in bch2_indirect_extent_missing_error()  (local)
   229  new_start.offset += missing_start - live_start;    in bch2_indirect_extent_missing_error()
   233  bch2_cut_front(new_start, &new->k_i);    in bch2_indirect_extent_missing_error()
/linux/drivers/md/dm-vdo/indexer/
delta-index.c
   158  u64 new_start;    in rebalance_delta_zone()  (local)
   163  new_start = delta_zone->new_offsets[first];    in rebalance_delta_zone()
   164  if (delta_list->start != new_start) {    in rebalance_delta_zone()
   169  delta_list->start = new_start;    in rebalance_delta_zone()
   184  new_start = delta_zone->new_offsets[middle];    in rebalance_delta_zone()
   190  if (new_start > delta_list->start) {    in rebalance_delta_zone()
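rebalance_delta_zone() repacks variable-length lists inside one shared buffer to precomputed new offsets, and the new_start > delta_list->start test decides which half to recurse into first so that no move clobbers data that has not moved yet. Below is a hypothetical byte-oriented sketch of that divide-and-conquer move order, assuming the lists are kept in offset order and neither the old nor the new layout overlaps; struct list_desc and rebalance() are stand-ins for the dm-vdo structures, and offsets are bytes rather than bits.

```c
/*
 * Hypothetical sketch of divide-and-conquer repacking: move the middle list
 * last or first depending on its direction of travel, so every destination is
 * free of not-yet-moved data (given sorted, non-overlapping layouts).
 */
#include <stdio.h>
#include <string.h>

struct list_desc {
	size_t start;		/* current offset in the buffer */
	size_t size;		/* length of the list's data */
	size_t new_start;	/* target offset after rebalancing */
};

static void rebalance(char *buf, struct list_desc *lists, unsigned first, unsigned last)
{
	if (first == last) {
		struct list_desc *l = &lists[first];

		/* Only one list: just move it if its offset changed. */
		if (l->start != l->new_start) {
			memmove(buf + l->new_start, buf + l->start, l->size);
			l->start = l->new_start;
		}
		return;
	}

	unsigned middle = (first + last) / 2;

	/*
	 * If the middle list moves towards higher offsets, the lists above it
	 * must get out of the way first; otherwise handle the lower half first.
	 */
	if (lists[middle].new_start > lists[middle].start) {
		rebalance(buf, lists, middle + 1, last);
		rebalance(buf, lists, first, middle);
	} else {
		rebalance(buf, lists, first, middle);
		rebalance(buf, lists, middle + 1, last);
	}
}

int main(void)
{
	char buf[] = "AAAABBBB....CCCC................";
	struct list_desc lists[] = {
		{ .start = 0,  .size = 4, .new_start = 2  },	/* "AAAA" */
		{ .start = 4,  .size = 4, .new_start = 10 },	/* "BBBB" */
		{ .start = 12, .size = 4, .new_start = 20 },	/* "CCCC" */
	};

	rebalance(buf, lists, 0, 2);
	for (unsigned i = 0; i < 3; i++)
		printf("list %u now at %zu: %.*s\n", i, lists[i].start,
		       (int)lists[i].size, buf + lists[i].start);
	return 0;
}
```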
/linux/fs/orangefs/
inode.c
   224  loff_t new_start = readahead_pos(rac);    in orangefs_readahead()  (local)
   237  readahead_expand(rac, new_start, new_len);    in orangefs_readahead()
/linux/drivers/gpu/drm/amd/amdkfd/
kfd_svm.c
   955  uint64_t new_start, uint64_t new_n, uint64_t *new_vram_pages)    in svm_range_split_array()  (argument)
   966  d = (new_start - old_start) * size;    in svm_range_split_array()
   971  d = (new_start == old_start) ? new_n * size : 0;    in svm_range_split_array()
  1159  svm_range_split_head(struct svm_range *prange, uint64_t new_start,    in svm_range_split_head()  (argument)
  1163  int r = svm_range_split(prange, new_start, prange->last, &head);    in svm_range_split_head()
  1167  if (!IS_ALIGNED(new_start, 1UL << prange->granularity))    in svm_range_split_head()
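svm_range_split_array() copies new_n elements beginning at offset (new_start - old_start) into the split-off range's array, and the remainder comes from the tail when the head was split off (new_start == old_start) or from the front otherwise. A stand-alone sketch of that offset arithmetic; split_array() and its element type are illustrative, not the amdkfd API:

```c
/*
 * Hypothetical sketch of splitting a per-element metadata array when the
 * range it covers is split, as in svm_range_split_array().
 */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <stdint.h>

static int split_array(const uint32_t *old, uint64_t old_start, uint64_t total_n,
		       uint64_t new_start, uint64_t new_n,
		       uint32_t **out_new, uint32_t **out_rest)
{
	uint64_t rest_n = total_n - new_n;
	uint64_t d;

	/* Elements for the split-off piece start at (new_start - old_start). */
	d = new_start - old_start;
	*out_new = malloc(new_n * sizeof(**out_new));
	if (!*out_new)
		return -1;
	memcpy(*out_new, old + d, new_n * sizeof(**out_new));

	/* The remainder is the tail if the head was split off, else the head. */
	d = (new_start == old_start) ? new_n : 0;
	*out_rest = malloc(rest_n * sizeof(**out_rest));
	if (!*out_rest) {
		free(*out_new);
		return -1;
	}
	memcpy(*out_rest, old + d, rest_n * sizeof(**out_rest));
	return 0;
}

int main(void)
{
	uint32_t pages[8] = { 0, 1, 2, 3, 4, 5, 6, 7 };	/* covers [100, 108) */
	uint32_t *head_split, *rest;

	/* Split the head [100, 103) away from the range starting at 100. */
	if (split_array(pages, 100, 8, 100, 3, &head_split, &rest))
		return 1;
	printf("split-off: %u..%u, remainder: %u..%u\n",
	       (unsigned)head_split[0], (unsigned)head_split[2],
	       (unsigned)rest[0], (unsigned)rest[4]);
	free(head_split);
	free(rest);
	return 0;
}
```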
/linux/drivers/gpu/drm/i915/gt/uc/
intel_guc_submission.c
  1182  __extend_last_switch(struct intel_guc *guc, u64 *prev_start, u32 new_start)    in __extend_last_switch()  (argument)
  1187  if (new_start == lower_32_bits(*prev_start))    in __extend_last_switch()
  1203  if (new_start < gt_stamp_last &&    in __extend_last_switch()
  1204  (new_start - gt_stamp_last) <= POLL_TIME_CLKS)    in __extend_last_switch()
  1207  if (new_start > gt_stamp_last &&    in __extend_last_switch()
  1208  (gt_stamp_last - new_start) <= POLL_TIME_CLKS && gt_stamp_hi)    in __extend_last_switch()
  1211  *prev_start = ((u64)gt_stamp_hi << 32) | new_start;    in __extend_last_switch()
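__extend_last_switch() reconstructs a 64-bit timestamp from a 32-bit sample by comparing it with the lower half of a recent 64-bit reference and adjusting the upper half up or down when the unsigned difference indicates a wrap. A hypothetical stand-alone version of that extension; extend_u32() and MAX_DELTA are invented names, with MAX_DELTA playing the role POLL_TIME_CLKS plays in the driver:

```c
/*
 * Hypothetical sketch of 32-to-64-bit timestamp extension: given a recent
 * 64-bit reference and a 32-bit sample taken within MAX_DELTA ticks of it,
 * reconstruct the sample's upper 32 bits, handling a wrap in either direction.
 */
#include <stdio.h>
#include <stdint.h>

#define MAX_DELTA 0x10000u	/* how far the sample may drift from the reference */

static uint64_t extend_u32(uint64_t reference, uint32_t sample)
{
	uint32_t ref_hi = (uint32_t)(reference >> 32);
	uint32_t ref_lo = (uint32_t)reference;

	/*
	 * sample < ref_lo but only slightly "ahead" modulo 2^32: the counter
	 * wrapped after the reference was taken, so the upper half grew.
	 */
	if (sample < ref_lo && (uint32_t)(sample - ref_lo) <= MAX_DELTA)
		ref_hi++;

	/*
	 * sample > ref_lo but the reference is only slightly ahead modulo
	 * 2^32: the reference already crossed a wrap the sample predates.
	 */
	if (sample > ref_lo && (uint32_t)(ref_lo - sample) <= MAX_DELTA && ref_hi)
		ref_hi--;

	return ((uint64_t)ref_hi << 32) | sample;
}

int main(void)
{
	uint64_t ref = 0x00000002FFFFFFF0ULL;	/* reference just before a 32-bit wrap */
	uint64_t ref2 = 0x0000000300000010ULL;	/* reference just after the wrap */

	printf("0x%llx\n", (unsigned long long)extend_u32(ref, 0x00000010u));  /* wrapped forward */
	printf("0x%llx\n", (unsigned long long)extend_u32(ref, 0xFFFFFF00u));  /* same epoch */
	printf("0x%llx\n", (unsigned long long)extend_u32(ref2, 0xFFFFFFF0u)); /* predates the wrap */
	return 0;
}
```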
/linux/drivers/net/ethernet/netronome/nfp/bpf/
jit.c
  4330  s16 new_start = range_start;    in nfp_bpf_opt_pkt_cache()  (local)
  4336  new_start = off;    in nfp_bpf_opt_pkt_cache()
  4349  if (new_end - new_start <= 64) {    in nfp_bpf_opt_pkt_cache()
  4351  range_start = new_start;    in nfp_bpf_opt_pkt_cache()
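The jit.c hits widen a tracked range of packet offsets to include a new access, but only commit the widened range if it still fits within a 64-byte cacheable window. A small sketch of that check; struct pkt_window and try_extend_window() are illustrative, not the NFP JIT's representation:

```c
/*
 * Hypothetical sketch of the window-growing check in nfp_bpf_opt_pkt_cache():
 * grow the tracked range only if the result still fits in one 64-byte window.
 */
#include <stdbool.h>
#include <stdio.h>

#define WINDOW_BYTES 64

struct pkt_window {
	int start;	/* first cached byte offset */
	int end;	/* one past the last cached byte offset */
};

static bool try_extend_window(struct pkt_window *w, int off, int len)
{
	int new_start = w->start;
	int new_end = w->end;

	if (off < new_start)
		new_start = off;
	if (off + len > new_end)
		new_end = off + len;

	/* Only commit the wider range if it still fits in one window. */
	if (new_end - new_start <= WINDOW_BYTES) {
		w->start = new_start;
		w->end = new_end;
		return true;
	}
	return false;
}

int main(void)
{
	struct pkt_window w = { .start = 12, .end = 20 };

	printf("%d\n", try_extend_window(&w, 60, 8));	/* fits: [12, 68) is 56 bytes */
	printf("%d\n", try_extend_window(&w, 100, 4));	/* would span 92 bytes: rejected */
	printf("[%d, %d)\n", w.start, w.end);
	return 0;
}
```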
/linux/fs/ocfs2/
alloc.c
  5835  unsigned int new_start)    in ocfs2_truncate_log_can_coalesce()  (argument)
  5848  return current_tail == new_start;    in ocfs2_truncate_log_can_coalesce()