
Searched refs: need_flush (Results 1 – 14 of 14) sorted by relevance

/linux/arch/loongarch/include/asm/
mmu_context.h  52 get_new_mmu_context(struct mm_struct *mm, unsigned long cpu, bool *need_flush) in get_new_mmu_context() argument
57 *need_flush = true; /* start new asid cycle */ in get_new_mmu_context()
91 bool need_flush = false; in switch_mm_irqs_off() local
96 get_new_mmu_context(next, cpu, &need_flush); in switch_mm_irqs_off()
103 if (need_flush) in switch_mm_irqs_off()
151 bool need_flush = false; in drop_mmu_context() local
154 get_new_mmu_context(mm, cpu, &need_flush); in drop_mmu_context()
157 if (need_flush) in drop_mmu_context()
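
Across these hits, need_flush implements one idiom: the routine that detects staleness records it, and a later, single flush acts on it. In the LoongArch ASID allocator the flag is an out-parameter: get_new_mmu_context() sets it when the ASID space wraps, and both callers, switch_mm_irqs_off() and drop_mmu_context(), perform the TLB flush themselves. A minimal userspace sketch of that split, with invented names and a toy 4-bit ASID space:

```c
/* Toy model of the out-parameter pattern above, with a 4-bit ASID
 * space. The allocator only reports that a flush is needed; the
 * caller decides when to perform it. Names are illustrative. */
#include <stdbool.h>
#include <stdio.h>

#define ASID_BITS 4
#define ASID_MASK ((1u << ASID_BITS) - 1)

static unsigned int asid_cache = ASID_MASK; /* last ASID handed out */

static unsigned int get_new_asid(bool *need_flush)
{
	unsigned int asid = asid_cache + 1;

	if ((asid & ASID_MASK) == 0)
		*need_flush = true; /* wrapped: start a new asid cycle */

	asid_cache = asid;
	return asid;
}

int main(void)
{
	for (int i = 0; i < 40; i++) {
		bool need_flush = false;
		unsigned int asid = get_new_asid(&need_flush);

		if (need_flush)
			printf("asid %u: new cycle, flush whole TLB once\n", asid);
	}
	return 0;
}
```
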
/linux/arch/arm/mm/
pmsa-v7.c  368 unsigned int subregions, bool need_flush) in mpu_setup_region() argument
387 if (need_flush) in mpu_setup_region()
441 bool need_flush = region == PMSAv7_RAM_REGION; in pmsav7_setup() local
448 xip[i].subreg, need_flush); in pmsav7_setup()
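
In pmsa-v7.c the flag is computed by the caller from a property of the region (only the RAM region's contents must be cleaned before the MPU is programmed) and threaded into the helper. A sketch of that shape, with invented region names:

```c
/* Sketch of the pmsav7_setup() shape: the caller derives need_flush
 * from a property of the region and passes it to the helper, which
 * flushes only when asked. Region names are invented. */
#include <stdbool.h>
#include <stdio.h>

enum region_id { REGION_ROM, REGION_RAM, REGION_IO, NR_REGIONS };

static void cache_flush_region(int region)
{
	printf("clean caches for region %d\n", region);
}

static void mpu_setup_region(int region, bool need_flush)
{
	if (need_flush)
		cache_flush_region(region); /* data must be visible first */
	/* ... program base, size and attributes for the region ... */
}

int main(void)
{
	for (int r = 0; r < NR_REGIONS; r++) {
		bool need_flush = (r == REGION_RAM); /* only RAM needs it */

		mpu_setup_region(r, need_flush);
	}
	return 0;
}
```
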
/linux/arch/x86/mm/
tlb.c  222 unsigned int need_flush : 1; member
232 ns.need_flush = 1; in choose_new_asid()
245 ns.need_flush = 0; in choose_new_asid()
259 ns.need_flush = (this_cpu_read(cpu_tlbstate.ctxs[asid].tlb_gen) < next_tlb_gen); in choose_new_asid()
272 ns.need_flush = true; in choose_new_asid()
566 bool need_flush) in load_new_mm_cr3() argument
570 if (need_flush) { in load_new_mm_cr3()
901 ns.need_flush = true; in switch_mm_irqs_off()
947 if (ns.need_flush) { in switch_mm_irqs_off()
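
tlb.c packs the decision into a one-bit field of a small struct returned by choose_new_asid(), separating the choice (reuse an ASID, or flush because its cached generation is stale) from the CR3 load that acts on it. A compact model, with invented generation counters:

```c
/* Model of the tlb.c idiom: the choice ("which ASID, and does it need
 * a flush?") is packed into one small struct so deciding and acting
 * stay separate. Generation counters here are invented. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define NR_ASIDS 8

struct new_asid {
	unsigned int asid : 15;
	unsigned int need_flush : 1;
};

static uint64_t cached_tlb_gen[NR_ASIDS]; /* last generation flushed */

static struct new_asid choose_new_asid(unsigned int asid,
				       uint64_t next_tlb_gen)
{
	struct new_asid ns;

	ns.asid = asid;
	/* Flush only if this ASID's cached translations are older than
	 * the generation we are switching to. */
	ns.need_flush = cached_tlb_gen[asid] < next_tlb_gen;
	return ns;
}

int main(void)
{
	struct new_asid ns = choose_new_asid(3, 7);

	if (ns.need_flush) {
		printf("load ASID %u with a full flush\n", ns.asid);
		cached_tlb_gen[ns.asid] = 7;
	} else {
		printf("load ASID %u, TLB still valid\n", ns.asid);
	}
	return 0;
}
```
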
/linux/arch/x86/kernel/
amd_gart_64.c  87 static bool need_flush; /* global flush state. set for each gart wrap */ variable
104 need_flush = true; in alloc_iommu()
113 need_flush = true; in alloc_iommu()
117 need_flush = true; in alloc_iommu()
142 if (need_flush) { in flush_gart()
144 need_flush = false; in flush_gart()
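
amd_gart_64.c keeps the flag global: every allocation that wraps the GART merely marks the IOTLB stale, and flush_gart() later tests and clears it, so many wraps cost one flush. A userspace sketch of that batching:

```c
/* Userspace sketch of the amd_gart_64.c batching: allocations only
 * mark the IOTLB stale; a single flush point tests and clears the
 * flag, so many wraps cost one flush. */
#include <stdbool.h>
#include <stdio.h>

static bool need_flush;	/* global flush state, set on each wrap */
static unsigned int next_slot;
static const unsigned int table_size = 4;

static unsigned int alloc_iommu(void)
{
	if (next_slot >= table_size) {
		next_slot = 0;
		need_flush = true; /* reused entries may still be cached */
	}
	return next_slot++;
}

static void flush_gart(void)
{
	if (need_flush) {
		printf("one IOTLB flush covers all prior allocations\n");
		need_flush = false;
	}
}

int main(void)
{
	for (int i = 0; i < 6; i++)
		alloc_iommu();	/* wraps once at the fifth allocation */
	flush_gart();
	return 0;
}
```
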
/linux/drivers/gpu/drm/etnaviv/
etnaviv_buffer.c  352 bool need_flush = switch_mmu_context || gpu->flush_seq != new_flush_seq; in etnaviv_buffer_queue() local
370 if (need_flush || switch_context) { in etnaviv_buffer_queue()
377 if (need_flush) { in etnaviv_buffer_queue()
406 if (need_flush) { in etnaviv_buffer_queue()
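
etnaviv derives the flag from a sequence-number comparison: the MMU context bumps flush_seq whenever mappings change, and the GPU-side copy lagging behind means a TLB flush must be queued with the command buffer. A reduced model with illustrative structs:

```c
/* Reduced model of the etnaviv check: the MMU context bumps a
 * sequence number whenever mappings change; the GPU-side copy
 * lagging behind tells us a TLB flush must be queued. */
#include <stdbool.h>
#include <stdio.h>

struct gpu_state { unsigned int flush_seq; };
struct mmu_context { unsigned int flush_seq; };

static void buffer_queue(struct gpu_state *gpu, struct mmu_context *ctx,
			 bool switch_mmu_context)
{
	unsigned int new_flush_seq = ctx->flush_seq;
	bool need_flush = switch_mmu_context ||
			  gpu->flush_seq != new_flush_seq;

	if (need_flush) {
		printf("emit TLB flush before this command buffer\n");
		gpu->flush_seq = new_flush_seq;
	}
	/* ... link the new command buffer into the ring ... */
}

int main(void)
{
	struct gpu_state gpu = { 0 };
	struct mmu_context ctx = { 0 };

	buffer_queue(&gpu, &ctx, false);	/* in sync: no flush */
	ctx.flush_seq++;			/* a mapping changed */
	buffer_queue(&gpu, &ctx, false);	/* stale: flush queued */
	return 0;
}
```
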
/linux/arch/sparc/kernel/
iommu-common.c  19 static inline bool need_flush(struct iommu_map_table *iommu) in need_flush() function
206 (n < pool->hint || need_flush(iommu))) { in iommu_tbl_range_alloc()
/linux/mm/
highmem.c  199 int need_flush = 0; in flush_all_zero_pkmaps() local
232 need_flush = 1; in flush_all_zero_pkmaps()
234 if (need_flush) in flush_all_zero_pkmaps()
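
flush_all_zero_pkmaps() shows the pattern at its plainest: unmap every zero-reference pkmap entry in one pass, remember whether anything was torn down, and issue a single kernel TLB flush at the end. A toy model:

```c
/* Toy model of flush_all_zero_pkmaps(): unmap every zero-reference
 * entry in one pass, remember whether anything was torn down, and
 * flush once at the end. A count of 1 means "mapped, no users". */
#include <stdio.h>

#define LAST_PKMAP 8

static int pkmap_count[LAST_PKMAP] = { 0, 1, 0, 2, 1, 0, 0, 3 };

int main(void)
{
	int need_flush = 0;

	for (int i = 0; i < LAST_PKMAP; i++) {
		if (pkmap_count[i] != 1)
			continue;	/* in use, or never mapped */
		pkmap_count[i] = 0;	/* unmap the idle entry */
		need_flush = 1;
	}
	if (need_flush)
		printf("one TLB flush covers every entry unmapped above\n");
	return 0;
}
```
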
/linux/drivers/infiniband/hw/mlx4/
cm.c  529 int need_flush = 0; in mlx4_ib_cm_paravirt_clean() local
537 need_flush |= !cancel_delayed_work(&map->timeout); in mlx4_ib_cm_paravirt_clean()
543 if (need_flush) in mlx4_ib_cm_paravirt_clean()
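
The mlx4 cleanup ORs together the results of many cancel attempts: if even one timeout work item was already running and could not be cancelled, the workqueue is flushed once so everything has finished before teardown continues. A stubbed sketch; cancel_delayed_work_stub() stands in for the kernel API, which returns true iff the work was cancelled before it ran:

```c
/* Stubbed sketch of the mlx4 teardown: OR together many cancel
 * attempts and flush once if any handler was already running.
 * cancel_delayed_work_stub() is a stand-in, not the kernel API. */
#include <stdbool.h>
#include <stdio.h>

static bool cancel_delayed_work_stub(int id)
{
	return id != 2;	/* pretend entry 2 was already executing */
}

int main(void)
{
	int need_flush = 0;

	for (int id = 0; id < 4; id++)
		need_flush |= !cancel_delayed_work_stub(id);

	if (need_flush)
		printf("flush workqueue: a handler is still in flight\n");
	return 0;
}
```
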
/linux/arch/arm64/kvm/hyp/
pgtable.c  1135 bool need_flush = false; in stage2_unmap_walker() local
1151 need_flush = !stage2_has_fwb(pgt); in stage2_unmap_walker()
1161 if (need_flush && mm_ops->dcache_clean_inval_poc) in stage2_unmap_walker()
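
In the arm64 stage-2 walker the flag encodes a hardware capability: with FEAT_FWB the system keeps guest memory coherent and no cache maintenance is needed on unmap; without it, the range is cleaned and invalidated to the point of coherency, and only if the mm_ops supplies a callback. A sketch with illustrative types:

```c
/* Sketch of the stage-2 unmap logic: FWB makes the cache maintenance
 * unnecessary; otherwise clean+invalidate to PoC, and only if the
 * mm_ops provides the hook. Types and names are illustrative. */
#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

struct mm_ops {
	void (*dcache_clean_inval_poc)(void *addr, size_t size);
};

static void clean_inval(void *addr, size_t size)
{
	printf("clean+invalidate %zu bytes to PoC\n", size);
}

static void unmap_walker(bool has_fwb, const struct mm_ops *ops,
			 void *addr, size_t size)
{
	bool need_flush = !has_fwb; /* FWB makes the CMO unnecessary */

	/* ... clear the stage-2 PTE and invalidate the TLB entry ... */

	if (need_flush && ops->dcache_clean_inval_poc)
		ops->dcache_clean_inval_poc(addr, size);
}

int main(void)
{
	struct mm_ops ops = { .dcache_clean_inval_poc = clean_inval };
	char page[64];

	unmap_walker(false, &ops, page, sizeof(page)); /* CMO runs */
	unmap_walker(true, &ops, page, sizeof(page));  /* skipped */
	return 0;
}
```
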
/linux/fs/ceph/
snap.c  583 capsnap->need_flush = true; in ceph_queue_cap_snap()
595 ceph_cap_string(dirty), capsnap->need_flush ? "" : "no_flush"); in ceph_queue_cap_snap()
super.h  262 bool need_flush; member
caps.c  1476 if (capsnap->need_flush) { in __prep_cap()
1641 BUG_ON(!capsnap->need_flush); in __ceph_flush_snaps()
3204 if (!capsnap->need_flush && in ceph_try_drop_cap_snap()
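
Ceph is the one case where the flag persists in a data structure: need_flush lives in the cap snapshot, is set when the snap is queued with dirty state, is asserted by the flush path (the BUG_ON above), and gates whether the capsnap may be dropped. A much-reduced sketch of that lifecycle; the real clearing is tied to MDS acknowledgements, which this model glosses over, and the fields are illustrative:

```c
/* Much-reduced lifecycle sketch of the ceph capsnap flag; the real
 * clearing is driven by MDS acknowledgements. */
#include <assert.h>
#include <stdbool.h>
#include <stdio.h>

struct cap_snap {
	unsigned int dirty;
	bool need_flush;	/* must reach the MDS before dropping */
};

static void queue_cap_snap(struct cap_snap *capsnap, unsigned int dirty)
{
	capsnap->dirty = dirty;
	capsnap->need_flush = true;
}

static void flush_snaps(struct cap_snap *capsnap)
{
	assert(capsnap->need_flush);	/* mirrors the BUG_ON() above */
	/* ... send the flush to the MDS ... */
	capsnap->need_flush = false;
}

static bool try_drop_cap_snap(const struct cap_snap *capsnap)
{
	return !capsnap->need_flush;	/* droppable only once flushed */
}

int main(void)
{
	struct cap_snap snap = { 0 };

	queue_cap_snap(&snap, 0x1);
	printf("droppable before flush: %d\n", try_drop_cap_snap(&snap));
	flush_snaps(&snap);
	printf("droppable after flush:  %d\n", try_drop_cap_snap(&snap));
	return 0;
}
```
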
/linux/drivers/md/
dm-writecache.c  999 bool need_flush = false; in writecache_resume() local
1070 need_flush = true; in writecache_resume()
1091 need_flush = true; in writecache_resume()
1098 if (need_flush) { in writecache_resume()
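
dm-writecache uses the flag to batch metadata commits: the resume path walks every cache block, repairs inconsistent entries as it goes, and commits the on-disk metadata once after the scan. A toy model with invented entry states:

```c
/* Toy model of the resume scan: repair inconsistent entries during
 * the walk, commit the metadata once afterwards. Entry states are
 * invented. */
#include <stdbool.h>
#include <stdio.h>

#define N_BLOCKS 4

static int entry_state[N_BLOCKS] = { 0, -1, 0, -1 }; /* -1: bad */

int main(void)
{
	bool need_flush = false;

	for (int i = 0; i < N_BLOCKS; i++) {
		if (entry_state[i] < 0) {
			entry_state[i] = 0;	/* discard the bad entry */
			need_flush = true;	/* metadata changed */
		}
	}
	if (need_flush)
		printf("commit repaired metadata once, after the scan\n");
	return 0;
}
```
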
/linux/drivers/net/vmxnet3/
vmxnet3_drv.c  1617 bool need_flush = false; in vmxnet3_rq_rx_complete() local
1678 need_flush |= act == XDP_REDIRECT; in vmxnet3_rq_rx_complete()
1735 need_flush |= act == XDP_REDIRECT; in vmxnet3_rq_rx_complete()
2010 if (need_flush) in vmxnet3_rq_rx_complete()
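
vmxnet3's poll loop accumulates whether any frame took XDP_REDIRECT and calls the flush hook once per completion run rather than per packet. A sketch with invented verdicts; xdp_do_flush_stub() models the kernel's xdp_do_flush():

```c
/* Sketch of the per-poll batching: accumulate the XDP verdicts and
 * flush once. xdp_do_flush_stub() models the kernel's xdp_do_flush();
 * the verdict list is invented. */
#include <stdbool.h>
#include <stdio.h>

enum xdp_action { XDP_PASS, XDP_DROP, XDP_REDIRECT };

static void xdp_do_flush_stub(void)
{
	printf("flush redirect queues once per poll cycle\n");
}

int main(void)
{
	enum xdp_action verdicts[] = { XDP_PASS, XDP_REDIRECT, XDP_DROP };
	bool need_flush = false;

	for (size_t i = 0; i < sizeof(verdicts) / sizeof(verdicts[0]); i++) {
		/* ... deliver, drop, or queue the frame for redirect ... */
		need_flush |= verdicts[i] == XDP_REDIRECT;
	}
	if (need_flush)
		xdp_do_flush_stub();	/* batches all redirects above */
	return 0;
}
```
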