Occurrences of "need_flush" in Linux-v5.4:

/Linux-v5.4/arch/x86/mm/tlb.c
      77  u16 *new_asid, bool *need_flush)   [in choose_new_asid(), argument]
      83  *need_flush = true;   [in choose_new_asid()]
      96  *need_flush = (this_cpu_read(cpu_tlbstate.ctxs[asid].tlb_gen) <   [in choose_new_asid()]
     110  *need_flush = true;   [in choose_new_asid()]
     113  static void load_new_mm_cr3(pgd_t *pgdir, u16 new_asid, bool need_flush)   [in load_new_mm_cr3(), argument]
     117  if (need_flush) {   [in load_new_mm_cr3()]
     283  bool need_flush;   [in switch_mm_irqs_off(), local]
     371  need_flush = true;   [in switch_mm_irqs_off()]
     407  choose_new_asid(next, next_tlb_gen, &new_asid, &need_flush);   [in switch_mm_irqs_off()]
     414  if (need_flush) {   [in switch_mm_irqs_off()]
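
Across these tlb.c hits one pattern is visible: choose_new_asid() reports through its *need_flush out-parameter whether the ASID it picked may still hold stale translations (its recorded tlb_gen is behind next_tlb_gen), and load_new_mm_cr3() only flushes when told to. Below is a minimal user-space sketch of that convention; the types and the recycling policy are simplified assumptions of mine, not the kernel's code:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define NR_ASIDS 6

    struct asid_ctx {
        uint64_t ctx_id;   /* which address space last used this ASID */
        uint64_t tlb_gen;  /* TLB generation this ASID has seen for it */
    };

    static struct asid_ctx ctxs[NR_ASIDS];

    /* Pick an ASID for (ctx_id, next_tlb_gen) and report through the
     * out-parameter whether stale entries force a flush, mirroring the
     * calling convention of choose_new_asid(). */
    static uint16_t choose_asid(uint64_t ctx_id, uint64_t next_tlb_gen,
                                bool *need_flush)
    {
        for (uint16_t asid = 0; asid < NR_ASIDS; asid++) {
            if (ctxs[asid].ctx_id != ctx_id)
                continue;
            /* Reuse: flush only if our recorded generation fell behind. */
            *need_flush = ctxs[asid].tlb_gen < next_tlb_gen;
            ctxs[asid].tlb_gen = next_tlb_gen;
            return asid;
        }
        /* No ASID remembers this address space: recycle one and flush. */
        ctxs[0] = (struct asid_ctx){ .ctx_id = ctx_id, .tlb_gen = next_tlb_gen };
        *need_flush = true;
        return 0;
    }

    int main(void)
    {
        bool need_flush;
        uint16_t asid = choose_asid(42, 7, &need_flush);

        printf("asid=%u need_flush=%d\n", (unsigned)asid, (int)need_flush);
        return 0;
    }
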
/Linux-v5.4/arch/arm/mm/pmsa-v7.c
     367  unsigned int subregions, bool need_flush)   [in mpu_setup_region(), argument]
     386  if (need_flush)   [in mpu_setup_region()]
     440  bool need_flush = region == PMSAv7_RAM_REGION;   [in pmsav7_setup(), local]
     447  xip[i].subreg, need_flush);   [in pmsav7_setup()]
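
Here the flag is computed once, at line 440, from the region's identity: only the RAM region is flagged, and mpu_setup_region() acts on it before programming the region. A toy model of deriving the flag from the region ID (the names and the flush action are illustrative, not the ARM code):

    #include <stdbool.h>
    #include <stdio.h>

    #define RAM_REGION 1   /* stand-in for PMSAv7_RAM_REGION */

    static void setup_region(unsigned int region, bool need_flush)
    {
        if (need_flush)
            printf("region %u: flush caches before reprogramming\n", region);
        printf("region %u: program MPU registers\n", region);
    }

    int main(void)
    {
        /* Only the RAM region's identity arms the flag, as at line 440. */
        for (unsigned int region = 0; region < 3; region++)
            setup_region(region, region == RAM_REGION);
        return 0;
    }
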
/Linux-v5.4/arch/x86/kernel/amd_gart_64.c
      89  static bool need_flush; /* global flush state. set for each gart wrap */   [variable]
     107  need_flush = true;   [in alloc_iommu()]
     116  need_flush = true;   [in alloc_iommu()]
     120  need_flush = true;   [in alloc_iommu()]
     145  if (need_flush) {   [in flush_gart()]
     147  need_flush = false;   [in flush_gart()]
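
This entry shows the flag at file scope: each allocator wrap at lines 107/116/120 arms need_flush, and flush_gart() at 145-147 performs one global flush and disarms it. A runnable model of that arm-once, flush-once shape (the allocator details are invented for the demo):

    #include <stdbool.h>
    #include <stdio.h>

    static bool need_flush;   /* set on each allocator wrap */
    static unsigned int next_slot, nr_slots = 4;

    static unsigned int alloc_slot(void)
    {
        if (next_slot >= nr_slots) {
            next_slot = 0;
            need_flush = true;   /* stale mappings may still be cached */
        }
        return next_slot++;
    }

    static void flush_if_needed(void)
    {
        if (need_flush) {
            puts("one global IOMMU flush");
            need_flush = false;
        }
    }

    int main(void)
    {
        for (int i = 0; i < 6; i++)
            alloc_slot();
        flush_if_needed();   /* flushes once: the allocator wrapped */
        flush_if_needed();   /* no-op: flag already cleared */
        return 0;
    }
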
/Linux-v5.4/mm/highmem.c
     166  int need_flush = 0;   [in flush_all_zero_pkmaps(), local]
     197  need_flush = 1;   [in flush_all_zero_pkmaps()]
     199  if (need_flush)   [in flush_all_zero_pkmaps()]
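
flush_all_zero_pkmaps() batches TLB work: the loop unmaps every unused permanent-kmap slot and merely records that it did, and one kernel-range flush runs after the loop if anything changed. A simplified model (treating count 1 as "mapped but no longer used" is my reading of highmem.c):

    #include <stdbool.h>
    #include <stdio.h>

    #define LAST_PKMAP 8

    static int pkmap_count[LAST_PKMAP] = { 0, 2, 1, 1, 0, 0, 3, 1 };

    static void flush_all_zero_pkmaps_model(void)
    {
        bool need_flush = false;

        for (int i = 0; i < LAST_PKMAP; i++) {
            if (pkmap_count[i] != 1)   /* still in use, or already free */
                continue;
            pkmap_count[i] = 0;
            printf("unmap slot %d\n", i);
            need_flush = true;
        }
        if (need_flush)
            puts("one flush_tlb_kernel_range() covering every slot");
    }

    int main(void)
    {
        flush_all_zero_pkmaps_model();
        return 0;
    }
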
/Linux-v5.4/drivers/gpu/drm/etnaviv/etnaviv_buffer.c
     325  bool need_flush = switch_mmu_context || gpu->flush_seq != new_flush_seq;   [in etnaviv_buffer_queue(), local]
     341  if (need_flush || switch_context) {   [in etnaviv_buffer_queue()]
     348  if (need_flush) {   [in etnaviv_buffer_queue()]
     378  if (need_flush) {   [in etnaviv_buffer_queue()]
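
Line 325 shows the flag being derived up front from two conditions, an MMU context switch or a moved global flush sequence number, and the later hits emit flush commands only when it is set. A compact model of the sequence-number part (the struct is reduced to the minimum for the sketch):

    #include <stdbool.h>
    #include <stdio.h>

    struct gpu { unsigned int flush_seq; unsigned int mmu_ctx; };

    static void queue_buffer(struct gpu *gpu, unsigned int new_flush_seq,
                             unsigned int new_ctx)
    {
        bool switch_ctx = gpu->mmu_ctx != new_ctx;
        bool need_flush = switch_ctx || gpu->flush_seq != new_flush_seq;

        if (need_flush) {
            puts("emit TLB-flush commands into the ring");
            gpu->flush_seq = new_flush_seq;
        }
        gpu->mmu_ctx = new_ctx;
        puts("emit draw commands");
    }

    int main(void)
    {
        struct gpu gpu = { .flush_seq = 1, .mmu_ctx = 0 };

        queue_buffer(&gpu, 1, 0);   /* nothing moved: no flush */
        queue_buffer(&gpu, 2, 0);   /* sequence moved: flush first */
        return 0;
    }
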
/Linux-v5.4/arch/sparc/kernel/iommu-common.c
      19  static inline bool need_flush(struct iommu_map_table *iommu)   [function]
     210  (n < pool->hint || need_flush(iommu))) {   [in iommu_tbl_range_alloc()]
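
Unlike every other entry here, the sparc code defines need_flush as a predicate function rather than a variable; the allocator consults it when deciding whether to flush before reusing a range. Roughly modelled below, with the map-table struct cut down to a flags word (an assumption for the sketch):

    #include <stdbool.h>
    #include <stdio.h>

    #define IOMMU_NEED_FLUSH 0x1UL   /* stand-in flag bit */

    struct iommu_map_table {
        unsigned long flags;
    };

    static inline bool need_flush(struct iommu_map_table *iommu)
    {
        return (iommu->flags & IOMMU_NEED_FLUSH) != 0;
    }

    int main(void)
    {
        struct iommu_map_table iommu = { .flags = IOMMU_NEED_FLUSH };

        if (need_flush(&iommu))
            puts("lazy flush before handing out this range");
        iommu.flags &= ~IOMMU_NEED_FLUSH;   /* request consumed */
        return 0;
    }
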
/Linux-v5.4/drivers/infiniband/hw/mlx4/cm.c
     411  int need_flush = 0;   [in mlx4_ib_cm_paravirt_clean(), local]
     419  need_flush |= !cancel_delayed_work(&map->timeout);   [in mlx4_ib_cm_paravirt_clean()]
     425  if (need_flush)   [in mlx4_ib_cm_paravirt_clean()]
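
Lines 419-425 are the cancel-or-flush idiom: cancel_delayed_work() returns false when the work item was not pending, for instance because its handler is already running, so the loop ORs those failures into need_flush and flushes once afterwards to wait for any in-flight handlers. A user-space model with a stand-in for the cancel call:

    #include <stdbool.h>
    #include <stdio.h>

    /* Hypothetical stand-in for cancel_delayed_work(): true when pending
     * work was cancelled, false when it may already be executing. */
    static bool try_cancel(int id)
    {
        return id != 2;   /* pretend entry 2's handler already started */
    }

    int main(void)
    {
        int need_flush = 0;

        for (int id = 0; id < 4; id++)
            need_flush |= !try_cancel(id);

        if (need_flush)
            puts("flush the workqueue: wait for in-flight handlers");
        return 0;
    }
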
/Linux-v5.4/fs/xfs/xfs_log.c
    1717  bool need_flush)   [in xlog_write_iclog(), argument]
    1751  if (need_flush)   [in xlog_write_iclog()]
    1866  bool need_flush = true, split = false;   [in xlog_sync(), local]
    1926  need_flush = false;   [in xlog_sync()]
    1930  xlog_write_iclog(log, iclog, bno, count, need_flush);   [in xlog_sync()]
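
In XFS the flag travels as a parameter: xlog_sync() decides whether the log write still needs a preceding device cache flush, clearing need_flush at line 1926 when it has flushed the data device by other means, and xlog_write_iclog() applies the decision at submission time. Mapping the flag to a REQ_PREFLUSH-style bit is my reading of the pattern; the code below is a user-space model, not the XFS code:

    #include <stdbool.h>
    #include <stdio.h>

    #define REQ_PREFLUSH 0x1u   /* stand-in for the block-layer flag */

    static void write_iclog(unsigned int op_flags)
    {
        if (op_flags & REQ_PREFLUSH)
            puts("device cache flush ordered before this write");
        puts("submit log write");
    }

    int main(void)
    {
        /* Like xlog_sync(): default to flushing, clear the flag when the
         * cache was already flushed explicitly. */
        bool need_flush = true;

        write_iclog(need_flush ? REQ_PREFLUSH : 0);
        return 0;
    }
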
/Linux-v5.4/fs/ceph/snap.c
     518  capsnap->need_flush = true;   [in ceph_queue_cap_snap()]
     530  capsnap->need_flush ? "" : "no_flush");   [in ceph_queue_cap_snap()]

/Linux-v5.4/fs/ceph/super.h
     215  bool need_flush;   [member]

/Linux-v5.4/fs/ceph/caps.c
    1375  if (capsnap->need_flush) {   [in __send_cap()]
    1475  BUG_ON(!capsnap->need_flush);   [in __ceph_flush_snaps()]
    2861  if (!capsnap->need_flush &&   [in ceph_try_drop_cap_snap()]
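
Ceph stores the flag as state: super.h declares need_flush as a member of the capsnap, snap.c sets it when the snapshot is queued, and caps.c consults it much later when deciding whether to flush or drop the snapshot. A cut-down model of set-at-queue, test-at-flush (the struct and functions are reduced to the bare pattern and are not ceph's):

    #include <stdbool.h>
    #include <stdio.h>

    struct cap_snap {
        int id;
        bool need_flush;   /* snapshot carries data that must be flushed */
    };

    static void queue_cap_snap(struct cap_snap *capsnap, bool dirty)
    {
        capsnap->need_flush = dirty;
        printf("queued snap %d (%s)\n", capsnap->id,
               capsnap->need_flush ? "flush" : "no_flush");
    }

    static void flush_snaps(struct cap_snap *capsnap)
    {
        if (capsnap->need_flush)
            printf("flush snap %d to the server\n", capsnap->id);
    }

    int main(void)
    {
        struct cap_snap snap = { .id = 1 };

        queue_cap_snap(&snap, true);
        flush_snaps(&snap);
        return 0;
    }
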
/Linux-v5.4/drivers/md/dm-writecache.c
     878  bool need_flush = false;   [in writecache_resume(), local]
     931  need_flush = true;   [in writecache_resume()]
     952  need_flush = true;   [in writecache_resume()]
     959  if (need_flush) {   [in writecache_resume()]

/Linux-v5.4/drivers/md/raid10.c
    4441  int need_flush = 0;   [in reshape_request(), local]
    4482  need_flush = 1;   [in reshape_request()]
    4504  need_flush = 1;   [in reshape_request()]
    4514  if (need_flush ||   [in reshape_request()]
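
Both md hits share one shape: a resume or reshape pass scans many records, remembers that at least one needed attention, and commits once after the scan instead of once per record. A generic runnable model of that scan-then-commit-once shape (the repair predicate is invented for the demo):

    #include <stdbool.h>
    #include <stdio.h>

    /* Hypothetical per-entry check; true when the entry was repaired. */
    static bool repair_entry(int i)
    {
        return (i % 3) == 0;   /* pretend every third entry needed fixing */
    }

    int main(void)
    {
        bool need_flush = false;

        for (int i = 0; i < 9; i++)
            if (repair_entry(i))
                need_flush = true;

        if (need_flush)
            puts("commit the repaired metadata once");
        return 0;
    }
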
/Linux-v5.4/drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c
     218  unsigned int n, m, need_flush;   [in cpu_fill(), local]
     221  err = i915_gem_object_prepare_write(obj, &need_flush);   [in cpu_fill()]
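
In the i915 selftest, need_flush is again an out-parameter: i915_gem_object_prepare_write() tells the caller what CPU cache maintenance is owed around its writes. The sketch below models only that calling convention; the flag name and bit value are placeholders of mine, not i915's:

    #include <stdio.h>

    #define FLUSH_AFTER_WRITE 0x2u   /* hypothetical flag bit */

    static int prepare_write(unsigned int *needs_flush)
    {
        /* Pretend the object is not CPU-cache coherent. */
        *needs_flush = FLUSH_AFTER_WRITE;
        return 0;
    }

    int main(void)
    {
        unsigned int needs_flush;

        if (prepare_write(&needs_flush))
            return 1;
        /* ... CPU writes into the mapping ... */
        if (needs_flush & FLUSH_AFTER_WRITE)
            puts("flush the written range out of the CPU caches");
        return 0;
    }
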
/Linux-v5.4/arch/x86/kvm/mmu.c
    1877  int need_flush = 0;   [in kvm_set_pte_rmapp(), local]
    1890  need_flush = 1;   [in kvm_set_pte_rmapp()]
    1909  if (need_flush && kvm_available_flush_tlb_with_range()) {   [in kvm_set_pte_rmapp()]
    1914  return need_flush;   [in kvm_set_pte_rmapp()]
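
The KVM hits document a return-value contract: kvm_set_pte_rmapp() sets need_flush while rewriting entries, then either performs a ranged flush itself (line 1909) or returns the flag so its caller does the flush (line 1914). A simplified, self-contained model of that contract:

    #include <stdbool.h>
    #include <stdio.h>

    static bool ranged_flush_available = true;

    /* Returns nonzero if the caller still owes a flush. */
    static int update_entries(int *entries, int n, int new_val)
    {
        int need_flush = 0;

        for (int i = 0; i < n; i++) {
            if (entries[i] != new_val) {
                entries[i] = new_val;
                need_flush = 1;
            }
        }
        if (need_flush && ranged_flush_available) {
            puts("flush just this range now");
            return 0;          /* caller need not flush again */
        }
        return need_flush;     /* caller performs the global flush */
    }

    int main(void)
    {
        int e[4] = { 1, 2, 2, 3 };

        if (update_entries(e, 4, 2))
            puts("caller: global TLB flush");
        return 0;
    }
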