Searched refs:need_flush (Results 1 – 15 of 15) sorted by relevance
/linux/arch/x86/mm/

tlb.c
    214  u16 *new_asid, bool *need_flush)    in choose_new_asid() argument
    220  *need_flush = true;    in choose_new_asid()
    233  *need_flush = (this_cpu_read(cpu_tlbstate.ctxs[asid].tlb_gen) <    in choose_new_asid()
    247  *need_flush = true;    in choose_new_asid()
    276  static void load_new_mm_cr3(pgd_t *pgdir, u16 new_asid, bool need_flush)    in load_new_mm_cr3() argument
    280  if (need_flush) {    in load_new_mm_cr3()
    496  bool need_flush;    in switch_mm_irqs_off() local
    591  need_flush = true;    in switch_mm_irqs_off()
    617  choose_new_asid(next, next_tlb_gen, &new_asid, &need_flush);    in switch_mm_irqs_off()
    624  if (need_flush) {    in switch_mm_irqs_off()

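The x86 matches above all serve one idea: reuse a cached ASID without flushing when its recorded TLB generation is still current, and request a flush otherwise. A minimal userspace sketch of that pattern follows; choose_asid(), asid_cache, MAX_ASIDS, and the ctx_id field are illustrative stand-ins, not the kernel's actual definitions.

    /* Sketch only: reuse an ASID slot without flushing when its cached
     * TLB generation is up to date, otherwise recycle a slot and ask
     * the caller to flush. All names here are invented. */
    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define MAX_ASIDS 6

    struct asid_slot {
        uint64_t ctx_id;   /* which address space last owned this slot */
        uint64_t tlb_gen;  /* TLB generation cached for that owner */
    };

    static struct asid_slot asid_cache[MAX_ASIDS];
    static unsigned int next_asid;

    static void choose_asid(uint64_t ctx_id, uint64_t next_tlb_gen,
                            unsigned int *new_asid, bool *need_flush)
    {
        for (unsigned int i = 0; i < MAX_ASIDS; i++) {
            if (asid_cache[i].ctx_id != ctx_id)
                continue;
            /* Slot still belongs to us: flush only if it went stale. */
            *new_asid = i;
            *need_flush = asid_cache[i].tlb_gen < next_tlb_gen;
            asid_cache[i].tlb_gen = next_tlb_gen;
            return;
        }
        /* No slot for this context: recycle one and force a flush. */
        *new_asid = next_asid++ % MAX_ASIDS;
        *need_flush = true;
        asid_cache[*new_asid].ctx_id = ctx_id;
        asid_cache[*new_asid].tlb_gen = next_tlb_gen;
    }

    int main(void)
    {
        unsigned int asid;
        bool need_flush;

        choose_asid(42, 1, &asid, &need_flush);
        printf("asid=%u flush=%d\n", asid, need_flush); /* first use: flush */
        choose_asid(42, 1, &asid, &need_flush);
        printf("asid=%u flush=%d\n", asid, need_flush); /* cached: no flush */
        return 0;
    }
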
/linux/arch/arm/mm/

pmsa-v7.c
    368  unsigned int subregions, bool need_flush)    in mpu_setup_region() argument
    387  if (need_flush)    in mpu_setup_region()
    441  bool need_flush = region == PMSAv7_RAM_REGION;    in pmsav7_setup() local
    448  xip[i].subreg, need_flush);    in pmsav7_setup()

/linux/arch/x86/kernel/

amd_gart_64.c
     89  static bool need_flush; /* global flush state. set for each gart wrap */    variable
    106  need_flush = true;    in alloc_iommu()
    115  need_flush = true;    in alloc_iommu()
    119  need_flush = true;    in alloc_iommu()
    144  if (need_flush) {    in flush_gart()
    146  need_flush = false;    in flush_gart()

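amd_gart_64.c keeps a single global flag (its own comment: "set for each gart wrap") so that one flush covers every mapping made since the aperture allocator wrapped. A compilable sketch of that shape, with alloc_iommu_slot(), flush_gart(), and APERTURE_SLOTS as hypothetical stand-ins:

    /* Sketch only: one global flag, set when the allocator wraps,
     * consumed by the next flush. Not the real driver's code. */
    #include <stdbool.h>
    #include <stdio.h>

    #define APERTURE_SLOTS 4

    static bool need_flush;          /* global flush state, set on wrap */
    static unsigned int next_slot;

    static unsigned int alloc_iommu_slot(void)
    {
        if (next_slot == APERTURE_SLOTS) {
            next_slot = 0;
            need_flush = true;       /* stale mappings may still be cached */
        }
        return next_slot++;
    }

    static void flush_gart(void)
    {
        if (need_flush) {
            puts("flushing GART TLB");   /* stand-in for the hardware flush */
            need_flush = false;
        }
    }

    int main(void)
    {
        for (int i = 0; i < 6; i++)
            alloc_iommu_slot();
        flush_gart();                /* flushes once: allocator wrapped */
        flush_gart();                /* no-op: flag already cleared */
        return 0;
    }
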
/linux/drivers/gpu/drm/etnaviv/

etnaviv_buffer.c
    350  bool need_flush = switch_mmu_context || gpu->flush_seq != new_flush_seq;    in etnaviv_buffer_queue() local
    368  if (need_flush || switch_context) {    in etnaviv_buffer_queue()
    375  if (need_flush) {    in etnaviv_buffer_queue()
    404  if (need_flush) {    in etnaviv_buffer_queue()

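The etnaviv match at line 350 decides whether to flush by comparing a per-context flush sequence number against the one the GPU last observed. A small sketch of that sequence-number test, assuming invented gpu_state/mmu_ctx types:

    /* Sketch only: flush when the context's flush seqno moved since the
     * GPU last flushed, or when switching MMU contexts outright. */
    #include <stdbool.h>
    #include <stdio.h>

    struct gpu_state { unsigned int flush_seq; };
    struct mmu_ctx  { unsigned int flush_seq; };

    static bool queue_buffer(struct gpu_state *gpu, struct mmu_ctx *ctx,
                             bool switch_mmu_context)
    {
        bool need_flush = switch_mmu_context ||
                          gpu->flush_seq != ctx->flush_seq;

        if (need_flush)
            gpu->flush_seq = ctx->flush_seq;  /* emit flush cmds, then sync */
        return need_flush;
    }

    int main(void)
    {
        struct gpu_state gpu = { 0 };
        struct mmu_ctx ctx = { 1 };

        printf("%d\n", queue_buffer(&gpu, &ctx, false)); /* 1: seq moved */
        printf("%d\n", queue_buffer(&gpu, &ctx, false)); /* 0: up to date */
        return 0;
    }
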
/linux/arch/sparc/kernel/

iommu-common.c
     19  static inline bool need_flush(struct iommu_map_table *iommu)    function
    206  (n < pool->hint || need_flush(iommu))) {    in iommu_tbl_range_alloc()

/linux/mm/

highmem.c
    160  int need_flush = 0;    in flush_all_zero_pkmaps() local
    191  need_flush = 1;    in flush_all_zero_pkmaps()
    193  if (need_flush)    in flush_all_zero_pkmaps()

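flush_all_zero_pkmaps() is the classic batched form: walk every slot, tear down the unused ones, and issue a single kernel TLB flush at the end only if anything was removed. A sketch follows; the pkmap_count contents and the unmap step are invented for illustration:

    /* Sketch only: scan all slots, unmap the unused ones, flush once. */
    #include <stdio.h>

    #define NR_PKMAPS 8

    static int pkmap_count[NR_PKMAPS] = { 0, 2, 0, 1, 0, 0, 3, 0 };

    static void flush_all_zero_pkmaps(void)
    {
        int need_flush = 0;

        for (int i = 0; i < NR_PKMAPS; i++) {
            if (pkmap_count[i] != 0)
                continue;
            /* the real code unmaps the page table entry here */
            need_flush = 1;
        }
        if (need_flush)
            puts("one TLB flush covering the whole pkmap window");
    }

    int main(void)
    {
        flush_all_zero_pkmaps();
        return 0;
    }
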
/linux/drivers/infiniband/hw/mlx4/

cm.c
    528  int need_flush = 0;    in mlx4_ib_cm_paravirt_clean() local
    536  need_flush |= !cancel_delayed_work(&map->timeout);    in mlx4_ib_cm_paravirt_clean()
    542  if (need_flush)    in mlx4_ib_cm_paravirt_clean()

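The mlx4 match accumulates need_flush |= !cancel_delayed_work(...): if any cancel fails, that handler may already be running, so the queue is flushed once afterwards instead of waiting per item. A sketch with a stubbed-out cancel; cancel_delayed_work_stub() and NR_ITEMS are invented:

    /* Sketch only: cancel-or-flush. One flush covers every item whose
     * cancellation raced with execution. */
    #include <stdbool.h>
    #include <stdio.h>

    #define NR_ITEMS 3

    /* true = cancelled while still pending, like cancel_delayed_work() */
    static bool cancel_delayed_work_stub(int id)
    {
        return id != 1;   /* pretend item 1 already started running */
    }

    int main(void)
    {
        int need_flush = 0;

        for (int id = 0; id < NR_ITEMS; id++)
            need_flush |= !cancel_delayed_work_stub(id);

        if (need_flush)
            puts("flush the workqueue once for in-flight handlers");
        return 0;
    }
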
/linux/arch/arm64/kvm/hyp/

pgtable.c
    898  bool need_flush = false;    in stage2_unmap_walker() local
    914  need_flush = !stage2_has_fwb(pgt);    in stage2_unmap_walker()
    924  if (need_flush) {    in stage2_unmap_walker()

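Here the flush is a cache maintenance decision rather than a TLB one: if FWB (presumably Armv8.4 FEAT_S2FWB, stage-2 forced write-back) is present, unmapped guest pages need no cache clean. A trivial sketch, with stage2_has_fwb_stub() standing in for the real feature check:

    /* Sketch only: skip cache maintenance when the FWB feature makes
     * it unnecessary. The stub always reports FWB absent. */
    #include <stdbool.h>
    #include <stdio.h>

    static bool stage2_has_fwb_stub(void) { return false; }

    int main(void)
    {
        bool need_flush = !stage2_has_fwb_stub();

        if (need_flush)
            puts("clean+invalidate the unmapped range");
        return 0;
    }
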
/linux/fs/ceph/

snap.c
    535  capsnap->need_flush = true;    in ceph_queue_cap_snap()
    547  capsnap->need_flush ? "" : "no_flush");    in ceph_queue_cap_snap()

super.h
    224  bool need_flush;    member

caps.c
   1426  if (capsnap->need_flush) {    in __prep_cap()
   1548  BUG_ON(!capsnap->need_flush);    in __ceph_flush_snaps()
   3004  if (!capsnap->need_flush &&    in ceph_try_drop_cap_snap()

/linux/drivers/md/

dm-writecache.c
    997  bool need_flush = false;    in writecache_resume() local
   1065  need_flush = true;    in writecache_resume()
   1086  need_flush = true;    in writecache_resume()
   1093  if (need_flush) {    in writecache_resume()

raid10.c
   4746  int need_flush = 0;    in reshape_request() local
   4787  need_flush = 1;    in reshape_request()
   4809  need_flush = 1;    in reshape_request()
   4819  if (need_flush ||    in reshape_request()

/linux/drivers/gpu/drm/i915/gem/selftests/

i915_gem_context.c
    455  unsigned int n, m, need_flush;    in cpu_fill() local
    459  err = i915_gem_object_prepare_write(obj, &need_flush);    in cpu_fill()

/linux/arch/x86/kvm/mmu/

mmu.c
   1457  int need_flush = 0;    in kvm_set_pte_rmapp() local
   1469  need_flush = 1;    in kvm_set_pte_rmapp()
   1483  if (need_flush && kvm_available_flush_tlb_with_range()) {    in kvm_set_pte_rmapp()
   1488  return need_flush;    in kvm_set_pte_rmapp()

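kvm_set_pte_rmapp() records whether any PTE changed during the rmap walk, then either issues a ranged flush itself or hands the decision back to the caller via its return value. A sketch with an invented update_rmap() and a plain boolean standing in for kvm_available_flush_tlb_with_range():

    /* Sketch only: flush here if ranged flushes are supported,
     * otherwise return the flag so the caller flushes. */
    #include <stdbool.h>
    #include <stdio.h>

    static bool flush_with_range_supported = true;

    static int update_rmap(const int *ptes, int n)
    {
        int need_flush = 0;

        for (int i = 0; i < n; i++) {
            if (ptes[i]) {
                /* zap or rewrite the PTE here */
                need_flush = 1;
            }
        }
        if (need_flush && flush_with_range_supported) {
            puts("ranged TLB flush issued here");
            return 0;            /* already flushed, caller need not */
        }
        return need_flush;       /* caller decides whether to flush */
    }

    int main(void)
    {
        int ptes[] = { 0, 1, 0 };
        printf("caller must flush: %d\n", update_rmap(ptes, 3));
        return 0;
    }
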
Completed in 71 milliseconds