/linux/mm/
nommu.c
     588  } else if (vma->vm_end < pvma->vm_end)  in add_vma_to_mm()
     590  else if (vma->vm_end > pvma->vm_end) {  in add_vma_to_mm()
     680  if (vma->vm_end > addr) {  in find_vma()
     731  if (vma->vm_end == end) {  in find_vma_exact()
    1041  region->vm_end = vma->vm_end = 0;  in do_mmap_private()
    1171  vma->vm_end = 0;  in do_mmap()
    1206  vma->vm_end = region->vm_end = addr + len;  in do_mmap()
    1370  region->vm_top = region->vm_end = new->vm_end = addr;  in split_vma()
    1386  vma->vm_region->vm_end = vma->vm_end = addr;  in split_vma()
    1411  vma->vm_end = from;  in shrink_vma()
    [all …]
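The find_vma() hit at line 680 relies on the VMA list being kept sorted by address: the lookup returns the first mapping whose vm_end lies above the address, which may or may not actually contain it. Below is a minimal userspace sketch of that pattern; the two-field struct is a stand-in for the kernel's vm_area_struct, and find_vma_linear() is a hypothetical name.

```c
#include <stddef.h>

struct vm_area_struct {
	unsigned long vm_start;	/* first byte of the mapping */
	unsigned long vm_end;	/* one past the last byte (half-open) */
};

/* Return the first VMA ending above addr, or NULL. The caller must
 * still check vm_start: addr may sit in a gap just below the result. */
static struct vm_area_struct *
find_vma_linear(struct vm_area_struct *vmas, size_t n, unsigned long addr)
{
	for (size_t i = 0; i < n; i++) {
		if (vmas[i].vm_end > addr)
			return &vmas[i];
	}
	return NULL;
}
```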
mmap.c
     367  pend = vma->vm_end;  in browse_rb()
     788  end = next->vm_end;  in __vma_adjust()
     882  if (end != vma->vm_end) {  in __vma_adjust()
     883  vma->vm_end = end;  in __vma_adjust()
     990  end = next->vm_end;  in __vma_adjust()
    1773  vma->vm_end = addr + len;  in mmap_region()
    2358  vma->vm_end - size;  in acct_stack_growth()
    2445  vma->vm_end = address;  in expand_upwards()
    2718  new->vm_end = addr;  in __split_vma()
    3074  vma->vm_end = addr + len;  in do_brk_flags()
    [all …]
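The __split_vma() hit at line 2718 is the core of cutting a mapping in two: one half keeps the low range, the other starts where it ends, and only vm_start/vm_end move. A hedged userspace model of just that bookkeeping follows; the real kernel function also rewires the rb-tree, anon_vma chains, and file offsets, none of which is shown here.

```c
#include <assert.h>

struct vm_area_struct { unsigned long vm_start, vm_end; };

/* Split *vma at addr. If new_below, *new becomes [vm_start, addr) and
 * *vma shrinks to [addr, vm_end); otherwise the roles are swapped. */
static void split_vma_at(struct vm_area_struct *vma,
			 struct vm_area_struct *new, unsigned long addr,
			 int new_below)
{
	assert(addr > vma->vm_start && addr < vma->vm_end);
	*new = *vma;
	if (new_below) {
		new->vm_end = addr;	/* mirrors "new->vm_end = addr" above */
		vma->vm_start = addr;
	} else {
		new->vm_start = addr;
		vma->vm_end = addr;
	}
}
```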
mlock.c
    527  if (end != vma->vm_end) {  in mlock_fixup()
    588  tmp = vma->vm_end;  in apply_vma_lock_flags()
    595  if (nstart < prev->vm_end)  in apply_vma_lock_flags()
    596  nstart = prev->vm_end;  in apply_vma_lock_flags()
    630  if (start >= vma->vm_end)  in count_mm_mlocked_page_nr()
    637  if (start + len < vma->vm_end) {  in count_mm_mlocked_page_nr()
    641  count += vma->vm_end - vma->vm_start;  in count_mm_mlocked_page_nr()
    769  mlock_fixup(vma, &prev, vma->vm_start, vma->vm_end, newflags);  in apply_mlockall_flags()
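The apply_vma_lock_flags() lines show a recurring kernel idiom: walk every VMA overlapping [start, end), clamping each visit to the VMA and advancing by its vm_end. A hedged userspace model of the loop shape (walk_range() and its callback are hypothetical names):

```c
#include <stddef.h>

struct vm_area_struct { unsigned long vm_start, vm_end; };

/* Visit the overlap of [start, end) with each VMA, in address order. */
static int walk_range(struct vm_area_struct *vmas, size_t n,
		      unsigned long start, unsigned long end,
		      int (*visit)(struct vm_area_struct *,
				   unsigned long, unsigned long))
{
	for (size_t i = 0; i < n && start < end; i++) {
		struct vm_area_struct *vma = &vmas[i];
		unsigned long from, to;
		int ret;

		if (vma->vm_end <= start)	/* wholly below the range */
			continue;
		if (vma->vm_start >= end)	/* past the range: done */
			break;
		from = start > vma->vm_start ? start : vma->vm_start;
		to = end < vma->vm_end ? end : vma->vm_end;	/* like "tmp = vma->vm_end" */
		ret = visit(vma, from, to);
		if (ret)
			return ret;
		start = to;	/* like "nstart = prev->vm_end" above */
	}
	return 0;
}
```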
madvise.c
     159  if (end != vma->vm_end) {  in madvise_behavior()
     720  if (range.start >= vma->vm_end)  in madvise_free_single_vma()
     722  range.end = min(vma->vm_end, end_addr);  in madvise_free_single_vma()
     801  if (end > vma->vm_end) {  in madvise_dontneed_free()
     814  end = vma->vm_end;  in madvise_dontneed_free()
     845  if (!vma || start >= vma->vm_end) {  in madvise_populate()
     851  tmp_end = min_t(unsigned long, end, vma->vm_end);  in madvise_populate()
    1197  tmp = vma->vm_end;  in do_madvise()
    1206  if (prev && start < prev->vm_end)  in do_madvise()
    1207  start = prev->vm_end;  in do_madvise()
vmacache.c
     79  if (vma->vm_start <= addr && vma->vm_end > addr) {  in vmacache_find()
    107  if (vma && vma->vm_start == start && vma->vm_end == end) {  in vmacache_find_exact()
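vmacache_find() spells out the canonical half-open test: an address belongs to a VMA iff vm_start <= addr < vm_end, so vm_end itself is never inside the mapping. A minimal sketch with a stand-in struct and a hypothetical helper name:

```c
#include <stdbool.h>

struct vm_area_struct { unsigned long vm_start, vm_end; };

/* true iff addr falls in the half-open interval [vm_start, vm_end) */
static bool vma_contains(const struct vm_area_struct *vma, unsigned long addr)
{
	return vma->vm_start <= addr && addr < vma->vm_end;
}

/* e.g. for a one-page VMA [0x1000, 0x2000): 0x1fff is inside, 0x2000 is not */
```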
mremap.c
     597  if (!err && vma->vm_end != old_addr + old_len)  in move_vma()
     660  excess = vma->vm_end - vma->vm_start - old_len;  in move_vma()
     662  old_addr + old_len < vma->vm_end)  in move_vma()
     691  vma->vm_end == (old_addr + old_len))  in move_vma()
     751  if (old_len > vma->vm_end - addr)  in vma_to_resize()
     871  unsigned long end = vma->vm_end + delta;  in vma_expandable()
     872  if (end < vma->vm_end) /* overflow */  in vma_expandable()
    1010  if (old_len == vma->vm_end - addr) {  in SYSCALL_DEFINE5()
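The vma_expandable() hits at lines 871–872 guard mremap()'s grow-in-place path: vm_end + delta must not wrap around an unsigned long. A hedged sketch of just that arithmetic; next_start is an assumption standing in for the real code's check that the following mapping is not overlapped.

```c
#include <stdbool.h>

struct vm_area_struct { unsigned long vm_start, vm_end; };

static bool can_grow_in_place(const struct vm_area_struct *vma,
			      unsigned long delta, unsigned long next_start)
{
	unsigned long end = vma->vm_end + delta;

	if (end < vma->vm_end)		/* unsigned wrap-around, as at line 872 */
		return false;
	return end <= next_start;	/* assumed: must not run into the next VMA */
}
```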
msync.c
     90  fend = fstart + (min(end, vma->vm_end) - start) - 1;  in SYSCALL_DEFINE3()
     91  start = vma->vm_end;  in SYSCALL_DEFINE3()
internal.h
    400  munlock_vma_pages_range(vma, vma->vm_start, vma->vm_end);  in munlock_vma_pages_all()
    442  if (address < vma->vm_start || address >= vma->vm_end)  in vma_address()
    469  if (address < vma->vm_start || address > vma->vm_end)  in vma_address_end()
    470  address = vma->vm_end;  in vma_address_end()
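vma_address() derives a page's virtual address from its file offset and then bounds it against [vm_start, vm_end). A hedged sketch of that computation; vm_pgoff and the helper are modeled loosely on the kernel's, and PAGE_SHIFT is assumed to be 12 (4 KiB pages).

```c
struct vm_area_struct {
	unsigned long vm_start, vm_end;
	unsigned long vm_pgoff;		/* file offset of vm_start, in pages */
};

#define PAGE_SHIFT 12	/* assumed */

/* Address of page pgoff inside the VMA, or -1 if it maps outside it. */
static unsigned long page_address_in_vma(const struct vm_area_struct *vma,
					 unsigned long pgoff)
{
	unsigned long address = vma->vm_start +
				((pgoff - vma->vm_pgoff) << PAGE_SHIFT);

	if (address < vma->vm_start || address >= vma->vm_end)
		return (unsigned long)-1;	/* outside the mapping */
	return address;
}
```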
mprotect.c
    482  if (end != vma->vm_end) {  in mprotect_fixup()
    578  end = vma->vm_end;  in do_mprotect_pkey()
    629  tmp = vma->vm_end;  in do_mprotect_pkey()
    645  if (nstart < prev->vm_end)  in do_mprotect_pkey()
    646  nstart = prev->vm_end;  in do_mprotect_pkey()
/linux/fs/proc/
task_nommu.c
     37  size += region->vm_end - region->vm_start;  in task_mem()
     39  size = vma->vm_end - vma->vm_start;  in task_mem()
     48  slack = region->vm_end - vma->vm_end;  in task_mem()
     92  vsize += vma->vm_end - vma->vm_start;  in task_vsize()
    114  size += region->vm_end - region->vm_start;  in task_statm()
    139  vma->vm_end >= mm->start_stack;  in is_stack()
    168  vma->vm_end,  in nommu_vma_show()
/linux/mm/damon/
vaddr-test.h
     35  gap = vmas[j].vm_start - vmas[j - 1].vm_end;  in __link_vmas()
     78  (struct vm_area_struct) {.vm_start = 10, .vm_end = 20},  in damon_test_three_regions_in_vmas()
     79  (struct vm_area_struct) {.vm_start = 20, .vm_end = 25},  in damon_test_three_regions_in_vmas()
     80  (struct vm_area_struct) {.vm_start = 200, .vm_end = 210},  in damon_test_three_regions_in_vmas()
     81  (struct vm_area_struct) {.vm_start = 210, .vm_end = 220},  in damon_test_three_regions_in_vmas()
     82  (struct vm_area_struct) {.vm_start = 300, .vm_end = 305},  in damon_test_three_regions_in_vmas()
     83  (struct vm_area_struct) {.vm_start = 307, .vm_end = 330},  in damon_test_three_regions_in_vmas()
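Line 35 computes the gap between neighbouring mappings as vmas[j].vm_start - vmas[j - 1].vm_end; DAMON uses the largest such gaps to carve the address space into regions. A small self-contained sketch of the gap computation over the same test VMAs:

```c
#include <stdio.h>

struct vm_area_struct { unsigned long vm_start, vm_end; };

int main(void)
{
	struct vm_area_struct vmas[] = {
		{ .vm_start = 10,  .vm_end = 20  },
		{ .vm_start = 20,  .vm_end = 25  },
		{ .vm_start = 200, .vm_end = 210 },
	};

	/* gap before each VMA: start of this one minus end of the previous */
	for (size_t j = 1; j < sizeof(vmas) / sizeof(vmas[0]); j++)
		printf("gap before vma %zu: %lu\n",
		       j, vmas[j].vm_start - vmas[j - 1].vm_end);
	return 0;	/* prints gaps of 0 and 175 */
}
```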
/linux/drivers/gpu/drm/
drm_vm.c
     78  if (efi_range_is_wc(vma->vm_start, vma->vm_end -  in drm_io_prot()
    240  vma->vm_start, vma->vm_end - vma->vm_start);  in drm_vm_shm_close()
    395  vma->vm_start, vma->vm_end - vma->vm_start);  in drm_vm_open_locked()
    421  vma->vm_start, vma->vm_end - vma->vm_start);  in drm_vm_close_locked()
    465  unsigned long length = vma->vm_end - vma->vm_start;  in drm_mmap_dma()
    470  vma->vm_start, vma->vm_end, vma->vm_pgoff);  in drm_mmap_dma()
    532  vma->vm_start, vma->vm_end, vma->vm_pgoff);  in drm_mmap_locked()
    559  if (map->size < vma->vm_end - vma->vm_start)  in drm_mmap_locked()
    600  vma->vm_end - vma->vm_start,  in drm_mmap_locked()
    606  vma->vm_start, vma->vm_end, (unsigned long long)(map->offset + offset));  in drm_mmap_locked()
    [all …]
/linux/drivers/char/
mspec.c
     76  unsigned long vm_end; /* Original (unsplit) end. */  member
    114  last_index = (vdata->vm_end - vdata->vm_start) >> PAGE_SHIFT;  in mspec_close()
    203  vdata->vm_end = vma->vm_end;  in mspec_mmap()
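mspec.c caches the original vm_end at mmap time (line 203) because later split_vma() calls shrink vma->vm_end, while the driver must still cover the unsplit range when it cleans up (line 114). A hedged sketch of the idea, with an assumed 4 KiB PAGE_SHIFT and simplified names:

```c
struct vm_area_struct { unsigned long vm_start, vm_end; };

#define PAGE_SHIFT 12	/* assumed 4 KiB pages */

struct vma_data {
	unsigned long vm_start;	/* original (unsplit) start */
	unsigned long vm_end;	/* original (unsplit) end */
};

/* Snapshot the bounds at mmap time, before any split can move them. */
static void record_bounds(struct vma_data *vdata,
			  const struct vm_area_struct *vma)
{
	vdata->vm_start = vma->vm_start;
	vdata->vm_end = vma->vm_end;	/* mirrors line 203 above */
}

/* Same arithmetic as last_index in mspec_close(). */
static unsigned long original_pages(const struct vma_data *vdata)
{
	return (vdata->vm_end - vdata->vm_start) >> PAGE_SHIFT;
}
```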
/linux/scripts/coccinelle/api/
vma_pages.cocci
     22  * (vma->vm_end - vma->vm_start) >> PAGE_SHIFT
     32  - ((vma->vm_end - vma->vm_start) >> PAGE_SHIFT)
     44  (vma->vm_end@p - vma->vm_start) >> PAGE_SHIFT
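This Coccinelle rule replaces open-coded (vma->vm_end - vma->vm_start) >> PAGE_SHIFT with the vma_pages() helper from <linux/mm.h>. A faithful userspace model of that helper, with a stand-in struct and an assumed 4 KiB page size:

```c
struct vm_area_struct { unsigned long vm_start, vm_end; };

#define PAGE_SHIFT 12	/* assumed page size of 4 KiB */

/* Number of pages spanned by the VMA; both bounds are page-aligned. */
static inline unsigned long vma_pages(struct vm_area_struct *vma)
{
	return (vma->vm_end - vma->vm_start) >> PAGE_SHIFT;
}
```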
/linux/arch/csky/include/asm/
tlb.h
     11  flush_cache_range(vma, (vma)->vm_start, (vma)->vm_end); \
     17  flush_tlb_range(vma, (vma)->vm_start, (vma)->vm_end); \
/linux/arch/x86/um/
mem_32.c
     18  gate_vma.vm_end = FIXADDR_USER_END;  in gate_vma_init()
     49  return (addr >= vma->vm_start) && (addr < vma->vm_end);  in in_gate_area()
/linux/drivers/media/v4l2-core/
videobuf-vmalloc.c
     56  map->count, vma->vm_start, vma->vm_end);  in videobuf_vm_open()
     68  map->count, vma->vm_start, vma->vm_end);  in videobuf_vm_close()
    233  pages = PAGE_ALIGN(vma->vm_end - vma->vm_start);  in __videobuf_mmap_mapper()
    254  map, q, vma->vm_start, vma->vm_end,  in __videobuf_mmap_mapper()
videobuf-dma-contig.c
     68  map, map->count, vma->vm_start, vma->vm_end);  in videobuf_vm_open()
     80  map, map->count, vma->vm_start, vma->vm_end);  in videobuf_vm_close()
    178  if ((untagged_baddr + mem->size) > vma->vm_end)  in videobuf_dma_contig_user_get()
    327  map, q, vma->vm_start, vma->vm_end,  in __videobuf_mmap_mapper()
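The check at line 178 rejects user buffers that spill past the mapping: base + size must stay within the VMA. A hedged sketch of that validation (names hypothetical; it assumes baddr + size does not itself wrap, which the caller would have to guarantee):

```c
#include <stdbool.h>

struct vm_area_struct { unsigned long vm_start, vm_end; };

static bool buffer_fits_vma(const struct vm_area_struct *vma,
			    unsigned long baddr, unsigned long size)
{
	if (baddr < vma->vm_start)
		return false;
	if (baddr + size > vma->vm_end)	/* mirrors the test at line 178 */
		return false;
	return true;
}
```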
/linux/drivers/media/common/videobuf2/
videobuf2-memops.c
     94  vma->vm_end);  in vb2_common_vm_open()
    112  vma->vm_end);  in vb2_common_vm_close()
frame_vector.c
     71  while (ret < nr_frames && start + PAGE_SIZE <= vma->vm_end) {  in get_vaddr_frames()
     82  if (start < vma->vm_end)  in get_vaddr_frames()
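get_vaddr_frames() steps through a VMA one page at a time, continuing only while a whole page still fits below vm_end. A minimal model of that loop condition, with an assumed page size:

```c
struct vm_area_struct { unsigned long vm_start, vm_end; };

#define PAGE_SIZE 4096UL	/* assumed */

/* How many whole pages fit between start and vm_end, capped at max. */
static unsigned int count_whole_pages(const struct vm_area_struct *vma,
				      unsigned long start, unsigned int max)
{
	unsigned int n = 0;

	while (n < max && start + PAGE_SIZE <= vma->vm_end) {
		n++;			/* one more whole page fits */
		start += PAGE_SIZE;
	}
	return n;
}
```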
/linux/drivers/soc/qcom/
rmtfs_mem.c
    136  if (vma->vm_end - vma->vm_start > rmtfs_mem->size) {  in qcom_rmtfs_mem_mmap()
    139  vma->vm_end, vma->vm_start,  in qcom_rmtfs_mem_mmap()
    140  (vma->vm_end - vma->vm_start), &rmtfs_mem->size);  in qcom_rmtfs_mem_mmap()
    148  vma->vm_end - vma->vm_start,  in qcom_rmtfs_mem_mmap()
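Line 136 is the standard mmap() handler sanity check: the requested span, vm_end - vm_start, must not exceed the backing resource. A userspace sketch of that guard, with a hypothetical resource size parameter:

```c
#include <errno.h>

struct vm_area_struct { unsigned long vm_start, vm_end; };

static int check_mmap_size(const struct vm_area_struct *vma,
			   unsigned long resource_size)
{
	if (vma->vm_end - vma->vm_start > resource_size)
		return -EINVAL;	/* caller asked to map more than exists */
	return 0;
}
```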
/linux/drivers/dax/
device.c
     35  if (vma->vm_start & mask || vma->vm_end & mask) {  in check_vma()
     38  current->comm, func, vma->vm_start, vma->vm_end,  in check_vma()
    131  (pmd_addr + PMD_SIZE) > vmf->vma->vm_end)  in __dev_dax_pmd_fault()
    173  (pud_addr + PUD_SIZE) > vmf->vma->vm_end)  in __dev_dax_pud_fault()
    207  vmf->vma->vm_start, vmf->vma->vm_end, pe_size);  in dev_dax_huge_fault()
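check_vma() at line 35 insists that both ends of the mapping are aligned to the dax device's fault granularity (for example 2 MiB for PMD faults). A sketch of the mask test; the PMD_SIZE value is an assumption for illustration, and align must be a power of two:

```c
#include <stdbool.h>

struct vm_area_struct { unsigned long vm_start, vm_end; };

#define PMD_SIZE (2UL << 20)	/* assumed 2 MiB huge-page granularity */

static bool vma_is_aligned(const struct vm_area_struct *vma,
			   unsigned long align)
{
	unsigned long mask = align - 1;	/* align must be a power of two */

	return !(vma->vm_start & mask) && !(vma->vm_end & mask);
}

/* e.g. vma_is_aligned(vma, PMD_SIZE) before taking the PMD fault path */
```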
/linux/include/trace/events/
fs_dax.h
     17  __field(unsigned long, vm_end)
     30  __entry->vm_end = vmf->vma->vm_end;
     47  __entry->vm_end,
/linux/fs/
exec.c
    271  vma->vm_end = STACK_TOP_MAX;  in __bprm_mm_init()
    272  vma->vm_start = vma->vm_end - PAGE_SIZE;  in __bprm_mm_init()
    282  bprm->p = vma->vm_end - sizeof(void *);  in __bprm_mm_init()
    680  unsigned long old_end = vma->vm_end;  in shift_arg_pages()
    766  if (vma->vm_end - vma->vm_start > stack_base)  in setup_arg_pages()
    773  bprm->p = vma->vm_end - stack_shift;  in setup_arg_pages()
    782  stack_shift = vma->vm_end - stack_top;  in setup_arg_pages()
    809  ret = mprotect_fixup(vma, &prev, vma->vm_start, vma->vm_end,  in setup_arg_pages()
    831  stack_size = vma->vm_end - vma->vm_start;  in setup_arg_pages()
    841  stack_base = vma->vm_end + stack_expand;  in setup_arg_pages()
    [all …]
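__bprm_mm_init() (lines 271–282) seeds a new process's stack as a single page pinned to the top of the address space: vm_end is fixed first, vm_start is derived from it, and the initial stack pointer sits just below vm_end. A hedged sketch; PAGE_SIZE and STACK_TOP_MAX are assumed, arch-specific values:

```c
struct vm_area_struct { unsigned long vm_start, vm_end; };

#define PAGE_SIZE	4096UL			/* assumed */
#define STACK_TOP_MAX	0x7ffffffff000UL	/* assumed, arch-specific */

/* Place the initial one-page stack VMA; returns the starting bprm->p. */
static unsigned long init_stack_vma(struct vm_area_struct *vma)
{
	vma->vm_end = STACK_TOP_MAX;
	vma->vm_start = vma->vm_end - PAGE_SIZE;	/* one page; grows down later */
	return vma->vm_end - sizeof(void *);		/* initial stack pointer slot */
}
```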
/linux/arch/powerpc/kvm/
book3s_hv_uvmem.c
     409  ret = ksm_madvise(vma, vma->vm_start, vma->vm_end,  in kvmppc_memslot_page_merge()
     415  start = vma->vm_end;  in kvmppc_memslot_page_merge()
     416  } while (end > vma->vm_end);  in kvmppc_memslot_page_merge()
     617  if (!vma || addr >= vma->vm_end) {  in kvmppc_uvmem_drop_pages()
     802  if (!vma || vma->vm_start > start || vma->vm_end < end)  in kvmppc_uv_migrate_mem_slot()
     963  if (!vma || vma->vm_start > start || vma->vm_end < end)  in kvmppc_h_svm_page_in()
    1063  if (!vma || vma->vm_start > start || vma->vm_end < end)  in kvmppc_h_svm_page_out()
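Three of the call sites above (lines 802, 963, 1063) share the same guard: a single VMA must cover the whole requested span or the operation is abandoned. A minimal userspace model of that coverage test:

```c
#include <stdbool.h>

struct vm_area_struct { unsigned long vm_start, vm_end; };

/* Inverse of "!vma || vma->vm_start > start || vma->vm_end < end". */
static bool vma_covers(const struct vm_area_struct *vma,
		       unsigned long start, unsigned long end)
{
	return vma && vma->vm_start <= start && vma->vm_end >= end;
}
```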