
Search results for refs:vm_start (1 – 25 of 293), sorted by relevance


/linux/mm/
nommu.c
467 if (region->vm_start < pregion->vm_start) in add_nommu_region()
469 else if (region->vm_start > pregion->vm_start) in add_nommu_region()
583 if (vma->vm_start < pvma->vm_start) in add_vma_to_mm()
585 else if (vma->vm_start > pvma->vm_start) { in add_vma_to_mm()
1014 vma->vm_start = region->vm_start; in do_mmap_private()
1040 region->vm_start = vma->vm_start = 0; in do_mmap_private()
1205 vma->vm_start = region->vm_start = addr; in do_mmap()
1372 region->vm_start = new->vm_start = addr; in split_vma()
1383 vma->vm_region->vm_start = vma->vm_start = addr; in split_vma()
1413 vma->vm_start = to; in shrink_vma()
[all …]
mmap.c
343 vma->vm_start, prev); in browse_rb()
366 prev = vma->vm_start; in browse_rb()
626 if (vma->vm_start > end) in count_vma_pages_range()
879 vma->vm_start = start; in __vma_adjust()
1267 b->vm_pgoff == a->vm_pgoff + ((b->vm_start - a->vm_start) >> PAGE_SHIFT); in anon_vma_compatible()
1772 vma->vm_start = addr; in mmap_region()
1799 addr = vma->vm_start; in mmap_region()
2599 start = vma->vm_start; in find_extend_vma()
2720 new->vm_start = addr; in __split_vma()
3073 vma->vm_start = addr; in do_brk_flags()
[all …]
mlock.c
512 pgoff = vma->vm_pgoff + ((start - vma->vm_start) >> PAGE_SHIFT); in mlock_fixup()
521 if (start != vma->vm_start) { in mlock_fixup()
575 if (!vma || vma->vm_start > start) in apply_vma_lock_flags()
579 if (start > vma->vm_start) in apply_vma_lock_flags()
601 if (!vma || vma->vm_start != nstart) { in apply_vma_lock_flags()
632 if (start + len <= vma->vm_start) in count_mm_mlocked_page_nr()
635 if (start > vma->vm_start) in count_mm_mlocked_page_nr()
636 count -= (start - vma->vm_start); in count_mm_mlocked_page_nr()
638 count += start + len - vma->vm_start; in count_mm_mlocked_page_nr()
641 count += vma->vm_end - vma->vm_start; in count_mm_mlocked_page_nr()
[all …]
mremap.c
595 if (vma->vm_start != old_addr) in move_vma()
660 excess = vma->vm_end - vma->vm_start - old_len; in move_vma()
661 if (old_addr > vma->vm_start && in move_vma()
690 if (new_vma != vma && vma->vm_start == old_addr && in move_vma()
758 pgoff = (addr - vma->vm_start) >> PAGE_SHIFT; in vma_to_resize()
853 ((addr - vma->vm_start) >> PAGE_SHIFT), in mremap_to()
874 if (vma->vm_next && vma->vm_next->vm_start < end) /* intersection */ in vma_expandable()
876 if (get_unmapped_area(NULL, vma->vm_start, end - vma->vm_start, in vma_expandable()
946 if (!vma || vma->vm_start > addr) { in SYSCALL_DEFINE5()
1022 if (vma_adjust(vma, vma->vm_start, addr + new_len, in SYSCALL_DEFINE5()
[all …]
msync.c
73 if (start < vma->vm_start) { in SYSCALL_DEFINE3()
76 start = vma->vm_start; in SYSCALL_DEFINE3()
88 fstart = (start - vma->vm_start) + in SYSCALL_DEFINE3()
madvise.c
138 pgoff = vma->vm_pgoff + ((start - vma->vm_start) >> PAGE_SHIFT); in madvise_behavior()
149 if (start != vma->vm_start) { in madvise_behavior()
299 offset = (loff_t)(start - vma->vm_start) in madvise_willneed()
719 range.start = max(vma->vm_start, start_addr); in madvise_free_single_vma()
723 if (range.end <= vma->vm_start) in madvise_free_single_vma()
787 if (start < vma->vm_start) { in madvise_dontneed_free()
847 if (!vma || start < vma->vm_start) in madvise_populate()
911 offset = (loff_t)(start - vma->vm_start) in madvise_remove()
1178 if (vma && start > vma->vm_start) in do_madvise()
1189 if (start < vma->vm_start) { in do_madvise()
[all …]
internal.h
400 munlock_vma_pages_range(vma, vma->vm_start, vma->vm_end); in munlock_vma_pages_all()
439 address = vma->vm_start + in vma_address()
442 if (address < vma->vm_start || address >= vma->vm_end) in vma_address()
447 address = vma->vm_start; in vma_address()
467 address = vma->vm_start + ((pgoff - vma->vm_pgoff) << PAGE_SHIFT); in vma_address_end()
469 if (address < vma->vm_start || address > vma->vm_end) in vma_address_end()
pagewalk.c
453 } else if (start < vma->vm_start) { /* outside vma */ in walk_page_range()
455 next = min(end, vma->vm_start); in walk_page_range()
525 err = walk_page_test(vma->vm_start, vma->vm_end, &walk); in walk_page_vma()
530 return __walk_page_range(vma->vm_start, vma->vm_end, &walk); in walk_page_vma()
587 start_addr = ((cba - vba) << PAGE_SHIFT) + vma->vm_start; in walk_page_mapping()
588 end_addr = ((cea - vba) << PAGE_SHIFT) + vma->vm_start; in walk_page_mapping()
595 err = walk_page_test(vma->vm_start, vma->vm_end, &walk); in walk_page_mapping()
mprotect.c
464 pgoff = vma->vm_pgoff + ((start - vma->vm_start) >> PAGE_SHIFT); in mprotect_fixup()
476 if (start != vma->vm_start) { in mprotect_fixup()
568 if (vma->vm_start >= end) in do_mprotect_pkey()
570 start = vma->vm_start; in do_mprotect_pkey()
575 if (vma->vm_start > start) in do_mprotect_pkey()
585 if (start > vma->vm_start) in do_mprotect_pkey()
651 if (!vma || vma->vm_start != nstart) { in do_mprotect_pkey()
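
Most of the mm/ hits above rely on one of two conversions between a virtual address inside a VMA and a page offset into the object backing it: pgoff = vm_pgoff + ((addr - vm_start) >> PAGE_SHIFT), as in mlock_fixup(), madvise_behavior() and mprotect_fixup(), and the inverse addr = vm_start + ((pgoff - vm_pgoff) << PAGE_SHIFT), as in vma_address(). Below is a minimal standalone sketch of both directions; the mock_vma struct, the helper names and the 4 KiB page size are assumptions for illustration only, not kernel code.

#include <assert.h>

#define PAGE_SHIFT 12 /* assumes 4 KiB pages */

struct mock_vma {
	unsigned long vm_start; /* first virtual address of the mapping */
	unsigned long vm_end;   /* first virtual address past the mapping */
	unsigned long vm_pgoff; /* file offset of vm_start, in pages */
};

/* virtual address -> page offset into the backing object (cf. mlock_fixup) */
static unsigned long addr_to_pgoff(const struct mock_vma *vma, unsigned long addr)
{
	return vma->vm_pgoff + ((addr - vma->vm_start) >> PAGE_SHIFT);
}

/* page offset into the backing object -> virtual address (cf. vma_address) */
static unsigned long pgoff_to_addr(const struct mock_vma *vma, unsigned long pgoff)
{
	return vma->vm_start + ((pgoff - vma->vm_pgoff) << PAGE_SHIFT);
}

int main(void)
{
	struct mock_vma vma = {
		.vm_start = 0x7f0000000000UL,
		.vm_end   = 0x7f0000010000UL,
		.vm_pgoff = 8, /* mapping starts 8 pages into the file */
	};
	unsigned long addr = vma.vm_start + 3 * (1UL << PAGE_SHIFT);

	assert(addr_to_pgoff(&vma, addr) == 11); /* page 3 of the VMA is file page 11 */
	assert(pgoff_to_addr(&vma, 11) == addr); /* and back again */
	return 0;
}
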
/linux/drivers/gpu/drm/
drm_vm.c
79 vma->vm_start)) in drm_io_prot()
210 offset = vmf->address - vma->vm_start; in drm_vm_shm_fault()
240 vma->vm_start, vma->vm_end - vma->vm_start); in drm_vm_shm_close()
314 offset = vmf->address - vma->vm_start; in drm_vm_dma_fault()
351 offset = vmf->address - vma->vm_start; in drm_vm_sg_fault()
395 vma->vm_start, vma->vm_end - vma->vm_start); in drm_vm_open_locked()
421 vma->vm_start, vma->vm_end - vma->vm_start); in drm_vm_close_locked()
559 if (map->size < vma->vm_end - vma->vm_start) in drm_mmap_locked()
598 if (io_remap_pfn_range(vma, vma->vm_start, in drm_mmap_locked()
600 vma->vm_end - vma->vm_start, in drm_mmap_locked()
[all …]
/linux/mm/damon/
vaddr-test.h
35 gap = vmas[j].vm_start - vmas[j - 1].vm_end; in __link_vmas()
78 (struct vm_area_struct) {.vm_start = 10, .vm_end = 20}, in damon_test_three_regions_in_vmas()
79 (struct vm_area_struct) {.vm_start = 20, .vm_end = 25}, in damon_test_three_regions_in_vmas()
80 (struct vm_area_struct) {.vm_start = 200, .vm_end = 210}, in damon_test_three_regions_in_vmas()
81 (struct vm_area_struct) {.vm_start = 210, .vm_end = 220}, in damon_test_three_regions_in_vmas()
82 (struct vm_area_struct) {.vm_start = 300, .vm_end = 305}, in damon_test_three_regions_in_vmas()
83 (struct vm_area_struct) {.vm_start = 307, .vm_end = 330}, in damon_test_three_regions_in_vmas()
/linux/arch/parisc/mm/
fault.c
128 if (tree->vm_start > addr) {
134 if (prev->vm_next->vm_start > addr)
255 vma->vm_start, vma->vm_end); in show_signal_msg()
289 if (!vma || address < vma->vm_start) in do_page_fault()
359 address < vma->vm_start || address >= vma->vm_end) { in do_page_fault()
/linux/fs/proc/
task_nommu.c
37 size += region->vm_end - region->vm_start; in task_mem()
39 size = vma->vm_end - vma->vm_start; in task_mem()
92 vsize += vma->vm_end - vma->vm_start; in task_vsize()
114 size += region->vm_end - region->vm_start; in task_statm()
138 return vma->vm_start <= mm->start_stack && in is_stack()
167 vma->vm_start, in nommu_vma_show()
/linux/drivers/media/v4l2-core/
videobuf-vmalloc.c
56 map->count, vma->vm_start, vma->vm_end); in videobuf_vm_open()
68 map->count, vma->vm_start, vma->vm_end); in videobuf_vm_close()
227 buf->baddr = vma->vm_start; in __videobuf_mmap_mapper()
233 pages = PAGE_ALIGN(vma->vm_end - vma->vm_start); in __videobuf_mmap_mapper()
254 map, q, vma->vm_start, vma->vm_end, in __videobuf_mmap_mapper()
/linux/drivers/char/
mspec.c
75 unsigned long vm_start; /* Original (unsplit) base. */ member
114 last_index = (vdata->vm_end - vdata->vm_start) >> PAGE_SHIFT; in mspec_close()
202 vdata->vm_start = vma->vm_start; in mspec_mmap()
/linux/scripts/coccinelle/api/
vma_pages.cocci
22 * (vma->vm_end - vma->vm_start) >> PAGE_SHIFT
32 - ((vma->vm_end - vma->vm_start) >> PAGE_SHIFT)
44 (vma->vm_end@p - vma->vm_start) >> PAGE_SHIFT
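
The semantic patch above rewrites this open-coded page count into a call to the kernel's vma_pages() helper, which is defined in include/linux/mm.h as exactly (vma->vm_end - vma->vm_start) >> PAGE_SHIFT. A short before/after illustration of the transformation; the example_* function names are invented for the sketch and are not taken from the listing.

#include <linux/mm.h>

/* Before: the open-coded form the vma_pages.cocci rule flags. */
static unsigned long example_npages_open_coded(struct vm_area_struct *vma)
{
	return (vma->vm_end - vma->vm_start) >> PAGE_SHIFT;
}

/* After: the equivalent helper call the rule substitutes. */
static unsigned long example_npages_helper(struct vm_area_struct *vma)
{
	return vma_pages(vma);
}
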
/linux/arch/ia64/include/asm/
fb.h
13 if (efi_range_is_wc(vma->vm_start, vma->vm_end - vma->vm_start)) in fb_pgprotect()
/linux/arch/csky/include/asm/
tlb.h
11 flush_cache_range(vma, (vma)->vm_start, (vma)->vm_end); \
17 flush_tlb_range(vma, (vma)->vm_start, (vma)->vm_end); \
/linux/drivers/soc/qcom/
rmtfs_mem.c
136 if (vma->vm_end - vma->vm_start > rmtfs_mem->size) { in qcom_rmtfs_mem_mmap()
139 vma->vm_end, vma->vm_start, in qcom_rmtfs_mem_mmap()
140 (vma->vm_end - vma->vm_start), &rmtfs_mem->size); in qcom_rmtfs_mem_mmap()
146 vma->vm_start, in qcom_rmtfs_mem_mmap()
148 vma->vm_end - vma->vm_start, in qcom_rmtfs_mem_mmap()
/linux/arch/arm64/kernel/
vdso.c
84 current->mm->context.vdso = (void *)new_vma->vm_start; in vdso_mremap()
143 unsigned long size = vma->vm_end - vma->vm_start; in vdso_join_timens()
146 zap_page_range(vma, vma->vm_start, size); in vdso_join_timens()
149 zap_page_range(vma, vma->vm_start, size); in vdso_join_timens()
277 current->mm->context.sigpage = (void *)new_vma->vm_start; in aarch32_sigpage_mremap()
/linux/arch/x86/um/
mem_32.c
17 gate_vma.vm_start = FIXADDR_USER_START; in gate_vma_init()
49 return (addr >= vma->vm_start) && (addr < vma->vm_end); in in_gate_area()
/linux/arch/arc/kernel/
arc_hostlink.c
22 if (io_remap_pfn_range(vma, vma->vm_start, vma->vm_pgoff, in arc_hl_mmap()
23 vma->vm_end - vma->vm_start, in arc_hl_mmap()
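
The driver hits in this listing (drm_mmap_locked(), qcom_rmtfs_mem_mmap(), coproc_mmap(), arc_hl_mmap()) follow the same mmap pattern: the requested length is vma->vm_end - vma->vm_start, it is checked against the backing region, and the pages are remapped starting at vma->vm_start. A hedged kernel-style sketch of that pattern follows; example_mmap and the EXAMPLE_* constants are made up for illustration and do not come from the files above.

#include <linux/fs.h>
#include <linux/mm.h>

#define EXAMPLE_REGION_PFN  0x10000UL           /* hypothetical device PFN */
#define EXAMPLE_REGION_SIZE (16UL * PAGE_SIZE)  /* hypothetical region size */

static int example_mmap(struct file *file, struct vm_area_struct *vma)
{
	unsigned long len = vma->vm_end - vma->vm_start;

	/* refuse mappings larger than the backing region (cf. coproc_mmap) */
	if (len > EXAMPLE_REGION_SIZE)
		return -EINVAL;

	/* map len bytes of the region starting at the first address of the VMA */
	return remap_pfn_range(vma, vma->vm_start,
			       EXAMPLE_REGION_PFN + vma->vm_pgoff,
			       len, vma->vm_page_prot);
}
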
/linux/arch/powerpc/platforms/book3s/
vas-api.c
364 if ((vma->vm_end - vma->vm_start) > PAGE_SIZE) { in coproc_mmap()
366 (vma->vm_end - vma->vm_start), PAGE_SIZE); in coproc_mmap()
395 rc = remap_pfn_range(vma, vma->vm_start, pfn + vma->vm_pgoff, in coproc_mmap()
396 vma->vm_end - vma->vm_start, prot); in coproc_mmap()
399 paste_addr, vma->vm_start, rc); in coproc_mmap()
/linux/arch/riscv/kvm/
mmu.c
517 hva_t vm_start, vm_end; in kvm_arch_prepare_memory_region() local
519 if (!vma || vma->vm_start >= reg_end) in kvm_arch_prepare_memory_region()
532 vm_start = max(hva, vma->vm_start); in kvm_arch_prepare_memory_region()
537 (vm_start - mem->userspace_addr); in kvm_arch_prepare_memory_region()
541 pa += vm_start - vma->vm_start; in kvm_arch_prepare_memory_region()
550 vm_end - vm_start, writable); in kvm_arch_prepare_memory_region()
/linux/fs/
userfaultfd.c
1332 if (vma->vm_start >= end) in userfaultfd_register()
1379 end > cur->vm_start) { in userfaultfd_register()
1411 if (vma->vm_start < start) in userfaultfd_register()
1431 if (vma->vm_start > start) in userfaultfd_register()
1432 start = vma->vm_start; in userfaultfd_register()
1444 if (vma->vm_start < start) { in userfaultfd_register()
1538 if (vma->vm_start >= end) in userfaultfd_unregister()
1577 if (vma->vm_start < start) in userfaultfd_unregister()
1595 if (vma->vm_start > start) in userfaultfd_unregister()
1596 start = vma->vm_start; in userfaultfd_unregister()
[all …]

