Searched refs:vaddr_end (Results 1 – 9 of 9) sorted by relevance

/linux/arch/x86/mm/
mem_encrypt_identity.c 80 unsigned long vaddr_end; member
106 pgd_end = ppd->vaddr_end & PGDIR_MASK; in sme_clear_pgd()
196 while (ppd->vaddr < ppd->vaddr_end) { in __sme_map_range_pmd()
217 unsigned long vaddr_end; in __sme_map_range() local
223 vaddr_end = ppd->vaddr_end; in __sme_map_range()
230 ppd->vaddr_end = vaddr_end & PMD_PAGE_MASK; in __sme_map_range()
234 ppd->vaddr_end = vaddr_end; in __sme_map_range()
402 ppd.vaddr_end = workarea_end; in sme_encrypt_kernel()
436 ppd.vaddr_end = kernel_end; in sme_encrypt_kernel()
449 ppd.vaddr_end = initrd_end; in sme_encrypt_kernel()
[all …]
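
The mem_encrypt_identity.c hits show a descriptor carrying a half-open [vaddr, vaddr_end) range: __sme_map_range_pmd() walks it in PMD-sized steps, and __sme_map_range() saves the caller's vaddr_end, temporarily clamps it to a PMD-aligned boundary, then restores it. A minimal userspace sketch of that clamp-and-restore idiom; the struct, constants, and printf calls are stand-ins for the kernel's ppd descriptor and page-table updates, not the real implementation:

#include <stdio.h>

#define PMD_PAGE_SIZE (2UL << 20)              /* 2 MiB, illustrative */
#define PMD_PAGE_MASK (~(PMD_PAGE_SIZE - 1))

struct ppd_data {                              /* stand-in for the kernel's ppd descriptor */
	unsigned long vaddr;
	unsigned long vaddr_end;
};

static void map_range_pmd(struct ppd_data *ppd)
{
	/* half-open walk in PMD-sized steps, as in __sme_map_range_pmd() */
	while (ppd->vaddr < ppd->vaddr_end) {
		printf("map 2M page at %#lx\n", ppd->vaddr);
		ppd->vaddr += PMD_PAGE_SIZE;
	}
}

static void map_range(struct ppd_data *ppd)
{
	unsigned long vaddr_end = ppd->vaddr_end;   /* remember the real end */

	ppd->vaddr_end = vaddr_end & PMD_PAGE_MASK; /* cover only the PMD-aligned part first */
	map_range_pmd(ppd);

	ppd->vaddr_end = vaddr_end;                 /* restore; the unaligned tail would follow */
	printf("tail [%#lx, %#lx) left for smaller pages\n", ppd->vaddr, ppd->vaddr_end);
}

int main(void)
{
	struct ppd_data ppd = { .vaddr = 0x200000, .vaddr_end = 0x6a0000 };
	map_range(&ppd);
	return 0;
}
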
mem_encrypt.c 265 unsigned long vaddr_end = vaddr + sz; in notify_range_enc_status_changed() local
267 while (vaddr < vaddr_end) { in notify_range_enc_status_changed()
336 unsigned long vaddr_end, vaddr_next, start; in early_set_memory_enc_dec() local
344 vaddr_end = vaddr + size; in early_set_memory_enc_dec()
346 for (; vaddr < vaddr_end; vaddr = vaddr_next) { in early_set_memory_enc_dec()
369 ((vaddr_end - vaddr) >= psize)) { in early_set_memory_enc_dec()
391 __pa((vaddr_end & pmask) + psize), in early_set_memory_enc_dec()
446 unsigned long vaddr, vaddr_end, npages; in mem_encrypt_free_decrypted_mem() local
450 vaddr_end = (unsigned long)__end_bss_decrypted; in mem_encrypt_free_decrypted_mem()
451 npages = (vaddr_end - vaddr) >> PAGE_SHIFT; in mem_encrypt_free_decrypted_mem()
[all …]
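
Both mem_encrypt.c sites compute an exclusive end, vaddr_end = vaddr + size, then either walk the range one page at a time or derive a page count from the difference. A standalone sketch of those two idioms, assuming a 4 KiB page size for illustration:

#include <stdio.h>

#define PAGE_SHIFT 12                          /* 4 KiB pages, assumed for the example */
#define PAGE_SIZE  (1UL << PAGE_SHIFT)

int main(void)
{
	unsigned long vaddr = 0x7000;
	unsigned long sz = 5 * PAGE_SIZE;
	unsigned long vaddr_end = vaddr + sz;                     /* exclusive end */
	unsigned long npages = (vaddr_end - vaddr) >> PAGE_SHIFT; /* as in mem_encrypt_free_decrypted_mem() */

	printf("%lu pages in [%#lx, %#lx)\n", npages, vaddr, vaddr_end);

	/* per-page walk, as in notify_range_enc_status_changed() */
	while (vaddr < vaddr_end) {
		printf("page at %#lx\n", vaddr);
		vaddr += PAGE_SIZE;
	}
	return 0;
}
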
kaslr.c 41 static const unsigned long vaddr_end = CPU_ENTRY_AREA_BASE; variable
81 BUILD_BUG_ON(vaddr_start >= vaddr_end); in kernel_randomize_memory()
82 BUILD_BUG_ON(vaddr_end != CPU_ENTRY_AREA_BASE); in kernel_randomize_memory()
83 BUILD_BUG_ON(vaddr_end > __START_KERNEL_map); in kernel_randomize_memory()
112 remain_entropy = vaddr_end - vaddr_start; in kernel_randomize_memory()
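In kaslr.c, vaddr_end is a fixed upper bound (CPU_ENTRY_AREA_BASE); the BUILD_BUG_ON()s reject a misordered region at compile time, and the entropy budget is simply vaddr_end - vaddr_start. A hedged sketch using C11 _Static_assert in place of BUILD_BUG_ON(), with made-up region constants rather than the kernel's:

#include <stdio.h>

/* made-up bounds standing in for vaddr_start and CPU_ENTRY_AREA_BASE */
#define REGION_START 0xffff880000000000UL
#define REGION_END   0xfffffe0000000000UL

/* compile-time ordering check, in the spirit of the BUILD_BUG_ON()s above */
_Static_assert(REGION_START < REGION_END, "randomization region must not be empty");

int main(void)
{
	unsigned long remain_entropy = REGION_END - REGION_START; /* space left to distribute */

	printf("entropy budget: %#lx bytes\n", remain_entropy);
	return 0;
}
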
init_64.c 424 unsigned long vaddr_end = __START_KERNEL_map + KERNEL_IMAGE_SIZE; in cleanup_highmap() local
434 vaddr_end = __START_KERNEL_map + (max_pfn_mapped << PAGE_SHIFT); in cleanup_highmap()
682 vaddr_end = (unsigned long)__va(paddr_end); in phys_p4d_init()
688 for (; vaddr < vaddr_end; vaddr = vaddr_next) { in phys_p4d_init()
708 paddr_last = phys_pud_init(pud, paddr, __pa(vaddr_end), in phys_p4d_init()
714 paddr_last = phys_pud_init(pud, paddr, __pa(vaddr_end), in phys_p4d_init()
736 vaddr_end = (unsigned long)__va(paddr_end); in __kernel_physical_mapping_init()
739 for (; vaddr < vaddr_end; vaddr = vaddr_next) { in __kernel_physical_mapping_init()
748 __pa(vaddr_end), in __kernel_physical_mapping_init()
755 paddr_last = phys_p4d_init(p4d, __pa(vaddr), __pa(vaddr_end), in __kernel_physical_mapping_init()
[all …]
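
The init_64.c hits convert a physical end address into a virtual one with __va(), iterate over the virtual range, and hand __pa(vaddr_end) back to the next-lower page-table level. A toy sketch of that round-trip, assuming a 64-bit unsigned long and a made-up direct-map offset in place of PAGE_OFFSET:

#include <stdio.h>

#define PAGE_OFFSET 0xffff888000000000UL       /* made-up direct-map base */
#define PUD_SIZE    (1UL << 30)                /* 1 GiB step for the sketch */

/* toy __va()/__pa(): the direct map is one constant offset, so the two convert freely */
static unsigned long toy_va(unsigned long paddr) { return paddr + PAGE_OFFSET; }
static unsigned long toy_pa(unsigned long vaddr) { return vaddr - PAGE_OFFSET; }

int main(void)
{
	unsigned long paddr_end = 4UL << 30;         /* pretend we map the first 4 GiB */
	unsigned long vaddr = toy_va(0);
	unsigned long vaddr_end = toy_va(paddr_end); /* as in phys_p4d_init() */
	unsigned long vaddr_next;

	for (; vaddr < vaddr_end; vaddr = vaddr_next) {
		vaddr_next = vaddr + PUD_SIZE;
		/* the next level down takes physical addresses again, hence __pa(vaddr_end) */
		printf("lower level sees physical [%#lx, %#lx)\n",
		       toy_pa(vaddr), toy_pa(vaddr_end));
	}
	return 0;
}
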
/linux/arch/x86/kernel/
head64.c 138 unsigned long vaddr, vaddr_end; in __startup_64() local
295 vaddr_end = (unsigned long)__end_bss_decrypted; in __startup_64()
296 for (; vaddr < vaddr_end; vaddr += PMD_SIZE) { in __startup_64()
/linux/Documentation/x86/x86_64/
mm.rst 58 | | | | vaddr_end for KASLR
117 | | | | vaddr_end for KASLR
/linux/mm/
huge_memory.c 2915 unsigned long vaddr_end) in split_huge_pages_pid() argument
2924 vaddr_end &= PAGE_MASK; in split_huge_pages_pid()
2947 pid, vaddr_start, vaddr_end); in split_huge_pages_pid()
2954 for (addr = vaddr_start; addr < vaddr_end; addr += PAGE_SIZE) { in split_huge_pages_pid()
3073 unsigned long vaddr_start, vaddr_end; in split_huge_pages_write() local
3114 ret = sscanf(input_buf, "%d,0x%lx,0x%lx", &pid, &vaddr_start, &vaddr_end); in split_huge_pages_write()
3124 ret = split_huge_pages_pid(pid, vaddr_start, vaddr_end); in split_huge_pages_write()
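
split_huge_pages_write() parses a "pid,vaddr_start,vaddr_end" triple from the debugfs input with sscanf(), vaddr_end is rounded down to a page boundary, and split_huge_pages_pid() then visits the range page by page. A self-contained sketch of that parse-and-walk flow; the input string and page size are illustrative only:

#include <stdio.h>

#define PAGE_SIZE 4096UL
#define PAGE_MASK (~(PAGE_SIZE - 1))

int main(void)
{
	/* stands in for a write to the split_huge_pages debugfs file */
	const char *input_buf = "1234,0x10001000,0x10004abc";
	int pid;
	unsigned long vaddr_start, vaddr_end, addr;

	if (sscanf(input_buf, "%d,0x%lx,0x%lx", &pid, &vaddr_start, &vaddr_end) != 3)
		return 1;

	vaddr_end &= PAGE_MASK;                    /* round the end down to a page boundary */

	for (addr = vaddr_start; addr < vaddr_end; addr += PAGE_SIZE)
		printf("pid %d: would try to split the huge page covering %#lx\n", pid, addr);

	return 0;
}
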
/linux/arch/x86/kvm/svm/
sev.c 493 unsigned long vaddr, vaddr_end, next_vaddr, npages, pages, size, i; in sev_launch_update_data() local
508 vaddr_end = vaddr + size; in sev_launch_update_data()
524 for (i = 0; vaddr < vaddr_end; vaddr = next_vaddr, i += pages) { in sev_launch_update_data()
908 unsigned long vaddr, vaddr_end, next_vaddr; in sev_dbg_crypt() local
929 vaddr_end = vaddr + size; in sev_dbg_crypt()
932 for (; vaddr < vaddr_end; vaddr = next_vaddr) { in sev_dbg_crypt()
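
sev_launch_update_data() and sev_dbg_crypt() also use an exclusive vaddr_end = vaddr + size, but advance by a per-iteration chunk (next_vaddr, pages) rather than a fixed step. A sketch of that chunked walk; the MAX_BATCH cap is a made-up stand-in for whatever bounds each SEV command, not the kernel's logic:

#include <stdio.h>

#define PAGE_SIZE 4096UL
#define MAX_BATCH 16UL                         /* made-up per-iteration limit */

int main(void)
{
	unsigned long vaddr = 0x10000;
	unsigned long size = 40 * PAGE_SIZE;
	unsigned long vaddr_end = vaddr + size;    /* exclusive end, as in sev_launch_update_data() */
	unsigned long next_vaddr, pages, i;

	/* same loop shape as the sev.c hits: advance by a variable-size chunk each round */
	for (i = 0; vaddr < vaddr_end; vaddr = next_vaddr, i += pages) {
		pages = (vaddr_end - vaddr) / PAGE_SIZE;
		if (pages > MAX_BATCH)
			pages = MAX_BATCH;
		next_vaddr = vaddr + pages * PAGE_SIZE;
		printf("process %lu pages at %#lx (%lu done so far)\n", pages, vaddr, i);
	}
	return 0;
}
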
/linux/arch/x86/xen/
mmu_pv.c 1003 unsigned long vaddr_end) in xen_cleanhighmap() argument
1010 for (; vaddr <= vaddr_end && (pmd < (level2_kernel_pgt + PTRS_PER_PMD)); in xen_cleanhighmap()
1028 void *vaddr_end = vaddr + size; in xen_free_ro_pages() local
1030 for (; vaddr < vaddr_end; vaddr += PAGE_SIZE) in xen_free_ro_pages()
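
xen_cleanhighmap() bounds its walk by two conditions at once: the (inclusive) address limit vaddr_end and the end of the PMD table, while xen_free_ro_pages() steps a void * by PAGE_SIZE, relying on GCC's byte-granular void-pointer arithmetic. A small sketch of the double-bounded loop, with a plain array standing in for level2_kernel_pgt and illustrative sizes:

#include <stdio.h>

#define PMD_SIZE     (2UL << 20)               /* 2 MiB per entry, illustrative */
#define PTRS_PER_PMD 512

int main(void)
{
	unsigned long table[PTRS_PER_PMD] = { 0 }; /* stands in for level2_kernel_pgt */
	unsigned long *pmd = table;
	unsigned long vaddr = 0, vaddr_end = 8 * PMD_SIZE;

	/* stop at whichever comes first: the (inclusive) address limit or the end of the table */
	for (; vaddr <= vaddr_end && pmd < table + PTRS_PER_PMD; pmd++, vaddr += PMD_SIZE)
		printf("clear entry %ld covering %#lx\n", (long)(pmd - table), vaddr);

	return 0;
}
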
