/linux/arch/powerpc/include/asm/book3s/64/
hash-64k.h
     94  real_pte_t rpte;    [in __real_pte(), local]
     97  rpte.pte = pte;    [in __real_pte()]
    107  rpte.hidx = *hidxp;    [in __real_pte()]
    108  return rpte;    [in __real_pte()]
    125  return HIDX_UNSHIFT_BY_ONE(BITS_TO_HIDX(rpte.hidx, index));    [in __rpte_to_hidx()]
    132  static inline unsigned long pte_set_hidx(pte_t *ptep, real_pte_t rpte,    [in pte_set_hidx(), argument]
    138  rpte.hidx &= ~HIDX_BITS(0xfUL, subpg_index);    [in pte_set_hidx()]
    139  *hidxp = rpte.hidx | HIDX_BITS(HIDX_SHIFT_BY_ONE(hidx), subpg_index);    [in pte_set_hidx()]
    153  extern bool __rpte_sub_valid(real_pte_t rpte, unsigned long index);
    158  #define pte_iterate_hashed_subpages(rpte, psize, vpn, index, shift) \    [argument]
    [all …]
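The hash-64k.h hits show how the 64K-page variant packs one 4-bit hash-slot index (hidx) per 4K subpage into rpte.hidx, with each value stored shifted by one (HIDX_SHIFT_BY_ONE) so that an untouched zero nibble can stand for "no HPTE". The following is a minimal, standalone userspace model of that packing, not the kernel code: the macro bodies for HIDX_BITS/BITS_TO_HIDX and the assumption that INVALID_RPTE_HIDX is 0 are reconstructions from the snippets above, and the kernel's extra handling for a hidx that would alias the invalid encoding is not modelled.

/*
 * Standalone model of the per-subpage hidx packing visible in hash-64k.h.
 * Assumption: each 4K subpage of a 64K page gets a 4-bit nibble in one
 * unsigned long, stored as (real hidx + 1) so that 0 means "no HPTE yet".
 */
#include <assert.h>
#include <stdio.h>

#define HIDX_BITS(x, index)      ((x) << ((index) << 2))            /* place a nibble */
#define BITS_TO_HIDX(x, index)   (((x) >> ((index) << 2)) & 0xfUL)  /* extract a nibble */
#define HIDX_SHIFT_BY_ONE(x)     ((x) + 1)
#define HIDX_UNSHIFT_BY_ONE(x)   ((x) - 1)
#define INVALID_RPTE_HIDX        0x0UL   /* assumed value */

struct model_rpte {
	unsigned long pte;	/* the Linux PTE value */
	unsigned long hidx;	/* 16 nibbles, one per 4K subpage */
};

/* record slot 'hidx' for subpage 'subpg_index' (mirrors pte_set_hidx()) */
static void model_set_hidx(struct model_rpte *rpte, unsigned int subpg_index,
			   unsigned long hidx)
{
	rpte->hidx &= ~HIDX_BITS(0xfUL, subpg_index);
	rpte->hidx |= HIDX_BITS(HIDX_SHIFT_BY_ONE(hidx), subpg_index);
}

/* mirrors __rpte_to_hidx(): an unused nibble decodes to ~0UL */
static unsigned long model_to_hidx(struct model_rpte rpte, unsigned long index)
{
	return HIDX_UNSHIFT_BY_ONE(BITS_TO_HIDX(rpte.hidx, index));
}

int main(void)
{
	struct model_rpte rpte = { .pte = 0, .hidx = INVALID_RPTE_HIDX };

	model_set_hidx(&rpte, 3, 0x5);			/* subpage 3 got HPTE slot 5 */
	assert(model_to_hidx(rpte, 3) == 0x5);
	assert((model_to_hidx(rpte, 7) & 0xfUL) == 0xfUL);	/* subpage 7 still invalid */
	printf("hidx word: %#lx\n", rpte.hidx);
	return 0;
}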
hash-4k.h
     99  static inline unsigned long pte_set_hidx(pte_t *ptep, real_pte_t rpte,    [in pte_set_hidx(), argument]
hash.h
    217  int ssize, real_pte_t rpte, unsigned int subpg_index);
pgtable.h
    350  #define pte_iterate_hashed_subpages(rpte, psize, va, index, shift) \    [argument]
/linux/arch/powerpc/mm/book3s64/
hash_64k.c
     31  bool __rpte_sub_valid(real_pte_t rpte, unsigned long index)    [in __rpte_sub_valid(), argument]
     33  return !(hpte_soft_invalid(__rpte_to_hidx(rpte, index)));    [in __rpte_sub_valid()]
     40  real_pte_t rpte;    [in __hash_page_4K(), local]
     89  rpte = __real_pte(__pte(old_pte), ptep, PTRS_PER_PTE);    [in __hash_page_4K()]
    100  flush_hash_page(vpn, rpte, MMU_PAGE_64K, ssize, flags);    [in __hash_page_4K()]
    113  if (__rpte_sub_valid(rpte, subpg_index)) {    [in __hash_page_4K()]
    116  gslot = pte_get_hash_gslot(vpn, shift, ssize, rpte,    [in __hash_page_4K()]
    140  rpte.hidx = INVALID_RPTE_HIDX;    [in __hash_page_4K()]
    227  real_pte_t rpte;    [in __hash_page_64K(), local]
    264  rpte = __real_pte(__pte(old_pte), ptep, PTRS_PER_PTE);    [in __hash_page_64K()]
    [all …]
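The hash_64k.c hits outline the demand-fault flow in __hash_page_4K(): gather the PTE plus hidx word with __real_pte(), then for the faulting 4K subpage either update the HPTE whose slot was recorded earlier (recovered via pte_get_hash_gslot() when __rpte_sub_valid() says one exists), or insert a fresh HPTE and record its slot. Below is a compact, self-contained sketch of that decision; hpte_insert()/hpte_update() are hypothetical stand-ins for the real mmu_hash_ops, and the soft-invalid test is reconstructed from the encoding rather than copied from the kernel.

/*
 * Sketch of the per-subpage decision in __hash_page_4K() (hash_64k.c).
 * hpte_insert()/hpte_update() are hypothetical stand-ins for the real
 * MMU hash ops; only the shape of the validity test follows the listing.
 */
#include <stdbool.h>
#include <stdio.h>

#define BITS_TO_HIDX(x, index)   (((x) >> ((index) << 2)) & 0xfUL)
#define HIDX_UNSHIFT_BY_ONE(x)   ((x) - 1)

struct model_rpte { unsigned long pte, hidx; };

/* soft-invalid: a decoded hidx of 0xf means no HPTE recorded (model assumption) */
static bool hpte_soft_invalid(unsigned long hidx)
{
	return (hidx & 0xfUL) == 0xfUL;
}

static unsigned long rpte_to_hidx(struct model_rpte rpte, unsigned long index)
{
	return HIDX_UNSHIFT_BY_ONE(BITS_TO_HIDX(rpte.hidx, index));
}

/* mirrors the shape of __rpte_sub_valid() */
static bool rpte_sub_valid(struct model_rpte rpte, unsigned long index)
{
	return !hpte_soft_invalid(rpte_to_hidx(rpte, index));
}

/* hypothetical stand-ins for the real hash-table operations */
static long hpte_update(unsigned long slot) { printf("update HPTE at slot %lu\n", slot); return 0; }
static long hpte_insert(void)               { printf("insert new HPTE\n");               return 5; }

static void handle_subpage_fault(struct model_rpte *rpte, unsigned int subpg_index)
{
	if (rpte_sub_valid(*rpte, subpg_index)) {
		/* an HPTE was recorded earlier for this subpage: update it in place
		 * (the kernel turns the hidx into a global slot via pte_get_hash_gslot()) */
		hpte_update(rpte_to_hidx(*rpte, subpg_index));
	} else {
		/* no HPTE yet: insert one and remember its slot, as pte_set_hidx() does */
		long slot = hpte_insert();
		rpte->hidx |= ((unsigned long)slot + 1) << (subpg_index << 2);
	}
}

int main(void)
{
	struct model_rpte rpte = { .pte = 0, .hidx = 0 };

	handle_subpage_fault(&rpte, 2);	/* first fault on subpage 2: insert */
	handle_subpage_fault(&rpte, 2);	/* second fault: update the recorded slot */
	return 0;
}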
hash_4k.c
     23  real_pte_t rpte;    [in __hash_page_4K(), local]
     58  rpte = __real_pte(__pte(old_pte), ptep, PTRS_PER_PTE);    [in __hash_page_4K()]
     70  rpte, 0);    [in __hash_page_4K()]
    120  new_pte |= pte_set_hidx(ptep, rpte, 0, slot, PTRS_PER_PTE);    [in __hash_page_4K()]
hash_hugetlbpage.c
     23  real_pte_t rpte;    [in __hash_page_huge(), local]
     74  rpte = __real_pte(__pte(old_pte), ptep, offset);    [in __hash_page_huge()]
     88  gslot = pte_get_hash_gslot(vpn, shift, ssize, rpte, 0);    [in __hash_page_huge()]
    116  new_pte |= pte_set_hidx(ptep, rpte, 0, slot, offset);    [in __hash_page_huge()]
hash_tlb.c
     48  real_pte_t rpte;    [in hpte_need_flush(), local]
     97  rpte = __real_pte(__pte(pte), ptep, offset);    [in hpte_need_flush()]
    104  flush_hash_page(vpn, rpte, psize, ssize, mm_is_thread_local(mm));    [in hpte_need_flush()]
    129  batch->pte[i] = rpte;    [in hpte_need_flush()]
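hash_tlb.c shows the other consumer of real_pte_t: hpte_need_flush() captures the PTE plus hidx word at PTE-clear time, then either flushes the hash page right away (flush_hash_page) or queues the rpte/vpn pair in a per-CPU batch (batch->pte[i]). A rough, self-contained model of that "flush now or batch" choice follows; the batch size, layout, and trigger condition are illustrative assumptions, not the kernel's ppc64_tlb_batch definition.

/*
 * Model of the "flush immediately or queue in a batch" choice seen in
 * hpte_need_flush() (hash_tlb.c).  Batch layout and size are assumptions.
 */
#include <stdio.h>

#define MODEL_BATCH_NR	192

struct model_rpte { unsigned long pte, hidx; };

struct model_tlb_batch {
	unsigned long		index;
	struct model_rpte	pte[MODEL_BATCH_NR];
	unsigned long		vpn[MODEL_BATCH_NR];
};

static void flush_one(unsigned long vpn, struct model_rpte rpte)
{
	printf("flush vpn %#lx now\n", vpn);
}

static void flush_batch(struct model_tlb_batch *batch)
{
	printf("flush %lu queued entries\n", batch->index);
	batch->index = 0;
}

static void need_flush(struct model_tlb_batch *batch, unsigned long vpn,
		       struct model_rpte rpte, int batch_active)
{
	if (!batch_active) {		/* no batch open: flush right away */
		flush_one(vpn, rpte);
		return;
	}
	unsigned long i = batch->index;	/* otherwise remember it for a later batched flush */
	batch->pte[i] = rpte;
	batch->vpn[i] = vpn;
	if (++batch->index == MODEL_BATCH_NR)
		flush_batch(batch);
}

int main(void)
{
	static struct model_tlb_batch batch;
	struct model_rpte rpte = { 0, 0 };

	need_flush(&batch, 0x1000, rpte, 0);	/* no batch active: immediate flush */
	need_flush(&batch, 0x2000, rpte, 1);	/* batch active: queued */
	flush_batch(&batch);
	return 0;
}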
hash_utils.c
   1804  int ssize, real_pte_t rpte, unsigned int subpg_index)    [in pte_get_hash_gslot(), argument]
   1809  hidx = __rpte_to_hidx(rpte, subpg_index);    [in pte_get_hash_gslot()]
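hash_utils.c defines pte_get_hash_gslot(), which turns a recorded hidx back into a global HPTE slot number. The listing only shows the signature and the __rpte_to_hidx() call; the sketch below fills in the usual layout, where the low three hidx bits select the slot within an eight-entry hash group and a fourth bit picks the secondary hash bucket. Treat the constants and the toy hash function as assumptions, not the kernel's hpt_hash().

/*
 * Sketch of how a recorded hidx is turned back into a global HPTE slot,
 * in the spirit of pte_get_hash_gslot() (hash_utils.c).  The group layout
 * (8 slots per group, low 3 bits = slot, bit 3 = secondary hash) and the
 * toy hash are assumptions made for illustration.
 */
#include <stdio.h>

#define HPTES_PER_GROUP	8
#define IDX_SECONDARY	0x8UL	/* hidx bit: HPTE lives in the secondary bucket */
#define IDX_GROUP_IX	0x7UL	/* hidx bits: slot within the 8-entry group */

static unsigned long htab_hash_mask = (1UL << 20) - 1;	/* toy HPT size */

/* toy stand-in for the real hash of (vpn, shift, ssize) */
static unsigned long toy_hash(unsigned long vpn)
{
	return (vpn * 0x9e3779b97f4a7c15UL) >> 32;
}

static unsigned long get_hash_gslot(unsigned long vpn, unsigned long hidx)
{
	unsigned long hash = toy_hash(vpn);

	if (hidx & IDX_SECONDARY)	/* secondary bucket uses the inverted hash */
		hash = ~hash;
	return (hash & htab_hash_mask) * HPTES_PER_GROUP + (hidx & IDX_GROUP_IX);
}

int main(void)
{
	/* e.g. hidx 0xd = secondary bucket, slot 5 within the group */
	printf("gslot = %lu\n", get_hash_gslot(0x12345, 0xdUL));
	return 0;
}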
/linux/arch/powerpc/kvm/
book3s_64_mmu_hv.c
    520  r = vcpu->arch.pgfault_cache->rpte;    [in kvmppc_book3s_hv_page_fault()]
   1212  unsigned long vpte, rpte, guest_rpte;    [in resize_hpt_rehash_hpte(), local]
   1239  rpte = be64_to_cpu(hptep[1]);    [in resize_hpt_rehash_hpte()]
   1240  vpte = hpte_new_to_old_v(vpte, rpte);    [in resize_hpt_rehash_hpte()]
   1279  rpte = be64_to_cpu(hptep[1]);    [in resize_hpt_rehash_hpte()]
   1282  vpte = hpte_new_to_old_v(vpte, rpte);    [in resize_hpt_rehash_hpte()]
   1283  rpte = hpte_new_to_old_r(rpte);    [in resize_hpt_rehash_hpte()]
   1286  pshift = kvmppc_hpte_base_page_shift(vpte, rpte);    [in resize_hpt_rehash_hpte()]
   1343  rpte = hpte_old_to_new_r(vpte, rpte);    [in resize_hpt_rehash_hpte()]
   1347  new_hptep[1] = cpu_to_be64(rpte);    [in resize_hpt_rehash_hpte()]
book3s_64_mmu_radix.c
    139  __be64 rpte;    [in kvmppc_mmu_walk_radix_tree(), local]
    166  addr = base + (index * sizeof(rpte));    [in kvmppc_mmu_walk_radix_tree()]
    168  ret = kvm_read_guest(kvm, addr, &rpte, sizeof(rpte));    [in kvmppc_mmu_walk_radix_tree()]
    175  pte = __be64_to_cpu(rpte);    [in kvmppc_mmu_walk_radix_tree()]
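In book3s_64_mmu_radix.c, rpte is simply one big-endian 64-bit entry of the guest's radix page table: the walker computes the entry address as base + index * sizeof(entry), copies it out of guest memory with kvm_read_guest(), and byte-swaps it before decoding. A userspace analogue of that read-and-swap step, with the table held in a local array rather than guest memory and made-up contents:

/*
 * Userspace analogue of the guest radix PTE fetch in
 * kvmppc_mmu_walk_radix_tree(): index into a big-endian table and convert
 * the entry to host byte order before decoding it (Linux/glibc <endian.h>).
 */
#include <endian.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

int main(void)
{
	/* pretend this is one 512-entry radix level read from guest memory */
	uint64_t level[512];
	memset(level, 0, sizeof(level));
	level[7] = htobe64(0x8000000001234567ULL);	/* guest tables are big-endian */

	unsigned long index = 7;
	uint64_t rpte_be;

	/* addr = base + index * sizeof(rpte) in the kernel walker */
	memcpy(&rpte_be, (char *)level + index * sizeof(rpte_be), sizeof(rpte_be));

	uint64_t pte = be64toh(rpte_be);		/* the __be64_to_cpu() step */
	printf("entry %lu = 0x%016llx\n", index, (unsigned long long)pte);
	return 0;
}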
book3s_hv_rm_mmu.c
   1220  gr = cache_entry->rpte;    [in kvmppc_hpte_hv_fault()]
   1296  cache_entry->rpte = gr;    [in kvmppc_hpte_hv_fault()]
/linux/arch/powerpc/include/asm/
kvm_host.h
    467  unsigned long rpte;    [member]