Searched refs:mmfr0 (Results 1 – 7 of 7) sorted by relevance
/linux/arch/arm64/include/asm/
cpufeature.h
    653  u64 mmfr0;  in system_supports_4kb_granule() local
    656  mmfr0 = read_sanitised_ftr_reg(SYS_ID_AA64MMFR0_EL1);  in system_supports_4kb_granule()
    657  val = cpuid_feature_extract_unsigned_field(mmfr0,  in system_supports_4kb_granule()
    666  u64 mmfr0;  in system_supports_64kb_granule() local
    669  mmfr0 = read_sanitised_ftr_reg(SYS_ID_AA64MMFR0_EL1);  in system_supports_64kb_granule()
    670  val = cpuid_feature_extract_unsigned_field(mmfr0,  in system_supports_64kb_granule()
    679  u64 mmfr0;  in system_supports_16kb_granule() local
    682  mmfr0 = read_sanitised_ftr_reg(SYS_ID_AA64MMFR0_EL1);  in system_supports_16kb_granule()
    683  val = cpuid_feature_extract_unsigned_field(mmfr0,  in system_supports_16kb_granule()
    697  u64 mmfr0;  in system_supports_mixed_endian() local
    [all …]
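All of the cpufeature.h helpers above share one pattern: read the sanitised system-wide copy of ID_AA64MMFR0_EL1 and pull out a 4-bit field. Below is a minimal standalone sketch of that extraction, using an arbitrary example register value in place of read_sanitised_ftr_reg() and a simplified supported/not-supported test instead of the kernel's range check:

    #include <stdint.h>
    #include <stdio.h>

    /* Architectural ID_AA64MMFR0_EL1 field offsets. */
    #define TGRAN4_SHIFT   28   /* 4KB granule support  */
    #define TGRAN64_SHIFT  24   /* 64KB granule support */
    #define TGRAN16_SHIFT  20   /* 16KB granule support */

    /* Mirrors the idea of cpuid_feature_extract_unsigned_field():
     * ID register fields are 4 bits wide. */
    static unsigned int extract_field(uint64_t reg, unsigned int shift)
    {
        return (reg >> shift) & 0xf;
    }

    int main(void)
    {
        /* Arbitrary example value; the kernel reads the sanitised
         * system-wide register instead. */
        uint64_t mmfr0 = 0x1122ULL;

        /* Architecturally, TGran4/TGran64 read 0b1111 when the granule
         * is absent, while TGran16 reads 0b0000 when absent. */
        printf("4KB granule:  %s\n",
               extract_field(mmfr0, TGRAN4_SHIFT) != 0xf ? "yes" : "no");
        printf("64KB granule: %s\n",
               extract_field(mmfr0, TGRAN64_SHIFT) != 0xf ? "yes" : "no");
        printf("16KB granule: %s\n",
               extract_field(mmfr0, TGRAN16_SHIFT) != 0x0 ? "yes" : "no");
        return 0;
    }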
kvm_pgtable.h
     16  static inline u64 kvm_get_parange(u64 mmfr0)  in kvm_get_parange() argument
     18  u64 parange = cpuid_feature_extract_unsigned_field(mmfr0,  in kvm_get_parange()
    268  u64 kvm_get_vtcr(u64 mmfr0, u64 mmfr1, u32 phys_shift);
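kvm_get_parange() extracts the PARange field from bits [3:0]; kvm_get_vtcr() later shifts the result into VTCR_EL2.PS (see pgtable.c below). The following is a hedged sketch of the architectural PARange encoding only; parange_to_pa_bits() is an illustrative helper name, not a kernel function:

    #include <stdint.h>
    #include <stdio.h>

    /* Architectural PARange encodings in ID_AA64MMFR0_EL1[3:0]:
     * 0 -> 32, 1 -> 36, 2 -> 40, 3 -> 42, 4 -> 44, 5 -> 48, 6 -> 52 bits.
     * Unknown higher encodings are clamped here for simplicity. */
    static unsigned int parange_to_pa_bits(unsigned int parange)
    {
        static const unsigned int pa_bits[] = { 32, 36, 40, 42, 44, 48, 52 };
        unsigned int max = sizeof(pa_bits) / sizeof(pa_bits[0]) - 1;

        if (parange > max)
            parange = max;
        return pa_bits[parange];
    }

    int main(void)
    {
        uint64_t mmfr0 = 0x1125ULL;          /* arbitrary example value */
        unsigned int parange = mmfr0 & 0xf;  /* PARange is bits [3:0]   */

        printf("PARange %u -> %u-bit physical addresses\n",
               parange, parange_to_pa_bits(parange));
        return 0;
    }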
/linux/arch/arm64/kvm/ |
reset.c
    319  u64 mmfr0;  in kvm_set_ipa_limit() local
    321  mmfr0 = read_sanitised_ftr_reg(SYS_ID_AA64MMFR0_EL1);  in kvm_set_ipa_limit()
    322  parange = cpuid_feature_extract_unsigned_field(mmfr0,  in kvm_set_ipa_limit()
    337  switch (cpuid_feature_extract_unsigned_field(mmfr0, ID_AA64MMFR0_TGRAN_2_SHIFT)) {  in kvm_set_ipa_limit()
    362  u64 mmfr0, mmfr1;  in kvm_arm_setup_stage2() local
    382  mmfr0 = read_sanitised_ftr_reg(SYS_ID_AA64MMFR0_EL1);  in kvm_arm_setup_stage2()
    384  kvm->arch.vtcr = kvm_get_vtcr(mmfr0, mmfr1, phys_shift);  in kvm_arm_setup_stage2()
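kvm_set_ipa_limit() also switches on the stage-2 variant of the granule field; the kernel's ID_AA64MMFR0_TGRAN_2_SHIFT aliases whichever TGranX_2 field matches the configured page size. A small sketch of those architectural encodings for the 4KB case, again with an arbitrary example value:

    #include <stdint.h>
    #include <stdio.h>

    #define TGRAN4_2_SHIFT  40   /* TGran4_2 lives in ID_AA64MMFR0_EL1[43:40] */

    int main(void)
    {
        uint64_t mmfr0 = 0x2ULL << TGRAN4_2_SHIFT;   /* example value */

        switch ((mmfr0 >> TGRAN4_2_SHIFT) & 0xf) {
        case 0:  /* stage-2 support follows the stage-1 TGran4 field */
            printf("follow the stage-1 field\n");
            break;
        case 1:  /* granule not implemented at stage 2 */
            printf("4KB not supported at stage 2\n");
            break;
        default: /* 2 and above: implemented at stage 2 */
            printf("4KB supported at stage 2\n");
            break;
        }
        return 0;
    }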
/linux/arch/arm64/mm/ |
init.c
    287  u64 mmfr0 = read_cpuid(ID_AA64MMFR0_EL1);  in arm64_memblock_init() local
    289  mmfr0, ID_AA64MMFR0_PARANGE_SHIFT);  in arm64_memblock_init()
/linux/arch/arm/kernel/ |
setup.c
    261  unsigned int mmfr0 = read_cpuid_ext(CPUID_EXT_MMFR0);  in __get_cpu_architecture() local
    262  if ((mmfr0 & 0x0000000f) >= 0x00000003 ||  in __get_cpu_architecture()
    263  (mmfr0 & 0x000000f0) >= 0x00000030)  in __get_cpu_architecture()
    265  else if ((mmfr0 & 0x0000000f) == 0x00000002 ||  in __get_cpu_architecture()
    266  (mmfr0 & 0x000000f0) == 0x00000020)  in __get_cpu_architecture()
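On 32-bit ARM, __get_cpu_architecture() keys off the VMSA (bits [3:0]) and PMSA (bits [7:4]) fields of ID_MMFR0: a value of 3 or more in either field indicates a v7 memory model, a value of 2 indicates v6. A standalone sketch of the same comparison, with an arbitrary example value standing in for read_cpuid_ext(CPUID_EXT_MMFR0):

    #include <stdio.h>

    int main(void)
    {
        unsigned int mmfr0 = 0x00000003;   /* example value: VMSA = 3 */

        /* VMSA >= 3 or PMSA >= 3 -> v7 MMU/MPU model;
         * a value of 2 in either field -> v6. */
        if ((mmfr0 & 0x0000000f) >= 0x00000003 ||
            (mmfr0 & 0x000000f0) >= 0x00000030)
            printf("CPU_ARCH_ARMv7\n");
        else if ((mmfr0 & 0x0000000f) == 0x00000002 ||
                 (mmfr0 & 0x000000f0) == 0x00000020)
            printf("CPU_ARCH_ARMv6\n");
        else
            printf("unknown\n");
        return 0;
    }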
/linux/arch/arm64/kvm/hyp/ |
pgtable.c
    520  u64 kvm_get_vtcr(u64 mmfr0, u64 mmfr1, u32 phys_shift)  in kvm_get_vtcr() argument
    525  vtcr |= kvm_get_parange(mmfr0) << VTCR_EL2_PS_SHIFT;  in kvm_get_vtcr()
/linux/arch/arm64/kernel/ |
cpufeature.c
    2778  u64 safe_mmfr1, mmfr0, mmfr1;  in verify_hyp_capabilities() local
    2786  mmfr0 = read_cpuid(ID_AA64MMFR0_EL1);  in verify_hyp_capabilities()
    2798  parange = cpuid_feature_extract_unsigned_field(mmfr0,  in verify_hyp_capabilities()
Completed in 27 milliseconds