/linux/arch/powerpc/kernel/

head_40x.S
      69  lis r0,MSR_KERNEL@h
      70  ori r0,r0,MSR_KERNEL@l
     129  LOAD_REG_IMMEDIATE(r11, MSR_KERNEL & ~(MSR_ME|MSR_DE|MSR_CE)) /* re-enable MMU */
     619  lis r3,(MSR_KERNEL & ~(MSR_IR|MSR_DR))@h
     620  ori r3,r3,(MSR_KERNEL & ~(MSR_IR|MSR_DR))@l
     643  lis r4,MSR_KERNEL@h
     644  ori r4,r4,MSR_KERNEL@l
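The head_40x.S lines above use the standard 32-bit PowerPC idiom for loading a constant that does not fit a 16-bit immediate: lis installs the upper halfword (the @h relocation) and ori merges in the lower halfword (@l). The embedded MSR_KERNEL variants in reg_booke.h further down include MSR_CE, which sits above bit 15, so the value presumably no longer fits the signed 16-bit immediate a single li could supply. A minimal C sketch of the same split, using an illustrative value rather than any configuration's real MSR_KERNEL:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            uint32_t msr_kernel = 0x00021032;   /* illustrative value only */

            /* lis rN,CONST@h : put the upper 16 bits into bits 31..16 */
            uint32_t hi = (msr_kernel >> 16) & 0xffff;
            /* ori rN,rN,CONST@l : OR in the lower 16 bits (no sign extension) */
            uint32_t lo = msr_kernel & 0xffff;

            uint32_t reg = (hi << 16) | lo;     /* value the register ends up holding */
            printf("%#010x\n", (unsigned)reg);
            return 0;
    }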
head_32.h
      66  LOAD_REG_IMMEDIATE(r11, MSR_KERNEL & ~MSR_RI) /* re-enable MMU */
     104  li r10, MSR_KERNEL /* can take exceptions */
     146  LOAD_REG_IMMEDIATE(r11, MSR_KERNEL) /* can take exceptions */

fsl_booke_entry_mapping.S
     208  lis r7,MSR_KERNEL@h
     209  ori r7,r7,MSR_KERNEL@l

entry_32.S
     557  LOAD_REG_IMMEDIATE(r0,MSR_KERNEL)
     559  li r9,MSR_KERNEL & ~(MSR_IR|MSR_DR)
     568  LOAD_REG_IMMEDIATE(r9,MSR_KERNEL)

head_book3s_32.S
     873  li r4,MSR_KERNEL
     997  li r3,MSR_KERNEL & ~(MSR_IR|MSR_DR)
    1021  li r4,MSR_KERNEL
    1087  li r3, MSR_KERNEL & ~(MSR_IR | MSR_DR)
    1110  li r3, MSR_KERNEL & ~(MSR_IR | MSR_DR | MSR_RI)
head_fsl_booke.S
     287  lis r3,MSR_KERNEL@h
     288  ori r3,r3,MSR_KERNEL@l
    1047  lis r4,MSR_KERNEL@h
    1048  ori r4,r4,MSR_KERNEL@l

idle_book3s.S
     200  LOAD_REG_IMMEDIATE(r7, MSR_KERNEL|MSR_EE|MSR_POW)

head_8xx.S
     572  li r3,MSR_KERNEL & ~(MSR_IR|MSR_DR)
     613  li r4,MSR_KERNEL
     695  li r12, MSR_KERNEL & ~(MSR_IR | MSR_DR | MSR_RI)

paca.c
     224  new_paca->kernel_msr = MSR_KERNEL & ~(MSR_IR | MSR_DR);  in initialise_paca()
head_64.S
     235  LOAD_REG_IMMEDIATE(r5, MSR_KERNEL)
     838  LOAD_REG_IMMEDIATE(r4, MSR_KERNEL)

head_booke.h
      56  LOAD_REG_IMMEDIATE(r11, MSR_KERNEL); \
     205  LOAD_REG_IMMEDIATE(r11, MSR_KERNEL & ~(MSR_ME|MSR_DE|MSR_CE)); \

optprobes.c
      74  regs.msr = MSR_KERNEL;  in can_optimize()

align.c
     314  if ((regs->msr & MSR_LE) != (MSR_KERNEL & MSR_LE)) {  in fix_alignment()

head_44x.S
     227  lis r3,MSR_KERNEL@h
     228  ori r3,r3,MSR_KERNEL@l
signal_32.c
     806  regs_set_return_msr(regs, (regs->msr & ~MSR_LE) | (MSR_KERNEL & MSR_LE));  in handle_rt_signal32()
     894  regs_set_return_msr(regs, (regs->msr & ~MSR_LE) | (MSR_KERNEL & MSR_LE));  in handle_signal32()
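The align.c test above and these two signal_32.c lines are complementary uses of the MSR_LE bit: comparing (msr & MSR_LE) against (MSR_KERNEL & MSR_LE) detects a context whose endianness differs from the kernel's, while masking MSR_LE out and OR-ing the kernel's setting back in forces a signal handler to start in the kernel's endianness without disturbing any other MSR bit. The same comparison shows up again in kvm_book3s.h and sstep.c below. A stand-alone sketch of both expressions, with illustrative constants standing in for the real asm/reg.h values:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define MSR_LE      0x1ULL      /* Little-Endian mode bit (illustrative) */
    #define MSR_KERNEL  0x1032ULL   /* assumed big-endian kernel: MSR_LE clear */

    /* Same shape as the align.c / sstep.c test: does the interrupted
     * context use a different endianness than the kernel? */
    static bool cross_endian(uint64_t user_msr)
    {
            return (user_msr & MSR_LE) != (MSR_KERNEL & MSR_LE);
    }

    /* Same shape as the signal_32.c calls: start the handler in the
     * kernel's endianness, preserving every other MSR bit. */
    static uint64_t handler_msr(uint64_t user_msr)
    {
            return (user_msr & ~MSR_LE) | (MSR_KERNEL & MSR_LE);
    }

    int main(void)
    {
            uint64_t le_user_msr = 0x8001;  /* hypothetical user MSR with MSR_LE set */

            printf("cross endian: %d\n", cross_endian(le_user_msr));
            printf("handler msr:  %#llx\n", (unsigned long long)handler_msr(le_user_msr));
            return 0;
    }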
setup_64.c
     270  get_paca()->kernel_msr = MSR_KERNEL;  in cpu_ready_for_interrupts()
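Read together with the paca.c line above, this looks like a two-stage setup: initialise_paca() records MSR_KERNEL with MSR_IR and MSR_DR cleared, so anything restoring paca->kernel_msr before the MMU and interrupt state are fully configured stays in real mode, and cpu_ready_for_interrupts() later upgrades the field to the full MSR_KERNEL. A toy sketch of that progression; the struct and the 64-bit constant are stand-ins, not the real paca_struct layout or value:

    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative bit values; the authoritative ones come from asm/reg.h. */
    #define MSR_IR      (1ULL << 5)             /* instruction relocate (MMU on) */
    #define MSR_DR      (1ULL << 4)             /* data relocate (MMU on)        */
    #define MSR_KERNEL  0x8000000000001032ULL   /* hypothetical 64-bit kernel MSR */

    struct paca_sketch {            /* stand-in for the real struct paca_struct */
            uint64_t kernel_msr;
    };

    int main(void)
    {
            struct paca_sketch paca;

            /* initialise_paca(): early boot, keep translation off. */
            paca.kernel_msr = MSR_KERNEL & ~(MSR_IR | MSR_DR);
            printf("early kernel_msr: %#llx\n", (unsigned long long)paca.kernel_msr);

            /* cpu_ready_for_interrupts(): switch to the full kernel MSR. */
            paca.kernel_msr = MSR_KERNEL;
            printf("final kernel_msr: %#llx\n", (unsigned long long)paca.kernel_msr);
            return 0;
    }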
exceptions-64e.S
    1263  lis r7,MSR_KERNEL@h
    1264  ori r7,r7,MSR_KERNEL@l

/linux/arch/powerpc/include/asm/

reg_booke.h
      42  #define MSR_KERNEL (MSR_ | MSR_64BIT)  (macro)
      46  #define MSR_KERNEL (MSR_ME|MSR_RI|MSR_IR|MSR_DR|MSR_CE)  (macro)
      47  #define MSR_USER (MSR_KERNEL|MSR_PR|MSR_EE)
      49  #define MSR_KERNEL (MSR_ME|MSR_RI|MSR_CE)  (macro)
      50  #define MSR_USER (MSR_KERNEL|MSR_PR|MSR_EE)
reg.h
     142  #define MSR_KERNEL (MSR_ | MSR_64BIT)  (macro)
     147  #define MSR_KERNEL (MSR_ME|MSR_RI|MSR_IR|MSR_DR)  (macro)
     148  #define MSR_USER (MSR_KERNEL|MSR_PR|MSR_EE)
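The reg.h and reg_booke.h definitions above are the core of this listing: MSR_KERNEL is an OR of the MSR bits the kernel always wants set (machine-check enable, recoverable interrupt, and, where translation is under MSR control, instruction/data relocate; the 64-bit variant adds MSR_64BIT on top of the common MSR_ base), and MSR_USER adds problem state (MSR_PR) and external-interrupt enable (MSR_EE). A stand-alone sketch of the classic 32-bit case; the bit positions are reproduced here for illustration and are not part of this listing:

    #include <stdio.h>

    /* MSR bit positions as commonly documented for classic PowerPC;
     * the kernel's authoritative values live in asm/reg.h. */
    #define MSR_EE  (1U << 15)   /* External interrupt Enable     */
    #define MSR_PR  (1U << 14)   /* Problem state (user mode)     */
    #define MSR_ME  (1U << 12)   /* Machine check Enable          */
    #define MSR_IR  (1U << 5)    /* Instruction Relocate (MMU on) */
    #define MSR_DR  (1U << 4)    /* Data Relocate (MMU on)        */
    #define MSR_RI  (1U << 1)    /* Recoverable Interrupt         */

    /* The classic 32-bit definitions from reg.h above. */
    #define MSR_KERNEL  (MSR_ME | MSR_RI | MSR_IR | MSR_DR)
    #define MSR_USER    (MSR_KERNEL | MSR_PR | MSR_EE)

    int main(void)
    {
            printf("MSR_KERNEL = %#x\n", MSR_KERNEL);   /* 0x1032 */
            printf("MSR_USER   = %#x\n", MSR_USER);     /* 0xd032 */
            return 0;
    }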
kvm_book3s.h
     401  return (kvmppc_get_msr(vcpu) & MSR_LE) != (MSR_KERNEL & MSR_LE);  in kvmppc_need_byteswap()

/linux/arch/microblaze/include/asm/

registers.h
      36  # define MSR_KERNEL (MSR_EE | MSR_VM)  (macro)

/linux/arch/powerpc/platforms/powernv/

idle.c
     546  mtmsr(MSR_KERNEL);  in power7_offline()
     563  mtmsr(MSR_KERNEL);  in power7_idle_type()
     802  mtmsr(MSR_KERNEL);  in power9_idle_stop()
     987  mtmsr(MSR_KERNEL);  in power10_idle_stop()
/linux/arch/microblaze/kernel/

head.S
     354  ori r4,r0,MSR_KERNEL

/linux/arch/powerpc/lib/

test_emulate_step.c
    1688  exp.msr = MSR_KERNEL;  in run_tests_compute()
    1689  got.msr = MSR_KERNEL;  in run_tests_compute()

sstep.c
    3336  cross_endian = (regs->msr & MSR_LE) != (MSR_KERNEL & MSR_LE);  in emulate_loadstore()
    3666  regs->gpr[10] = MSR_KERNEL;  in emulate_step()
    3671  regs_set_return_msr(regs, MSR_KERNEL);  in emulate_step()
    3677  regs->gpr[10] = MSR_KERNEL;  in emulate_step()
    3682  regs_set_return_msr(regs, MSR_KERNEL);  in emulate_step()