/linux/arch/x86/power/

hibernate_asm_64.S
     34  movq %rax, %rdx
     42  movq $saved_context, %rax
     43  movq pt_regs_sp(%rax), %rsp
     50  movq pt_regs_r8(%rax), %r8
     51  movq pt_regs_r9(%rax), %r9
     58  pushq pt_regs_flags(%rax)
     73  movq $saved_context, %rax
     90  popq pt_regs_flags(%rax)
     93  movq %cr3, %rax
    108  movq temp_pgt(%rip), %rax
    [all …]

/linux/arch/x86/kernel/acpi/

wakeup_64.S
     20  movq saved_magic, %rax
     22  cmpq %rdx, %rax
     43  movq saved_rip, %rax
     45  jmp *%rax
     54  movq $saved_context, %rax
     55  movq %rsp, pt_regs_sp(%rax)
     62  movq %r8, pt_regs_r8(%rax)
     63  movq %r9, pt_regs_r9(%rax)
     71  popq pt_regs_flags(%rax)
     91  movq $saved_context, %rax
    [all …]

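Both this entry and hibernate_asm_64.S above follow the same save/restore pattern: before sleep the general-purpose registers and flags are written into a saved_context area through pt_regs_* offsets, and on the resume path they are reloaded from it. A minimal C sketch of that idea, with an invented structure and only a handful of registers (the real code covers the full register set and runs in kernel context):

#include <stdint.h>

/* Hypothetical stand-in for the fields the assembly addresses as
 * pt_regs_sp/pt_regs_r8/pt_regs_r9/pt_regs_flags(%rax). */
struct ctx_sketch {
        uint64_t sp, flags, r8, r9;
};

static inline void ctx_save_sketch(struct ctx_sketch *c)
{
        __asm__ __volatile__(
                "movq %%rsp, %0\n\t"    /* movq %rsp, pt_regs_sp(%rax) */
                "movq %%r8,  %1\n\t"    /* movq %r8,  pt_regs_r8(%rax) */
                "movq %%r9,  %2\n\t"    /* movq %r9,  pt_regs_r9(%rax) */
                "pushfq\n\t"            /* flags are saved via push/pop */
                "popq %3"
                : "=m"(c->sp), "=m"(c->r8), "=m"(c->r9), "=m"(c->flags));
}
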
/linux/arch/x86/lib/

memset_64.S
     38  imulq %rsi,%rax
     42  movq %r9,%rax
     65  movq %r9,%rax
     75  imulq %rcx,%rax
     90  movq %rax,(%rdi)
     91  movq %rax,8(%rdi)
     92  movq %rax,16(%rdi)
     93  movq %rax,24(%rdi)
     94  movq %rax,32(%rdi)
    112  movq %rax,(%rdi)
    [all …]

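The imulq hits at lines 38 and 75 are the byte-broadcast step of memset(): multiplying the zero-extended fill byte by 0x0101010101010101 replicates it into all eight bytes of %rax, and the run of movq stores at lines 90-94 then writes it out eight bytes at a time. A hedged C sketch of the same technique (function name invented):

#include <stdint.h>
#include <string.h>
#include <stdio.h>

/* Invented name; mirrors the "broadcast the byte, then store words" shape. */
static void memset64_sketch(void *dst, unsigned char c, size_t n)
{
        uint64_t pattern = (uint64_t)c * 0x0101010101010101ULL; /* imulq step */
        unsigned char *p = dst;

        while (n >= 8) {                /* movq %rax,(%rdi) ... word stores */
                memcpy(p, &pattern, 8);
                p += 8;
                n -= 8;
        }
        while (n--)                     /* leftover tail, byte at a time */
                *p++ = c;
}

int main(void)
{
        unsigned char buf[20];

        memset64_sketch(buf, 0xab, sizeof(buf));
        printf("%02x %02x\n", (unsigned)buf[0], (unsigned)buf[19]);   /* ab ab */
        return 0;
}
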
hweight.S
     45  movabsq $0x5555555555555555, %rax
     47  andq %rdx, %rax # t &= 0x5555555555555555
     49  subq %rax, %rdi # w -= t
     51  movq %rdi, %rax # w -> t
     53  andq %rdx, %rax # t &= 0x3333333333333333
     55  addq %rdx, %rax # w = w_tmp + t
     57  movq %rax, %rdx # w -> t
     59  addq %rdx, %rax # w_tmp += t
     61  andq %rdx, %rax # w_tmp &= 0x0f0f0f0f0f0f0f0f
     63  imulq %rdx, %rax # w_tmp *= 0x0101010101010101
    [all …]

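The comments in the excerpt spell out the classic SWAR population count that __sw_hweight64 implements: fold pairs of bits, then nibbles, then bytes, and finally sum the byte counts with a multiply by 0x0101010101010101. The same sequence in C, using the constants visible above:

#include <stdint.h>
#include <stdio.h>

static unsigned int hweight64_sketch(uint64_t w)
{
        w -= (w >> 1) & 0x5555555555555555ULL;                 /* 2-bit counts  */
        w  = (w & 0x3333333333333333ULL) +
             ((w >> 2) & 0x3333333333333333ULL);               /* 4-bit counts  */
        w  = (w + (w >> 4)) & 0x0f0f0f0f0f0f0f0fULL;           /* byte counts   */
        return (w * 0x0101010101010101ULL) >> 56;              /* sum the bytes */
}

int main(void)
{
        printf("%u\n", hweight64_sketch(0xf0f0f0f0f0f0f0f0ULL)); /* prints 32 */
        return 0;
}
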
csum-copy_64.S
     95  adcq %rbx, %rax
     96  adcq %r8, %rax
     97  adcq %r11, %rax
     98  adcq %rdx, %rax
     99  adcq %r10, %rax
    100  adcq %r15, %rax
    101  adcq %r14, %rax
    102  adcq %r13, %rax
    129  adcq %r9, %rax
    143  adcq %rbx, %rax
    [all …]

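The adcq chain accumulates 64-bit words into %rax with the carry flag propagating from one addition to the next, which is how the checksum-while-copying routine builds an Internet-style ones'-complement sum. An equivalent (if slower) scalar sketch, names invented:

#include <stdint.h>
#include <stddef.h>
#include <string.h>

/* Add with end-around carry: the C equivalent of "adcq src, %rax". */
static uint64_t csum_add64(uint64_t sum, uint64_t word)
{
        sum += word;
        if (sum < word)         /* carry out of bit 63 wraps back in */
                sum += 1;
        return sum;
}

static uint64_t csum_and_copy_sketch(void *dst, const void *src, size_t nwords)
{
        const unsigned char *s = src;
        unsigned char *d = dst;
        uint64_t sum = 0, w;

        for (size_t i = 0; i < nwords; i++) {
                memcpy(&w, s + 8 * i, 8);       /* load the next word   */
                memcpy(d + 8 * i, &w, 8);       /* copy it out          */
                sum = csum_add64(sum, w);       /* fold it into the sum */
        }
        return sum;
}
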
/linux/tools/arch/x86/lib/

memset_64.S
     38  imulq %rsi,%rax
     42  movq %r9,%rax
     65  movq %r9,%rax
     75  imulq %rcx,%rax
     90  movq %rax,(%rdi)
     91  movq %rax,8(%rdi)
     92  movq %rax,16(%rdi)
     93  movq %rax,24(%rdi)
     94  movq %rax,32(%rdi)
    112  movq %rax,(%rdi)
    [all …]

/linux/arch/x86/kernel/

relocate_kernel_64.S
     64  movq %cr0, %rax
     66  movq %cr3, %rax
     68  movq %cr4, %rax
     72  movq %rax, %r13
    126  movq %cr0, %rax
    129  movq %rax, %cr0
    141  movq %rax, %cr4
    168  movq %cr3, %rax
    169  movq %rax, %cr3
    208  movq %rax, %cr3
    [all …]

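The first hits save %cr0, %cr3 and %cr4 so they can be restored around the switch to the new kernel image; moving to or from a control register is privileged, so this pattern only exists in ring-0 code. A hedged sketch of the accessor shape behind "movq %cr3, %rax" (names invented, not runnable from userspace):

static inline unsigned long read_cr3_sketch(void)
{
        unsigned long val;

        __asm__ __volatile__("movq %%cr3, %0" : "=r"(val));
        return val;
}

static inline void write_cr3_sketch(unsigned long val)
{
        __asm__ __volatile__("movq %0, %%cr3" : : "r"(val) : "memory");
}
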
ftrace_64.S
     81  movq %rax, RAX(%rsp)
    126  movq RAX(%rsp), %rax
    161  movq RIP(%rsp), %rax
    162  movq %rax, MCOUNT_REG_SIZE(%rsp)
    227  movq EFLAGS(%rsp), %rax
    231  movq RIP(%rsp), %rax
    242  movq ORIG_RAX(%rsp), %rax
    249  testq %rax, %rax
    313  movq %rax, (%rsp)
    319  movq %rax, %rdi
    [all …]

head_64.S
     74  leaq .Lon_kernel_cs(%rip), %rax
     75  pushq %rax
    154  addq phys_base(%rip), %rax
    165  movq %rax, %rdi
    170  movq %rax, %cr3
    173  movq $1f, %rax
    175  jmp *%rax
    242  movq %rax, %cr0
    280  movq initial_code(%rip), %rax
    323  call *%rax
    [all …]

/linux/arch/x86/kernel/cpu/sgx/

encls.h
     85  #define __encls_ret_N(rax, inputs...) \
     97  : "a"(rax), inputs \
    102  #define __encls_ret_1(rax, rcx) \
    104  __encls_ret_N(rax, "c"(rcx)); \
    107  #define __encls_ret_2(rax, rbx, rcx) \
    109  __encls_ret_N(rax, "b"(rbx), "c"(rcx)); \
    112  #define __encls_ret_3(rax, rbx, rcx, rdx) \
    132  #define __encls_N(rax, rbx_out, inputs...) \
    145  : "a"(rax), inputs \
    150  #define __encls_2(rax, rbx, rcx) \
    [all …]

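These macros wrap the ENCLS instruction in inline asm: the constraint lists in the excerpt bind the leaf function number to %eax ("a") and the leaf's operands to %rbx/%rcx/%rdx ("b"/"c"/"d"). A simplified stand-alone rendering of the two-operand shape; it deliberately omits the kernel's exception-table fixup, and ENCLS itself only executes in ring 0 on SGX-capable hardware:

#include <stdint.h>

static inline int encls_ret_2_sketch(uint32_t leaf, void *rbx, void *rcx)
{
        int ret;

        /* 0f 01 cf encodes ENCLS; the leaf's return code comes back in %eax. */
        __asm__ __volatile__(".byte 0x0f, 0x01, 0xcf"
                             : "=a"(ret)
                             : "a"(leaf), "b"(rbx), "c"(rcx)
                             : "memory", "cc");
        return ret;
}
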
/linux/arch/x86/boot/compressed/

efi_thunk_64.S
     32  push %rax
     34  push %rax
     36  push %rax
     62  leaq efi32_boot_idt(%rip), %rax
     63  lidt (%rax)
     65  lgdt (%rax)
     69  pushq %rax
     70  leaq efi_enter32(%rip), %rax
     71  pushq %rax
     75  movq %rdi, %rax
    [all …]

head_64.S
    392  notq %rax
    435  addq %rax, 2(%rax)
    436  lgdt (%rax)
    441  pushq %rax
    479  leaq TRAMPOLINE_32BIT_CODE_OFFSET(%rax), %rax
    480  pushq %rax
    526  lgdt (%rax)
    532  jmp *%rax
    543  leaq rva(startup_64)(%rax), %rax
    544  jmp *%rax
    [all …]

/linux/arch/x86/crypto/

poly1305-x86_64-cryptogams.pl
    188  mov %rax,$d2
    198  add %rax,$d2
    465  lea (%rax,%rax,4),%eax # *5
    477  or $d1,%rax
    482  lea (%rax,%rax,4),%eax # *5
    497  lea (%rax,%rax,4),%eax # *5
    536  lea (%rax,%rax,4),%eax # *5
    576  lea (%rax,%rax,4),%eax # *5
   2877  lea (%rax,%rax,4),%rax # *5
   4017  lea 48(%rax),%rax
    [all …]

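Every lea (%rax,%rax,4) marked "# *5" is a one-instruction multiply by five (x + 4*x). Poly1305 reduces modulo 2^130 - 5, so bits that overflow position 130 are folded back into the low limbs scaled by 5, and LEA does that scaling cheaply without disturbing the flags. The arithmetic it computes, spelled out in C:

#include <stdint.h>

/* What "lea (%rax,%rax,4),%rax" evaluates: base + index*4, with both set to x. */
static inline uint64_t times5(uint64_t x)
{
        return x + 4 * x;
}
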
blake2s-core.S
     71  movd (%rsi,%rax,4),%xmm4
     73  movd (%rsi,%rax,4),%xmm5
     75  movd (%rsi,%rax,4),%xmm6
     77  movd (%rsi,%rax,4),%xmm7
     92  movd (%rsi,%rax,4),%xmm5
     94  movd (%rsi,%rax,4),%xmm6
     96  movd (%rsi,%rax,4),%xmm7
     98  movd (%rsi,%rax,4),%xmm4
    116  movd (%rsi,%rax,4),%xmm6
    196  leaq SIGMA2(%rip),%rax
    [all …]

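The repeated movd (%rsi,%rax,4) is plain scaled-index addressing: a 32-bit load from base %rsi plus 4*%rax, used here to pick message words through the BLAKE2s sigma message permutation (the SIGMA tables referenced nearby, such as SIGMA2 at line 196, hold those permutations). In C the same access is just an indexed array read; a small sketch with invented parameter names:

#include <stdint.h>

/* msg: the 16 message words of a block; sigma: one round's permutation. */
static inline uint32_t permuted_word(const uint32_t *msg,
                                     const uint8_t *sigma, unsigned int i)
{
        return msg[sigma[i]];   /* base + index*4, as in (%rsi,%rax,4) */
}
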
chacha-avx2-x86_64.S
     67  mov %rcx,%rax
    142  cmp $0x10,%rax
    149  cmp $0x20,%rax
    156  cmp $0x30,%rax
    163  cmp $0x40,%rax
    171  cmp $0x50,%rax
    177  cmp $0x60,%rax
    183  cmp $0x70,%rax
    200  mov %rax,%r9
    268  mov %rcx,%rax
    [all …]

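Here %rax holds the remaining byte count (copied from %rcx at line 67), and the cmp $0x10/$0x20/... ladder decides how many 16-byte chunks of the final, partial block still need to be written. A scalar sketch of that tail handling, with invented names; the real code XORs whole vector registers instead of single bytes:

#include <stddef.h>

/* XOR at most one 64-byte keystream block into the output; len is the
 * number of bytes left, which the assembly tests 16 bytes at a time. */
static void chacha_xor_tail_sketch(unsigned char *dst, const unsigned char *src,
                                   const unsigned char keystream[64], size_t len)
{
        for (size_t i = 0; i < len && i < 64; i++)
                dst[i] = src[i] ^ keystream[i];
}
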
crc32c-pcl-intel-asm_64.S
    218  shlq $3, %rax # rax *= 8
    219  pmovzxdq (%bufp,%rax), %xmm0 # 2 consts: K1:K2
    220  leal (%eax,%eax,2), %eax # rax *= 3 (total *24)
    221  subq %rax, tmp # tmp -= rax*24
    230  movq %xmm1, %rax
    231  xor -i*8(block_2), %rax
    233  crc32 %rax, crc_init
    305  movq crc_init, %rax

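Line 233's crc32 %rax, crc_init is the SSE4.2 CRC32C instruction folding eight input bytes per step; the K1:K2 constants and the *24 index arithmetic around it appear to be part of recombining the CRC streams the routine runs in parallel. A hedged single-stream sketch using the compiler intrinsics (invented function name, build with -msse4.2):

#include <nmmintrin.h>          /* _mm_crc32_u64 / _mm_crc32_u8 (SSE4.2) */
#include <stdint.h>
#include <stddef.h>
#include <string.h>

static uint32_t crc32c_hw_sketch(uint32_t crc, const void *data, size_t len)
{
        const unsigned char *p = data;
        uint64_t c = crc;

        while (len >= 8) {      /* 64-bit folds, as in "crc32 %rax, crc_init" */
                uint64_t chunk;

                memcpy(&chunk, p, 8);
                c = _mm_crc32_u64(c, chunk);
                p += 8;
                len -= 8;
        }
        while (len--)           /* byte tail */
                c = _mm_crc32_u8((uint32_t)c, *p++);
        return (uint32_t)c;
}
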
chacha-ssse3-x86_64.S
    132  mov %rcx,%rax
    137  cmp $0x10,%rax
    145  cmp $0x20,%rax
    153  cmp $0x30,%rax
    161  cmp $0x40,%rax
    173  mov %rax,%r9
    176  and $~0x0f,%rax
    243  mov %rcx,%rax
    652  cmp $0x10,%rax
    659  cmp $0x20,%rax
    [all …]

/linux/arch/x86/entry/

entry_64.S
    279  movq %rax, %rdi
    308  pushq %rax
    314  popq %rax
    681  movq %rax, (1*8)(%rdi)
    683  movq %rax, (2*8)(%rdi)
    685  movq %rax, (3*8)(%rdi)
    687  movq %rax, (5*8)(%rdi)
    689  movq %rax, (4*8)(%rdi)
    708  movq %rax, %rsp
   1005  cmpq %rax, RIP+8(%rsp)
    [all …]

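%rax is all over this file because of the 64-bit syscall convention it implements: userspace passes the system call number in %rax, the entry code spills it into pt_regs (the orig_ax slot), and the return value travels back to userspace in %rax. A small userspace illustration of that convention (write(2) is system call 1 on x86-64):

/* Number in %rax, arguments in %rdi/%rsi/%rdx, result back in %rax;
 * the syscall instruction itself clobbers %rcx and %r11. */
static long raw_syscall3(long nr, long a1, long a2, long a3)
{
        long ret;

        __asm__ __volatile__("syscall"
                             : "=a"(ret)
                             : "a"(nr), "D"(a1), "S"(a2), "d"(a3)
                             : "rcx", "r11", "memory");
        return ret;
}

int main(void)
{
        static const char msg[] = "hello via %rax\n";

        return raw_syscall3(1 /* __NR_write */, 1, (long)msg, sizeof(msg) - 1) < 0;
}
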
calling.h
     66  .macro PUSH_REGS rdx=%rdx rax=%rax save_ret=0
     77  pushq \rax /* pt_regs->ax */
    117  .macro PUSH_AND_CLEAR_REGS rdx=%rdx rax=%rax save_ret=0
    118  PUSH_REGS rdx=\rdx, rax=\rax, save_ret=\save_ret
    137  popq %rax
    218  pushq %rax
    219  SWITCH_TO_USER_CR3_NOSTACK scratch_reg=\scratch_reg scratch_reg2=%rax
    220  popq %rax

/linux/tools/testing/selftests/kvm/x86_64/

xen_vmcall_test.c  (all hits in guest_code())
     35  unsigned long rax = INPUTVALUE;
     46  "=a"(rax) :
     47  "a"(rax), "D"(rdi), "S"(rsi), "d"(rdx),
     49  GUEST_ASSERT(rax == RETVALUE);
     67  __asm__ __volatile__("call *%1" : "=a"(rax) :
     69  "a"(rax), "D"(rdi), "S"(rsi), "d"(rdx),
     71  GUEST_ASSERT(rax == RETVALUE);
     74  rax = 0;
     77  __asm__ __volatile__("call *%1" : "=a"(rax) :
     79  "a"(rax), "c"(rcx), "d"(rdx),
    [all …]

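The excerpted inline asm shows the hypercall convention the test exercises: the call number goes in (and the result comes back through) %rax via the "a" constraint, with arguments in %rdi/%rsi/%rdx, while the last variant passes them in %rcx/%rdx instead. The test reaches the instruction through "call *%1"; the hedged sketch below inlines a vmcall directly, which only does something meaningful when run inside a guest:

static unsigned long hcall3_sketch(unsigned long nr, unsigned long a0,
                                   unsigned long a1, unsigned long a2)
{
        unsigned long ret;

        __asm__ __volatile__("vmcall"   /* Intel VMX; AMD guests use vmmcall */
                             : "=a"(ret)
                             : "a"(nr), "D"(a0), "S"(a1), "d"(a2)
                             : "memory");
        return ret;
}
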
/linux/arch/x86/um/

stub_64.S
     12  mov 0x0(%rsp), %rax
     16  mov %rax, 8(%rbx)
     17  cmp $0, %rax
     24  add %rax, %rsp
     27  pop %rax
     42  cmp %rcx, %rax
     47  mov %rax, (%rbx)

/linux/arch/x86/platform/efi/

efi_thunk_64.S
     35  movq %rsp, %rax
     37  push %rax
     42  movq $__START_KERNEL_map, %rax
     43  subq phys_base(%rip), %rax
     47  subq %rax, %rbp
     48  subq %rax, %rbx

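Lines 42-48 build the kernel's virtual-to-physical offset and subtract it from the saved pointers, i.e. phys = virt - (__START_KERNEL_map - phys_base). A worked example using the usual x86-64 value of __START_KERNEL_map and made-up values for phys_base and the virtual address:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
        uint64_t start_kernel_map = 0xffffffff80000000ULL; /* __START_KERNEL_map */
        uint64_t phys_base        = 0x0000000001000000ULL; /* made up            */
        uint64_t virt             = 0xffffffff81234000ULL; /* made-up kernel VA   */

        uint64_t offset = start_kernel_map - phys_base;    /* lines 42-43 */
        uint64_t phys   = virt - offset;                   /* lines 47-48 */

        printf("phys = %#llx\n", (unsigned long long)phys); /* 0x2234000 */
        return 0;
}
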
/linux/arch/x86/mm/

mem_encrypt_boot.S
     37  movq %rcx, %rax /* Workarea stack page */
     38  leaq PAGE_SIZE(%rax), %rsp /* Set new stack pointer */
     39  addq $PAGE_SIZE, %rax /* Workarea encryption routine */
     47  movq %rax, %rdi /* Workarea encryption routine */
     57  movq %rax, %r8 /* Workarea encryption routine */
     61  call *%rax /* Call the encryption routine */

/linux/arch/x86/entry/vdso/

vsgx.S
    108  mov %rbx, %rax
    119  push %rax
    122  push %rax
    131  mov SGX_ENCLAVE_RUN_USER_HANDLER(%rax), %rax
    133  call *%rax

/linux/tools/testing/selftests/sgx/

test_encl_bootstrap.S
     46  lea (encl_stack)(%rbx), %rax
     47  xchg %rsp, %rax
     48  push %rax
     75  pop %rax
     76  mov %rax, %rsp
     79  mov $4, %rax