
Searched refs:rdx (Results 1 – 25 of 114) sorted by relevance

/linux/arch/x86/lib/
memmove_64.S
36 add %rdx, %r8
49 cmp $680, %rdx
58 sub $0x20, %rdx
63 sub $0x20, %rdx
83 movq %rdx, %rcx
116 cmp $680, %rdx
151 cmpq $16, %rdx
167 cmpq $8, %rdx
178 cmpq $4, %rdx
189 cmp $2, %rdx
[all …]
memcpy_64.S
36 movq %rdx, %rcx
54 movq %rdx, %rcx
62 cmpq $0x20, %rdx
71 subq $0x20, %rdx
73 subq $0x20, %rdx
97 addq %rdx, %rsi
98 addq %rdx, %rdi
99 subq $0x20, %rdx
106 subq $0x20, %rdx
123 subq %rdx, %rsi
[all …]
hweight.S
42 pushq %rdx
44 movq %rdi, %rdx # w -> t
46 shrq %rdx # t >>= 1
48 movabsq $0x3333333333333333, %rdx
55 addq %rdx, %rax # w = w_tmp + t
57 movq %rax, %rdx # w -> t
58 shrq $4, %rdx # t >>= 4
59 addq %rdx, %rax # w_tmp += t
60 movabsq $0x0f0f0f0f0f0f0f0f, %rdx
62 movabsq $0x0101010101010101, %rdx
[all …]
memset_64.S
32 movq %rdx,%rcx
63 movq %rdx,%rcx
83 movq %rdx,%rcx
131 cmpq $7,%rdx
137 subq %r8,%rdx
copy_page_64.S
35 movq 0x8*2(%rsi), %rdx
46 movq %rdx, 0x8*2(%rdi)
65 movq 0x8*2(%rsi), %rdx
74 movq %rdx, 0x8*2(%rdi)
getuser.S
42 ALTERNATIVE __stringify(mov $((1 << 47) - 4096 - (n)),%rdx), \
43 __stringify(mov $((1 << 56) - 4096 - (n)),%rdx), X86_FEATURE_LA57
100 4: movq (%_ASM_AX),%rdx
155 9: movq (%_ASM_AX),%rdx
/linux/tools/arch/x86/lib/
memcpy_64.S
36 movq %rdx, %rcx
54 movq %rdx, %rcx
62 cmpq $0x20, %rdx
71 subq $0x20, %rdx
73 subq $0x20, %rdx
97 addq %rdx, %rsi
98 addq %rdx, %rdi
99 subq $0x20, %rdx
106 subq $0x20, %rdx
123 subq %rdx, %rsi
[all …]
memset_64.S
32 movq %rdx,%rcx
63 movq %rdx,%rcx
83 movq %rdx,%rcx
131 cmpq $7,%rdx
137 subq %r8,%rdx
/linux/arch/x86/crypto/
sm4-aesni-avx-asm_64.S
158 vmovdqu 0*16(%rdx), RA0;
164 vmovdqu 1*16(%rdx), RA1;
166 vmovdqu 2*16(%rdx), RA2;
169 vmovdqu 3*16(%rdx), RA3;
559 vmovdqu 0 * 16(%rdx), RA1;
560 vmovdqu 1 * 16(%rdx), RA2;
561 vmovdqu 2 * 16(%rdx), RA3;
562 vmovdqu 3 * 16(%rdx), RB0;
563 vmovdqu 4 * 16(%rdx), RB1;
564 vmovdqu 5 * 16(%rdx), RB2;
[all …]
sm4-aesni-avx2-asm_64.S
370 vpxor (0 * 32)(%rdx), RA0, RA0;
371 vpxor (1 * 32)(%rdx), RA1, RA1;
372 vpxor (2 * 32)(%rdx), RA2, RA2;
409 vmovdqu (0 * 32)(%rdx), RA0;
410 vmovdqu (1 * 32)(%rdx), RA1;
411 vmovdqu (2 * 32)(%rdx), RA2;
412 vmovdqu (3 * 32)(%rdx), RA3;
413 vmovdqu (4 * 32)(%rdx), RB0;
414 vmovdqu (5 * 32)(%rdx), RB1;
415 vmovdqu (6 * 32)(%rdx), RB2;
[all …]
cast5-avx-x86_64-asm_64.S
63 #define RGI1 %rdx
449 movq %rdx, %r12;
451 vmovdqu (0*16)(%rdx), RL1;
452 vmovdqu (1*16)(%rdx), RR1;
453 vmovdqu (2*16)(%rdx), RL2;
454 vmovdqu (3*16)(%rdx), RR2;
455 vmovdqu (4*16)(%rdx), RL3;
456 vmovdqu (5*16)(%rdx), RR3;
457 vmovdqu (6*16)(%rdx), RL4;
458 vmovdqu (7*16)(%rdx), RR4;
[all …]
chacha-avx512vl-x86_64.S
30 # %rdx: up to 2 data blocks input, i
117 vpxord 0x00(%rdx),%xmm7,%xmm6
124 vpxord 0x10(%rdx),%xmm7,%xmm6
131 vpxord 0x20(%rdx),%xmm7,%xmm6
138 vpxord 0x30(%rdx),%xmm7,%xmm6
146 vpxord 0x40(%rdx),%xmm7,%xmm6
152 vpxord 0x50(%rdx),%xmm7,%xmm6
158 vpxord 0x60(%rdx),%xmm7,%xmm6
164 vpxord 0x70(%rdx),%xmm7,%xmm6
329 vpxord 0x00(%rdx),%xmm10,%xmm9
[all …]
chacha-avx2-x86_64.S
144 vpxor 0x00(%rdx),%xmm7,%xmm6
151 vpxor 0x10(%rdx),%xmm7,%xmm6
158 vpxor 0x20(%rdx),%xmm7,%xmm6
165 vpxor 0x30(%rdx),%xmm7,%xmm6
173 vpxor 0x40(%rdx),%xmm7,%xmm6
179 vpxor 0x50(%rdx),%xmm7,%xmm6
185 vpxor 0x60(%rdx),%xmm7,%xmm6
191 vpxor 0x70(%rdx),%xmm7,%xmm6
211 lea (%rdx,%rax),%rsi
516 lea (%rdx,%rax),%rsi
[all …]
chacha-ssse3-x86_64.S
139 movdqu 0x00(%rdx),%xmm4
147 movdqu 0x10(%rdx),%xmm0
155 movdqu 0x20(%rdx),%xmm0
163 movdqu 0x30(%rdx),%xmm0
184 lea (%rdx,%rax),%rsi
654 movdqu 0x00(%rdx),%xmm1
661 movdqu 0x10(%rdx),%xmm1
668 movdqu 0x20(%rdx),%xmm1
675 movdqu 0x30(%rdx),%xmm1
682 movdqu 0x40(%rdx),%xmm1
[all …]
/linux/arch/x86/mm/
mem_encrypt_boot.S
44 movq %rdx, %r12 /* Area length */
55 movq %r8, %rdx /* Pagetables used for encryption */
96 mov %rdx, %cr3
99 mov %cr4, %rdx
100 andq $~X86_CR4_PGE, %rdx
101 mov %rdx, %cr4
102 orq $X86_CR4_PGE, %rdx
103 mov %rdx, %cr4
115 mov %rdx, %r15 /* Save original PAT value */
148 mov %r15, %rdx /* Restore original PAT value */
/linux/arch/x86/power/
hibernate_asm_64.S
34 movq %rax, %rdx
35 andq $~(X86_CR4_PGE), %rdx
36 movq %rdx, %cr4; # turn off PGE
49 movq pt_regs_dx(%rax), %rdx
80 movq %rdx, pt_regs_dx(%rax)
112 movq restore_pblist(%rip), %rdx
132 testq %rdx, %rdx
136 movq pbe_address(%rdx), %rsi
137 movq pbe_orig_address(%rdx), %rdi
143 movq pbe_next(%rdx), %rdx
/linux/arch/x86/entry/
entry_64.S
1109 pushq %rdx
1129 movq %rsp, %rdx
1139 PUSH_AND_CLEAR_REGS rdx=(%rdx)
1212 cmpq 8(%rsp), %rdx
1215 cmpq 8(%rsp), %rdx
1239 lea 6*8(%rsp), %rdx
1241 cmpq %rdx, 4*8(%rsp)
1246 cmpq %rdx, 4*8(%rsp)
1265 pushq %rdx
1274 popq %rdx
[all …]
calling.h
66 .macro PUSH_REGS rdx=%rdx rax=%rax save_ret=0
75 pushq \rdx /* pt_regs->dx */
117 .macro PUSH_AND_CLEAR_REGS rdx=%rdx rax=%rax save_ret=0
118 PUSH_REGS rdx=\rdx, rax=\rax, save_ret=\save_ret
143 popq %rdx
entry_64_compat.S
87 pushq %rdx /* pt_regs->dx */
227 pushq %rdx /* pt_regs->dx */
275 popq %rdx /* Skip pt_regs->cx */
276 popq %rdx /* pt_regs->dx */
382 pushq %rdx /* pt_regs->dx */
/linux/arch/x86/kernel/
sev_verify_cbit.S
38 movq %rsi, %rdx
39 andq $(~X86_CR4_PGE), %rdx
40 movq %rdx, %cr4
49 1: rdrand %rdx
53 movq %rdx, sev_check_data(%rip)
65 cmpq %rdx, sev_check_data(%rip)
ftrace_64.S
83 movq %rdx, RDX(%rsp)
94 movq MCOUNT_REG_SIZE-8(%rsp), %rdx
96 movq %rbp, %rdx
98 movq %rdx, RBP(%rsp)
124 movq RDX(%rsp), %rdx
149 movq function_trace_op(%rip), %rdx
196 movq function_trace_op(%rip), %rdx
314 movq %rdx, 8(%rsp)
320 movq 8(%rsp), %rdx
relocate_kernel_64.S
115 pushq %rdx
197 popq %rdx
200 call *%rdx
271 movq %rdi, %rdx
279 movq %rdx, %rsi
283 movq %rdx, %rdi
/linux/tools/testing/selftests/kvm/x86_64/
xen_vmcall_test.c
38 unsigned long rdx = ARGVALUE(3); in guest_code() local
47 "a"(rax), "D"(rdi), "S"(rsi), "d"(rdx), in guest_code()
69 "a"(rax), "D"(rdi), "S"(rsi), "d"(rdx), in guest_code()
76 rdx = 0x5a5a5a5a; /* ingpa (badly aligned) */ in guest_code()
79 "a"(rax), "c"(rcx), "d"(rdx), in guest_code()
/linux/tools/testing/selftests/sgx/
test_encl_bootstrap.S
59 xor %rdx, %rdx
68 add %rdx, %rdx # OF = SF = AF = CF = 0; ZF = PF = 1
/linux/arch/x86/kernel/acpi/
wakeup_64.S
21 movq $0x123456789abcdef0, %rdx
22 cmpq %rdx, %rax
61 movq %rdx, pt_regs_dx(%rax)
108 movq pt_regs_dx(%rax), %rdx

Completed in 38 milliseconds
