/sysdeps/unix/sysv/linux/aarch64/

syscall.S
    35: mov x4, x5
    36: mov x5, x6

sysdep.h
    211: # define LOAD_ARGS_6(x0, x1, x2, x3, x4, x5) \    [argument]
    212: long _x5tmp = (long) (x5); \
    215: # define LOAD_ARGS_7(x0, x1, x2, x3, x4, x5, x6) \    [argument]
    217: LOAD_ARGS_6 (x0, x1, x2, x3, x4, x5) \
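The LOAD_ARGS_* hits come from the syscall argument-marshalling macros: each argument expression is evaluated into a plain temporary before any temporary is bound to the fixed register the kernel expects, so evaluating one argument cannot clobber a register that has already been loaded. A minimal two-level sketch of the pattern (simplified; not the exact glibc definitions):

    /* Sketch only: evaluate the newest argument into a temporary, expand
       the macro for the lower-numbered arguments, then bind the temporary
       to its register.  After full expansion, every argument expression
       has been evaluated before any register variable is written.  */
    #define LOAD_ARGS_1(a0)                     \
      long _x0tmp = (long) (a0);                \
      register long _x0 asm ("x0") = _x0tmp;
    #define LOAD_ARGS_2(a0, a1)                 \
      long _x1tmp = (long) (a1);                \
      LOAD_ARGS_1 (a0)                          \
      register long _x1 asm ("x1") = _x1tmp;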
clone.S
    59: mov x3, x5 /* tls */

/sysdeps/x86_64/

tst-auditmod3a.c
    9:  __m128i x4, __m128i x5, __m128i x6, __m128i x7)    [in audit_test(), argument]
    18: || memcmp (&xmm, &x5, sizeof (xmm))    [in audit_test()]

tst-auditmod10a.c
    27: __m512i x4, __m512i x5, __m512i x6, __m512i x7)    [in audit_test(), argument]
    52: if (memcmp (&zmm, &x5, sizeof (zmm)))    [in audit_test()]

tst-auditmod4a.c
    10: __m256i x4, __m256i x5, __m256i x6, __m256i x7)    [in audit_test(), argument]
    35: if (memcmp (&ymm, &x5, sizeof (ymm)))    [in audit_test()]

tst-auditmod5a.c
    9:  __m128i x4, __m128i x5, __m128i x6, __m128i x7)    [in audit_test(), argument]
    34: if (memcmp (&xmm, &x5, sizeof (xmm)))    [in audit_test()]

tst-auditmod6a.c
    9:  __m128i x4, __m128i x5, __m128i x6, __m128i x7)    [in audit_test(), argument]
    34: if (memcmp (&xmm, &x5, sizeof (xmm)))    [in audit_test()]

tst-avx512mod.c
    10: __m512i x4, __m512i x5, __m512i x6, __m512i x7)    [in avx512_test(), argument]
    35: if (memcmp (&zmm, &x5, sizeof (zmm)))    [in avx512_test()]

tst-avxmod.c
    10: __m256i x4, __m256i x5, __m256i x6, __m256i x7)    [in avx_test(), argument]
    35: if (memcmp (&ymm, &x5, sizeof (ymm)))    [in avx_test()]

tst-ssemod.c
    9:  __m128i x4, __m128i x5, __m128i x6, __m128i x7)    [in sse_test(), argument]
    34: if (memcmp (&xmm, &x5, sizeof (xmm)))    [in sse_test()]
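These x86_64 test modules all share one shape: a function takes eight vector arguments (__m128i, __m256i or __m512i) and compares each against the value the caller is known to have passed, so any argument register clobbered on the call path is detected; the tst-auditmod* variants run the check across the dynamic linker's LD_AUDIT trampoline. A condensed sketch of the SSE-sized case, assuming the expected value for argument n is simply n broadcast into every lane (the real tests construct their expected values differently):

    #include <emmintrin.h>
    #include <stdlib.h>
    #include <string.h>

    /* Sketch only: verify that each incoming xmm argument still holds the
       value the caller is assumed to have passed (its index broadcast into
       every 32-bit lane); a mismatch means the register was clobbered
       somewhere between caller and callee.  */
    __m128i
    audit_test (__m128i x0, __m128i x1, __m128i x2, __m128i x3,
                __m128i x4, __m128i x5, __m128i x6, __m128i x7)
    {
      __m128i args[8] = { x0, x1, x2, x3, x4, x5, x6, x7 };

      for (int i = 0; i < 8; i++)
        {
          __m128i expected = _mm_set1_epi32 (i);
          if (memcmp (&expected, &args[i], sizeof (expected)) != 0)
            abort ();
        }

      /* The return value is arbitrary for this sketch.  */
      return _mm_setzero_si128 ();
    }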
/sysdeps/ieee754/flt-32/

sincosf_poly.h
    36: double x3, x4, x5, x6, s, c, c1, c2, s1;    [in sincosf_poly(), local]
    49: x5 = x3 * x2;    [in sincosf_poly()]
    55: *sinp = s + x5 * s1;    [in sincosf_poly()]
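Unlike the register hits elsewhere in this listing, x5 here is a local double holding x^5: sincosf_poly splits the sine polynomial into a low part s and a high part s1 and recombines them as s + x5*s1, giving two short dependency chains instead of one long Horner chain. A minimal sketch of that evaluation order (c1..c3 are placeholder coefficients, not the ones used in sincosf_poly.h):

    /* Sketch only: evaluate x + c1*x^3 + c2*x^5 + c3*x^7 as two short
       chains, s = x + c1*x3 and s1 = c2 + c3*x2, joined by s + x5*s1.  */
    static double
    sin_poly_sketch (double x, double c1, double c2, double c3)
    {
      double x2 = x * x;
      double x3 = x * x2;
      double x5 = x3 * x2;          /* the x5 the hits above refer to */
      double s  = x + c1 * x3;
      double s1 = c2 + c3 * x2;
      return s + x5 * s1;
    }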
/sysdeps/aarch64/

memset-reg.h
    25: #define tmp1 x5

sysdep.h
    158: stp x4, x5, [sp, #48]; \
    160: cfi_rel_offset (x5, 56); \
    172: ldp x4, x5, [sp, #48]; \
    174: cfi_restore (x5); \

dl-trampoline.S
    58:  stp x4, x5, [sp, #32]
    60:  cfi_rel_offset (x5, 40)
    111: ldp x4, x5, [sp, #32]
    180: stp x4, x5, [x29, #OFFSET_RG + DL_OFFSET_RG_X0 + 16*2]
    182: cfi_rel_offset (x5, OFFSET_RG + DL_OFFSET_RG_X0 + 16*2 + 8)
    235: ldp x4, x5, [x29, #OFFSET_RG + DL_OFFSET_RG_X0 + 16*2]
    281: ldp x4, x5, [x29, #OFFSET_RG + DL_OFFSET_RG_X0 + 16*2]

__longjmp.S
    105: ldr x5, [x0, #JB_SP<<3]
    107: mov sp, x5

start.S
    53: mov x5, x0

dl-tlsdesc.S
    198: stp x5, x6, [sp, #16*1]
    205: cfi_rel_offset (x5, 16*1)
    230: ldp x5, x6, [sp, #16*1]

setjmp.S
    62: str x5, [x0, #JB_SP<<3]

strcpy.S
    42: #define tmp x5
    44: #define shift x5

strcmp.S
    41: #define diff x5
    42: #define off1 x5

tst-vpcs-mod.S
    30: stp x4, x5, [x0, 32]
    90: ldp x4, x5, [x1, 32]

strnlen.S
    38: #define cntrem x5

/sysdeps/sparc/sparc64/multiarch/

memcpy-niagara2.S
    54:  faligndata %x4, %x5, %f8; \
    55:  faligndata %x5, %x6, %f10; \
    79:  #define FREG_MOVE_6(x0, x1, x2, x3, x4, x5) \    [argument]
    85:  fsrc2 %x5, %f10;
    86:  #define FREG_MOVE_7(x0, x1, x2, x3, x4, x5, x6) \    [argument]
    92:  fsrc2 %x5, %f10; \
    94:  #define FREG_MOVE_8(x0, x1, x2, x3, x4, x5, x6, x7) \    [argument]
    100: fsrc2 %x5, %f10; \
    123: #define FREG_LOAD_6(base, x0, x1, x2, x3, x4, x5) \    [argument]
    129: LOAD(ldd, base + 0x28, %x5);
    [all …]

/sysdeps/aarch64/multiarch/

strlen_asimd.S
    35: #define has_nul2 x5
    37: #define tmp2 x5