Home
last modified time | relevance | path

Searched refs:VMOVA (Results 1 – 20 of 20) sorted by relevance

/sysdeps/x86_64/
A D dl-trampoline.S:56 #define VMOVA vmovdqa64 macro
62 #undef VMOVA
66 #define VMOVA vmovdqa macro
72 #undef VMOVA
77 #define VMOVA movaps macro
84 #undef VMOVA
A D dl-trampoline.h:233 VMOVA %VEC(0), (LR_VECTOR_OFFSET)(%rsp)
234 VMOVA %VEC(1), (LR_VECTOR_OFFSET + VECTOR_SIZE)(%rsp)
235 VMOVA %VEC(2), (LR_VECTOR_OFFSET + VECTOR_SIZE*2)(%rsp)
236 VMOVA %VEC(3), (LR_VECTOR_OFFSET + VECTOR_SIZE*3)(%rsp)
237 VMOVA %VEC(4), (LR_VECTOR_OFFSET + VECTOR_SIZE*4)(%rsp)
238 VMOVA %VEC(5), (LR_VECTOR_OFFSET + VECTOR_SIZE*5)(%rsp)
286 2: VMOVA (LR_VECTOR_OFFSET)(%rsp), %VEC(0)
436 VMOVA %VEC(0), LRV_VECTOR0_OFFSET(%rcx)
437 VMOVA %VEC(1), LRV_VECTOR1_OFFSET(%rcx)
466 VMOVA LRV_VECTOR0_OFFSET(%rsp), %VEC(0)
[all …]
A D memset.S:29 #define VMOVA movaps macro
A D memmove.S:27 #define VMOVA movaps macro
/sysdeps/x86_64/multiarch/
A D strchr-evex.S:28 # define VMOVA vmovdqa64 macro
193 VMOVA (VEC_SIZE)(%rdi), %YMM1
205 VMOVA (VEC_SIZE * 2)(%rdi), %YMM1
213 VMOVA (VEC_SIZE * 3)(%rdi), %YMM1
223 VMOVA (VEC_SIZE * 4)(%rdi), %YMM1
239 VMOVA (VEC_SIZE * 4)(%rdi), %YMM1
240 VMOVA (VEC_SIZE * 5)(%rdi), %YMM2
241 VMOVA (VEC_SIZE * 6)(%rdi), %YMM3
242 VMOVA (VEC_SIZE * 7)(%rdi), %YMM4
344 VMOVA (%rdi), %YMM1
A D memmove-avx-unaligned-erms.S:6 # define VMOVA vmovdqa macro
A D strrchr-evex.S:28 # define VMOVA vmovdqa64 macro
105 VMOVA (%rdi), %YMM1
138 VMOVA (%rdi), %YMM1
150 VMOVA (%rdi), %YMM1
162 VMOVA (%rdi), %YMM1
174 VMOVA (%rdi), %YMM1
A D strcpy-evex.S:31 # define VMOVA vmovdqa64 macro
120 VMOVA (%rsi, %rcx), %YMM2
122 VMOVA VEC_SIZE(%rsi, %rcx), %YMM2
227 VMOVA (%rsi), %YMM4
228 VMOVA VEC_SIZE(%rsi), %YMM5
248 VMOVA (%rsi), %YMM4
250 VMOVA VEC_SIZE(%rsi), %YMM5
843 VMOVA %YMMZERO, (%rdi)
854 VMOVA %YMMZERO, (%rdi)
859 VMOVA %YMMZERO, (%rdi)
[all …]
A D memmove-avx-unaligned-erms-rtm.S:6 # define VMOVA vmovdqa macro
A D memmove-vec-unaligned-erms.S:540 VMOVA %VEC(1), (%rdi)
541 VMOVA %VEC(2), VEC_SIZE(%rdi)
542 VMOVA %VEC(3), (VEC_SIZE * 2)(%rdi)
543 VMOVA %VEC(4), (VEC_SIZE * 3)(%rdi)
592 VMOVA %VEC(1), (VEC_SIZE * 3)(%rcx)
593 VMOVA %VEC(2), (VEC_SIZE * 2)(%rcx)
594 VMOVA %VEC(3), (VEC_SIZE * 1)(%rcx)
830 VMOVA %VEC(0), (%rdi)
831 VMOVA %VEC(1), VEC_SIZE(%rdi)
904 VMOVA %VEC(0), (%rdi)
[all …]
A D memset-avx2-unaligned-erms.S:11 # define VMOVA vmovdqa macro
A D memset-avx512-unaligned-erms.S:14 # define VMOVA vmovdqa64 macro
A D memset-evex-unaligned-erms.S:14 # define VMOVA vmovdqa64 macro
A D memmove-avx512-unaligned-erms.S:26 # define VMOVA vmovdqa64 macro
A D memmove-evex-unaligned-erms.S:26 # define VMOVA vmovdqa64 macro
A D memset-vec-unaligned-erms.S:322 VMOVA %VEC(0), LOOP_4X_OFFSET(%LOOP_REG)
323 VMOVA %VEC(0), (VEC_SIZE + LOOP_4X_OFFSET)(%LOOP_REG)
324 VMOVA %VEC(0), (VEC_SIZE * 2 + LOOP_4X_OFFSET)(%LOOP_REG)
325 VMOVA %VEC(0), (VEC_SIZE * 3 + LOOP_4X_OFFSET)(%LOOP_REG)
A D strlen-evex.S:27 # define VMOVA vmovdqa64 macro
243 VMOVA (VEC_SIZE * 4)(%rdi), %YMM1
254 VMOVA (VEC_SIZE * 6)(%rdi), %YMM3
292 VMOVA (VEC_SIZE * 4)(%rdi), %YMM1
A D strcat-evex.S:28 # define VMOVA vmovdqa64 macro
196 VMOVA (%rax), %YMM0
197 VMOVA (VEC_SIZE * 2)(%rax), %YMM1
A D strcmp-evex.S:39 # define VMOVA vmovdqa64 macro
361 VMOVA (%rax), %YMM0
362 VMOVA VEC_SIZE(%rax), %YMM2
363 VMOVA (VEC_SIZE * 2)(%rax), %YMM4
364 VMOVA (VEC_SIZE * 3)(%rax), %YMM6
A D memrchr-evex.S:23 # define VMOVA vmovdqa64 macro

Completed in 16 milliseconds