Searched refs:VEC (Results 1 – 13 of 13) sorted by relevance

/sysdeps/x86_64/multiarch/
memmove-vec-unaligned-erms.S
800 LOAD_ONE_SET((%rsi), 0, %VEC(0), %VEC(1), %VEC(2), %VEC(3))
801 LOAD_ONE_SET((%rsi), PAGE_SIZE, %VEC(4), %VEC(5), %VEC(6), %VEC(7))
804 STORE_ONE_SET((%rdi), 0, %VEC(0), %VEC(1), %VEC(2), %VEC(3))
805 STORE_ONE_SET((%rdi), PAGE_SIZE, %VEC(4), %VEC(5), %VEC(6), %VEC(7))
871 LOAD_ONE_SET((%rsi), 0, %VEC(0), %VEC(1), %VEC(2), %VEC(3))
872 LOAD_ONE_SET((%rsi), PAGE_SIZE, %VEC(4), %VEC(5), %VEC(6), %VEC(7))
873 LOAD_ONE_SET((%rsi), PAGE_SIZE * 2, %VEC(8), %VEC(9), %VEC(10), %VEC(11))
874 LOAD_ONE_SET((%rsi), PAGE_SIZE * 3, %VEC(12), %VEC(13), %VEC(14), %VEC(15))
877 STORE_ONE_SET((%rdi), 0, %VEC(0), %VEC(1), %VEC(2), %VEC(3))
878 STORE_ONE_SET((%rdi), PAGE_SIZE, %VEC(4), %VEC(5), %VEC(6), %VEC(7))
[all …]
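
The memmove-vec-unaligned-erms.S hits above come from its large-copy path, which moves four vectors per step from source offsets a page apart. LOAD_ONE_SET and STORE_ONE_SET are just bundles of four VMOVU moves over consecutive VEC_SIZE slots. A minimal sketch of that shape, assuming an AVX2-style build (the macro body, the VMOVU alias, the PAGE_SIZE value and the load_set_demo symbol are illustrative reconstructions, not glibc's exact definitions):

/* load-set-demo.S - illustrative sketch only; assemble with: gcc -c load-set-demo.S  */
#define VEC_SIZE  32
#define PAGE_SIZE 4096
#define VEC(i)    ymm##i         /* token-pasting: VEC(0) -> ymm0 */
#define VMOVU     vmovdqu        /* assumed unaligned vector move alias */

/* Plausible shape of LOAD_ONE_SET: four unaligned loads from
   base+offset .. base+offset+3*VEC_SIZE into four vector registers.  */
#define LOAD_ONE_SET(base, offset, v0, v1, v2, v3) \
	VMOVU	(offset)base, v0; \
	VMOVU	((offset) + VEC_SIZE)base, v1; \
	VMOVU	((offset) + VEC_SIZE * 2)base, v2; \
	VMOVU	((offset) + VEC_SIZE * 3)base, v3;

	.text
	.globl	load_set_demo
load_set_demo:
	/* As on hit lines 800-801: one set from %rsi, one from a page later.  */
	LOAD_ONE_SET((%rsi), 0, %VEC(0), %VEC(1), %VEC(2), %VEC(3))
	LOAD_ONE_SET((%rsi), PAGE_SIZE, %VEC(4), %VEC(5), %VEC(6), %VEC(7))
	vzeroupper
	ret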
memset-vec-unaligned-erms.S
137 VMOVU %VEC(0), -VEC_SIZE(%rdi,%rdx)
138 VMOVU %VEC(0), (%rdi)
193 VMOVU %VEC(0), (%rax)
194 VMOVU %VEC(0), -VEC_SIZE(%rax, %rdx)
204 VMOVU %VEC(0), (VEC_SIZE * -2)(%rdi)
205 VMOVU %VEC(0), (VEC_SIZE * -1)(%rdi)
238 vmovdqu8 %VEC(0), (%rax){%k1}
283 VMOVU %VEC(0), (%rax)
284 VMOVU %VEC(0), VEC_SIZE(%rax)
296 VMOVU %VEC(0), (VEC_SIZE * 2)(%rax)
[all …]
memset-avx512-unaligned-erms.S
11 # define VEC(i) VEC##i
memset-evex-unaligned-erms.S
11 # define VEC(i) VEC##i
memmove-avx512-unaligned-erms.S
23 # define VEC(i) VEC##i
memmove-evex-unaligned-erms.S
23 # define VEC(i) VEC##i
memmove-avx-unaligned-erms.S
3 # define VEC(i) ymm##i
memmove-avx-unaligned-erms-rtm.S
3 # define VEC(i) ymm##i
memset-avx2-unaligned-erms.S
8 # define VEC(i) ymm##i
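
Those per-ISA definitions are the whole trick: each variant redefines VEC(i) (plus VMOVU/VMOVA and VEC_SIZE) before including the shared memset/memmove body, so one template assembles as SSE2, AVX2, EVEX or AVX-512 code. A small self-contained sketch of the resulting expansion, using the head/tail store idiom visible in the memset hits at lines 137-138 (the vec_demo_store name and the VMOVU alias are assumptions for illustration):

/* vec-demo.S - illustrative sketch only; assemble with: gcc -c vec-demo.S  */
#define VEC_SIZE 32
#define VEC(i)   ymm##i          /* AVX2 variant: %VEC(0) expands to %ymm0 */
#define VMOVU    vmovdqu         /* assumed unaligned vector move alias */

	.text
	.globl	vec_demo_store
vec_demo_store:
	/* Store one vector at the start of the buffer and one ending at its
	   last byte (%rdi = dst, %rdx = length >= VEC_SIZE), the same
	   first/last pattern as the memset hits above.  */
	VMOVU	%VEC(0), (%rdi)
	VMOVU	%VEC(0), -VEC_SIZE(%rdi, %rdx)
	vzeroupper			/* avoid AVX-to-SSE transition stalls */
	ret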
/sysdeps/x86_64/
dl-trampoline.S
57 #define VEC(i) zmm##i
61 #undef VEC
67 #define VEC(i) ymm##i
71 #undef VEC
78 #define VEC(i) xmm##i
83 #undef VEC
dl-trampoline.h
233 VMOVA %VEC(0), (LR_VECTOR_OFFSET)(%rsp)
234 VMOVA %VEC(1), (LR_VECTOR_OFFSET + VECTOR_SIZE)(%rsp)
235 VMOVA %VEC(2), (LR_VECTOR_OFFSET + VECTOR_SIZE*2)(%rsp)
236 VMOVA %VEC(3), (LR_VECTOR_OFFSET + VECTOR_SIZE*3)(%rsp)
237 VMOVA %VEC(4), (LR_VECTOR_OFFSET + VECTOR_SIZE*4)(%rsp)
238 VMOVA %VEC(5), (LR_VECTOR_OFFSET + VECTOR_SIZE*5)(%rsp)
286 2: VMOVA (LR_VECTOR_OFFSET)(%rsp), %VEC(0)
436 VMOVA %VEC(0), LRV_VECTOR0_OFFSET(%rcx)
437 VMOVA %VEC(1), LRV_VECTOR1_OFFSET(%rcx)
466 VMOVA LRV_VECTOR0_OFFSET(%rsp), %VEC(0)
[all …]
memset.S
27 #define VEC(i) xmm##i
memmove.S
22 #define VEC(i) xmm##i
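
The /sysdeps/x86_64/ hits show the other consumer of the macro: dl-trampoline.S compiles dl-trampoline.h several times, redefining VEC(i) as zmm, ymm and xmm in turn, so the lazy-resolution and audit trampolines can spill and reload the vector argument registers at whatever width the CPU provides. A rough sketch of that spill/reload idiom under an AVX-512 definition (the spill_demo symbol, the VMOVA alias and the offset values are placeholders, not the real frame layout):

/* spill-demo.S - illustrative sketch only; assemble with: gcc -c spill-demo.S  */
#define VEC(i)           zmm##i      /* AVX-512 flavour of the trampoline */
#define VMOVA            vmovdqa64   /* assumed aligned vector move alias */
#define VECTOR_SIZE      64
#define LR_VECTOR_OFFSET 0           /* placeholder, not the real offset */

	.text
	.globl	spill_demo
spill_demo:
	push	%rbp
	mov	%rsp, %rbp
	sub	$(VECTOR_SIZE * 2), %rsp
	and	$-VECTOR_SIZE, %rsp	/* aligned moves need VECTOR_SIZE alignment */
	/* Spill two vector argument registers, as the hits at 233-238 do.  */
	VMOVA	%VEC(0), (LR_VECTOR_OFFSET)(%rsp)
	VMOVA	%VEC(1), (LR_VECTOR_OFFSET + VECTOR_SIZE)(%rsp)
	/* ... an audit hook would run here ... */
	VMOVA	(LR_VECTOR_OFFSET)(%rsp), %VEC(0)
	VMOVA	(LR_VECTOR_OFFSET + VECTOR_SIZE)(%rsp), %VEC(1)
	mov	%rbp, %rsp
	pop	%rbp
	ret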
