Home
last modified time | relevance | path

Searched refs:VMOVA (Results 1 – 25 of 25) sorted by relevance

/glibc-2.36/sysdeps/x86_64/
dl-trampoline.h:226 VMOVA %xmm0, (LR_XMM_OFFSET + XMM_SIZE*0)(%rsp)
227 VMOVA %xmm1, (LR_XMM_OFFSET + XMM_SIZE*1)(%rsp)
228 VMOVA %xmm2, (LR_XMM_OFFSET + XMM_SIZE*2)(%rsp)
229 VMOVA %xmm3, (LR_XMM_OFFSET + XMM_SIZE*3)(%rsp)
230 VMOVA %xmm4, (LR_XMM_OFFSET + XMM_SIZE*4)(%rsp)
231 VMOVA %xmm5, (LR_XMM_OFFSET + XMM_SIZE*5)(%rsp)
232 VMOVA %xmm6, (LR_XMM_OFFSET + XMM_SIZE*6)(%rsp)
233 VMOVA %xmm7, (LR_XMM_OFFSET + XMM_SIZE*7)(%rsp)
237 VMOVA %VEC(0), (LR_VECTOR_OFFSET + VECTOR_SIZE*0)(%rsp)
238 VMOVA %VEC(1), (LR_VECTOR_OFFSET + VECTOR_SIZE*1)(%rsp)
[all …]
dl-trampoline.S:57 #define VMOVA vmovdqa64 macro
64 #undef VMOVA
70 # define VMOVA vmovdqa macro
77 # undef VMOVA
85 # define VMOVA movaps macro
92 # undef VMOVA
/glibc-2.36/sysdeps/x86_64/multiarch/
strcpy-evex.S:34 # define VMOVA vmovdqa64 macro
123 VMOVA (%rsi, %rcx), %YMM2
125 VMOVA VEC_SIZE(%rsi, %rcx), %YMM2
141 VMOVA VEC_SIZE(%rsi, %rcx), %YMM3
157 VMOVA VEC_SIZE(%rsi, %rcx), %YMM4
173 VMOVA VEC_SIZE(%rsi, %rcx), %YMM2
189 VMOVA VEC_SIZE(%rsi, %rcx), %YMM2
204 VMOVA VEC_SIZE(%rsi, %rcx), %YMM3
230 VMOVA (%rsi), %YMM4
231 VMOVA VEC_SIZE(%rsi), %YMM5
[all …]
strchr-evex.S:30 # define VMOVA vmovdqa64 macro
205 VMOVA (VEC_SIZE)(%rdi), %YMM1
217 VMOVA (VEC_SIZE * 2)(%rdi), %YMM1
225 VMOVA (VEC_SIZE * 3)(%rdi), %YMM1
235 VMOVA (VEC_SIZE * 4)(%rdi), %YMM1
251 VMOVA (VEC_SIZE * 4)(%rdi), %YMM1
252 VMOVA (VEC_SIZE * 5)(%rdi), %YMM2
253 VMOVA (VEC_SIZE * 6)(%rdi), %YMM3
254 VMOVA (VEC_SIZE * 7)(%rdi), %YMM4
356 VMOVA (%rdi), %YMM1
memmove-vec-unaligned-erms.S:496 VMOVA %VEC(1), (%rdi)
497 VMOVA %VEC(2), VEC_SIZE(%rdi)
498 VMOVA %VEC(3), (VEC_SIZE * 2)(%rdi)
499 VMOVA %VEC(4), (VEC_SIZE * 3)(%rdi)
548 VMOVA %VEC(1), (VEC_SIZE * 3)(%rcx)
549 VMOVA %VEC(2), (VEC_SIZE * 2)(%rcx)
550 VMOVA %VEC(3), (VEC_SIZE * 1)(%rcx)
551 VMOVA %VEC(4), (VEC_SIZE * 0)(%rcx)
794 VMOVA %VEC(0), (%rdi)
795 VMOVA %VEC(1), VEC_SIZE(%rdi)
[all …]
strlen-evex-base.S:56 # define VMOVA vmovdqa64 macro
71 # define VMOVA vmovdqa32 macro
202 VMOVA (VEC_SIZE * 4)(%rax), %VMM1
204 VMOVA (VEC_SIZE * 6)(%rax), %VMM3
memset-vec-unaligned-erms.S:307 VMOVA %VEC(0), LOOP_4X_OFFSET(%LOOP_REG)
308 VMOVA %VEC(0), (VEC_SIZE + LOOP_4X_OFFSET)(%LOOP_REG)
309 VMOVA %VEC(0), (VEC_SIZE * 2 + LOOP_4X_OFFSET)(%LOOP_REG)
310 VMOVA %VEC(0), (VEC_SIZE * 3 + LOOP_4X_OFFSET)(%LOOP_REG)
strlen-evex.S:29 # define VMOVA vmovdqa64 macro
245 VMOVA (VEC_SIZE * 4)(%rdi), %YMM1
256 VMOVA (VEC_SIZE * 6)(%rdi), %YMM3
294 VMOVA (VEC_SIZE * 4)(%rdi), %YMM1
strcmp-evex.S:42 # define VMOVA vmovdqa64 macro
568 VMOVA (VEC_SIZE * 0)(%rdi), %YMM0
569 VMOVA (VEC_SIZE * 1)(%rdi), %YMM2
570 VMOVA (VEC_SIZE * 2)(%rdi), %YMM4
571 VMOVA (VEC_SIZE * 3)(%rdi), %YMM6
781 VMOVA (%rdi), %YMM0
895 VMOVA VEC_SIZE(%rdi), %YMM0
940 VMOVA (VEC_SIZE * 2)(%rdi), %YMM4
941 VMOVA (VEC_SIZE * 3)(%rdi), %YMM6
strcat-evex.S:31 # define VMOVA vmovdqa64 macro
199 VMOVA (%rax), %YMM0
200 VMOVA (VEC_SIZE * 2)(%rax), %YMM1
strcmp-avx2.S:45 # define VMOVA vmovdqa macro
547 VMOVA (VEC_SIZE * 0)(%rdi), %ymm0
548 VMOVA (VEC_SIZE * 1)(%rdi), %ymm2
549 VMOVA (VEC_SIZE * 2)(%rdi), %ymm4
550 VMOVA (VEC_SIZE * 3)(%rdi), %ymm6
750 VMOVA (%rdi), %ymm0
882 VMOVA (VEC_SIZE * 2)(%rdi), %ymm4
883 VMOVA (VEC_SIZE * 3)(%rdi), %ymm6
strrchr-evex.S:30 # define VMOVA vmovdqa64 macro
228 VMOVA (VEC_SIZE * 4)(%rdi), %YMM5
229 VMOVA (VEC_SIZE * 5)(%rdi), %YMM6
evex-vecs-common.h:32 #define VMOVA vmovdqa64 macro
sse2-vecs.h:40 #define VMOVA movaps macro
avx-vecs.h:40 #define VMOVA vmovdqa macro
memmove-avx-unaligned-erms.S:9 # define VMOVA vmovdqa macro
memmove-sse2-unaligned-erms.S:33 # define VMOVA movaps macro
memmove-avx-unaligned-erms-rtm.S:6 # define VMOVA vmovdqa macro
memset-evex-unaligned-erms.S:17 # define VMOVA vmovdqa64 macro
memset-avx512-unaligned-erms.S:17 # define VMOVA vmovdqa64 macro
memmove-evex-unaligned-erms.S:29 # define VMOVA vmovdqa64 macro
memset-sse2-unaligned-erms.S:35 # define VMOVA movaps macro
memmove-avx512-unaligned-erms.S:29 # define VMOVA vmovdqa64 macro
memset-avx2-unaligned-erms.S:14 # define VMOVA vmovdqa macro
/glibc-2.36/ChangeLog.old/
ChangeLog.18:23116 (VMOVA): Changed to movaps for smaller code sizes.
23339 a comment on VMOVU and VMOVA.