/glibc-2.36/sysdeps/s390/s390-32/

s390-mcount.S
    62  stm   %r14,%r5,96(%r15)
    63  cfi_offset (r14, -128)
    66  la    %r3,0(%r14)        # callees address = second parameter
    69  bras  %r14,0f
    71  0:  al  %r14,0(%r14)
    72  l     %r14,__mcount_internal@GOT(%r14)
    74  bras  %r14,0f
    76  0:  l   %r14,0(%r14)
    78  basr  %r14,%r14
    84  lm    %r14,%r5,96(%r15)
    [all …]
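These hits are glibc's s390 31-bit _mcount stub: it spills %r14 through %r5, loads the callee's entry address into %r3, fetches __mcount_internal through the GOT, calls it with basr %r14,%r14, and restores the registers. The stub only runs in profiled builds; below is a minimal sketch of code that triggers it, assuming a -pg build (file and function names are illustrative, and the exact hook symbol differs per architecture).

/* Minimal sketch: build with `gcc -pg mcount_demo.c -o mcount_demo`.
   With -pg the compiler inserts a call to the mcount hook in every
   function prologue; on s390 that hook is the _mcount stub above,
   which hands the caller/callee addresses to __mcount_internal so
   gprof can reconstruct the call graph from gmon.out. */
#include <stdio.h>

static long work(long n)          /* prologue gets an mcount call under -pg */
{
    long sum = 0;
    for (long i = 0; i < n; i++)
        sum += i;
    return sum;
}

int main(void)
{
    printf("%ld\n", work(1000000));
    return 0;                     /* run it, then: gprof ./mcount_demo gmon.out */
}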

dl-trampoline.h
    77  stm %r14,%r15,CFA_OFF+R14_OFF(%r15)
    78  cfi_offset (r14, R14_OFF)
    110 0: l %r14,1f-0b(%r1)
    111 bas %r14,0(%r14,%r1)      # call _dl_fixup
    120 lm %r14,%r15,FRAME_OFF+R14_OFF(%r15)   # restore frame and registers
    190 st %r14,CFA_OFF+R14_OFF(%r15)
    191 cfi_offset (r14, R14_OFF)
    225 lr %r4,%r14               # return address as third parm
    227 0: l %r14,6f-0b(%r1)
    230 bas %r14,0(%r14,%r1)      # call resolver
    [all …]
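dl-trampoline.h supplies the s390 lazy-binding trampolines: the plain resolver saves %r14/%r15, calls _dl_fixup to patch the relocation and then restores the frame, while the profiling variant passes the return address in %r4 as the third parameter to the resolver. The sketch below is a hedged illustration of the behaviour this implements, not glibc code; whether the slow path is exercised depends on linker defaults (many distributions link with -z now, so -Wl,-z,lazy may be needed to see lazy binding at all).

/* Hedged sketch: under lazy binding the first call to an external
   function enters its PLT stub, which jumps into the resolver
   trampoline from dl-trampoline.h; that calls _dl_fixup, patches the
   GOT slot, and later calls go straight to the target.  Run with
   `LD_DEBUG=bindings ./demo` to watch the fixups happen. */
#include <stdio.h>

int main(void)
{
    puts("first call: resolved through _dl_runtime_resolve -> _dl_fixup");
    puts("second call: uses the already-patched GOT entry");
    return 0;
}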

/glibc-2.36/sysdeps/unix/sysv/linux/arm/

swapcontext.S
    31  push {r0,r1,r3,r14}
    36  cfi_rel_offset (r14,12)
    41  pop {r0,r1,r3,r14}
    46  cfi_restore (r14)
    52  RETINSTR(ne, r14)
    56  str r14,[r0, #MCONTEXT_ARM_LR]
    57  str r14,[r0, #MCONTEXT_ARM_PC]

setcontext.S
    78  add r14, r4, #MCONTEXT_ARM_R0
    79  ldmia r14, {r0-r12}
    80  ldr r13, [r14, #(MCONTEXT_ARM_SP - MCONTEXT_ARM_R0)]
    81  add r14, r14, #(MCONTEXT_ARM_LR - MCONTEXT_ARM_R0)
    82  ldmia r14, {r14, pc}
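In the two ARM files r14 is the link register: swapcontext stores it into both MCONTEXT_ARM_LR and MCONTEXT_ARM_PC of the saved context, and setcontext's closing ldmia r14, {r14, pc} reloads LR and jumps to the saved PC in a single instruction. Below is a minimal sketch of the portable <ucontext.h> API these routines back; the coroutine and stack names are illustrative.

/* Minimal sketch of the <ucontext.h> API implemented by
   setcontext.S / swapcontext.S.  Build with any C compiler. */
#include <stdio.h>
#include <ucontext.h>

static ucontext_t main_ctx, co_ctx;

static void coroutine(void)
{
    puts("in coroutine");
    swapcontext(&co_ctx, &main_ctx);   /* save our LR/PC, resume main */
    puts("coroutine resumed");
}

int main(void)
{
    static char stack[64 * 1024];

    getcontext(&co_ctx);
    co_ctx.uc_stack.ss_sp = stack;
    co_ctx.uc_stack.ss_size = sizeof stack;
    co_ctx.uc_link = &main_ctx;        /* where to go when coroutine returns */
    makecontext(&co_ctx, coroutine, 0);

    swapcontext(&main_ctx, &co_ctx);   /* first switch into the coroutine */
    puts("back in main");
    swapcontext(&main_ctx, &co_ctx);   /* resume it once more */
    puts("done");
    return 0;
}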

/glibc-2.36/sysdeps/x86_64/fpu/multiarch/

svml_s_hypotf16_core_avx512.S
    115 # LOE rbx r12 r13 r14 r15 edx zmm0 zmm1 zmm2
    139 # LOE rbx r12 r13 r14 r15 edx zmm2
    142 # LOE rbx r12 r13 r14 r15 eax edx
    153 movq %r14, (%rsp)
    185 movq (%rsp), %r14
    197 # LOE rbx r12 r13 r14 r15 zmm2
    205 vmovss 64(%rsp, %r14, 4), %xmm0
    206 vmovss 128(%rsp, %r14, 4), %xmm1
    208 # LOE rbx r14 r15 r12d r13d xmm0
    210 vmovss %xmm0, 192(%rsp, %r14, 4)
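Everything under sysdeps/x86_64/fpu/multiarch in this listing is libmvec, glibc's vector math library: these files are the SSE4, AVX2 and AVX-512 kernels for hypotf/hypot, exp2f, exp10f, log2f, log10f, log1pf, sinhf, coshf, acosf, asinf and atan2f that a vectorizing compiler can call instead of the scalar <math.h> routines. The following is a hedged sketch of code that may end up in one of these kernels; the exact flags and the chosen vector width depend on the compiler and target.

/* Hedged sketch, not glibc code: with e.g.
     gcc -O2 -ftree-vectorize -ffast-math -mavx2 demo.c -lm
   (or an explicit `#pragma omp simd` under -fopenmp-simd) the compiler
   may replace the scalar hypotf call below with a libmvec entry point,
   such as the 8-lane AVX2 kernel in svml_s_hypotf8_core_avx2.S.
   Whether that happens depends on compiler version and options. */
#include <math.h>
#include <stdio.h>

#define N 1024

int main(void)
{
    static float x[N], y[N], d[N];

    for (int i = 0; i < N; i++) {
        x[i] = (float)i;
        y[i] = (float)(N - i);
    }

    for (int i = 0; i < N; i++)        /* vectorization candidate */
        d[i] = hypotf(x[i], y[i]);

    printf("%f\n", (double)d[10]);
    return 0;
}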

svml_s_hypotf8_core_avx2.S
    139 # LOE rbx r12 r13 r14 r15 edx ymm0 ymm1 ymm2
    163 # LOE rbx r12 r13 r14 r15 edx ymm2
    166 # LOE rbx r12 r13 r14 r15 eax edx
    177 movq %r14, (%rsp)
    209 movq (%rsp), %r14
    221 # LOE rbx r12 r13 r14 r15 ymm2
    229 vmovss 32(%rsp, %r14, 4), %xmm0
    230 vmovss 64(%rsp, %r14, 4), %xmm1
    232 # LOE rbx r14 r15 r12d r13d xmm0
    234 vmovss %xmm0, 96(%rsp, %r14, 4)

svml_d_hypot8_core_avx512.S
    107 # LOE rbx r12 r13 r14 r15 edx zmm0 zmm1 zmm2
    131 # LOE rbx r12 r13 r14 r15 edx zmm2
    134 # LOE rbx r12 r13 r14 r15 eax edx
    145 movq %r14, (%rsp)
    177 movq (%rsp), %r14
    189 # LOE rbx r12 r13 r14 r15 zmm2
    197 vmovsd 64(%rsp, %r14, 8), %xmm0
    198 vmovsd 128(%rsp, %r14, 8), %xmm1
    200 # LOE rbx r14 r15 r12d r13d xmm0
    202 vmovsd %xmm0, 192(%rsp, %r14, 8)
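The r14 hits in all of these SVML kernels belong to the same special-case tail: %r14 is spilled with movq %r14, (%rsp), reloaded later, and then used as a lane index while the inputs that tripped the fast path (flagged in edx) are stored to the stack, recomputed one element at a time, and written back, which is what the vmovss/vmovsd off(%rsp, %r14, 4 or 8) lines do. Below is a hedged C sketch of that masked-fallback idea, with hypothetical helper names, not a transcription of the assembly.

/* Hedged sketch of the masked-fallback pattern: the vector kernel is
   assumed to have produced results for all lanes already; lanes flagged
   in the mask are then redone with the scalar routine, which is the role
   the %r14-indexed loads/stores play in the .S files.  Link with -lm. */
#include <math.h>
#include <stdio.h>

#define LANES 8

static void fixup_special_lanes(const float *x, const float *y,
                                float *out, unsigned special_mask)
{
    for (int lane = 0; lane < LANES; lane++)
        if (special_mask & (1u << lane))           /* lane hit a corner case */
            out[lane] = hypotf(x[lane], y[lane]);  /* scalar recompute */
}

int main(void)
{
    float x[LANES] = { 3, 5, 8, 1e38f, 0, 7, 20, 9 };
    float y[LANES] = { 4, 12, 15, 1e38f, 0, 24, 21, 40 };
    float out[LANES] = { 0 };

    /* Pretend the fast vector path already ran and lane 3 needs fixing up. */
    fixup_special_lanes(x, y, out, 1u << 3);
    printf("%g\n", (double)out[3]);
    return 0;
}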

svml_d_hypot4_core_avx2.S
    145 # LOE rbx r12 r13 r14 r15 edx ymm0 ymm1 ymm2
    168 # LOE rbx r12 r13 r14 r15 edx ymm0
    171 # LOE rbx r12 r13 r14 r15 eax edx
    182 movq %r14, (%rsp)
    214 movq (%rsp), %r14
    226 # LOE rbx r12 r13 r14 r15 ymm0
    234 vmovsd 32(%rsp, %r14, 8), %xmm0
    235 vmovsd 64(%rsp, %r14, 8), %xmm1
    237 # LOE rbx r14 r15 r12d r13d xmm0
    239 vmovsd %xmm0, 96(%rsp, %r14, 8)

svml_s_exp2f16_core_avx512.S
    123 # LOE rbx r12 r13 r14 r15 edx zmm0 zmm1
    146 # LOE rbx r12 r13 r14 r15 edx zmm1
    149 # LOE rbx r12 r13 r14 r15 eax edx
    160 movq %r14, (%rsp)
    192 movq (%rsp), %r14
    204 # LOE rbx r12 r13 r14 r15 zmm1
    212 vmovss 64(%rsp, %r14, 4), %xmm0
    214 # LOE rbx r14 r15 r12d r13d xmm0
    216 vmovss %xmm0, 128(%rsp, %r14, 4)

svml_s_exp2f8_core_avx2.S
    102 # LOE rbx r12 r13 r14 r15 edx ymm0 ymm1
    125 # LOE rbx r12 r13 r14 r15 edx ymm1
    128 # LOE rbx r12 r13 r14 r15 eax edx
    139 movq %r14, (%rsp)
    171 movq (%rsp), %r14
    183 # LOE rbx r12 r13 r14 r15 ymm1
    191 vmovss 32(%rsp, %r14, 4), %xmm0
    193 # LOE rbx r14 r15 r12d r13d xmm0
    195 vmovss %xmm0, 64(%rsp, %r14, 4)

svml_s_log2f16_core_avx512.S
    72  # LOE rbx r12 r13 r14 r15 edx zmm0 zmm1
    95  # LOE rbx r12 r13 r14 r15 edx zmm1
    98  # LOE rbx r12 r13 r14 r15 eax edx
    109 movq %r14, (%rsp)
    141 movq (%rsp), %r14
    153 # LOE rbx r12 r13 r14 r15 zmm1
    161 vmovss 64(%rsp, %r14, 4), %xmm0
    163 # LOE rbx r14 r15 r12d r13d xmm0
    165 vmovss %xmm0, 128(%rsp, %r14, 4)

svml_s_hypotf4_core_sse4.S
    147 # LOE rbx rbp r12 r13 r14 r15 edx xmm0 xmm1 xmm2
    168 # LOE rbx rbp r12 r13 r14 r15 edx
    177 movq %r14, (%rsp)
    208 movq (%rsp), %r14
    217 # LOE rbx rbp r12 r13 r14 r15 xmm2
    225 movss 32(%rsp, %r14, 4), %xmm0
    226 movss 48(%rsp, %r14, 4), %xmm1
    228 # LOE rbx rbp r14 r15 r12d r13d xmm0
    230 movss %xmm0, 64(%rsp, %r14, 4)

svml_s_log10f16_core_avx512.S
    75  # LOE rbx r12 r13 r14 r15 edx zmm0 zmm1
    98  # LOE rbx r12 r13 r14 r15 edx zmm1
    101 # LOE rbx r12 r13 r14 r15 eax edx
    112 movq %r14, (%rsp)
    144 movq (%rsp), %r14
    156 # LOE rbx r12 r13 r14 r15 zmm1
    164 vmovss 64(%rsp, %r14, 4), %xmm0
    166 # LOE rbx r14 r15 r12d r13d xmm0
    168 vmovss %xmm0, 128(%rsp, %r14, 4)

svml_s_log2f8_core_avx2.S
    83  # LOE rbx r12 r13 r14 r15 edx ymm0 ymm1
    106 # LOE rbx r12 r13 r14 r15 edx ymm1
    109 # LOE rbx r12 r13 r14 r15 eax edx
    120 movq %r14, (%rsp)
    152 movq (%rsp), %r14
    164 # LOE rbx r12 r13 r14 r15 ymm1
    172 vmovss 32(%rsp, %r14, 4), %xmm0
    174 # LOE rbx r14 r15 r12d r13d xmm0
    176 vmovss %xmm0, 64(%rsp, %r14, 4)

svml_d_hypot2_core_sse4.S
    146 # LOE rbx rbp r12 r13 r14 r15 edx xmm0 xmm1 xmm2
    167 # LOE rbx rbp r12 r13 r14 r15 edx
    176 movq %r14, (%rsp)
    207 movq (%rsp), %r14
    216 # LOE rbx rbp r12 r13 r14 r15 xmm2
    224 movsd 32(%rsp, %r14, 8), %xmm0
    225 movsd 48(%rsp, %r14, 8), %xmm1
    227 # LOE rbx rbp r14 r15 r12d r13d xmm0
    229 movsd %xmm0, 64(%rsp, %r14, 8)

svml_s_atan2f8_core_avx2.S
    136 # LOE rbx r12 r13 r14 r15 edx ymm0 ymm1 ymm2 ymm3 ymm4 ymm5 ymm6 ymm7 ymm9 ymm10 ymm12 ymm13
    151 # LOE rbx r12 r13 r14 r15 edx ymm0 ymm7 ymm9
    175 # LOE rbx r12 r13 r14 r15 edx ymm9
    178 # LOE rbx r12 r13 r14 r15 eax edx
    189 movq %r14, (%rsp)
    221 movq (%rsp), %r14
    233 # LOE rbx r12 r13 r14 r15 ymm9
    241 vmovss 32(%rsp, %r14, 4), %xmm0
    242 vmovss 64(%rsp, %r14, 4), %xmm1
    244 # LOE rbx r14 r15 r12d r13d xmm0
    [all …]

svml_s_log10f8_core_avx2.S
    88  # LOE rbx r12 r13 r14 r15 edx ymm0 ymm1
    111 # LOE rbx r12 r13 r14 r15 edx ymm1
    114 # LOE rbx r12 r13 r14 r15 eax edx
    125 movq %r14, (%rsp)
    157 movq (%rsp), %r14
    169 # LOE rbx r12 r13 r14 r15 ymm1
    177 vmovss 32(%rsp, %r14, 4), %xmm0
    179 # LOE rbx r14 r15 r12d r13d xmm0
    181 vmovss %xmm0, 64(%rsp, %r14, 4)

svml_s_exp2f4_core_sse4.S
    107 # LOE rbx rbp r12 r13 r14 r15 edx xmm0 xmm1
    127 # LOE rbx rbp r12 r13 r14 r15 edx
    136 movq %r14, (%rsp)
    167 movq (%rsp), %r14
    176 # LOE rbx rbp r12 r13 r14 r15 xmm1
    184 movss 32(%rsp, %r14, 4), %xmm0
    186 # LOE rbx rbp r14 r15 r12d r13d xmm0
    188 movss %xmm0, 48(%rsp, %r14, 4)

svml_s_log1pf8_core_avx2.S
    105 # LOE rbx r12 r13 r14 r15 edx ymm0 ymm3
    127 # LOE rbx r12 r13 r14 r15 edx ymm0
    130 # LOE rbx r12 r13 r14 r15 eax edx
    141 movq %r14, (%rsp)
    173 movq (%rsp), %r14
    185 # LOE rbx r12 r13 r14 r15 ymm0
    193 vmovss 32(%rsp, %r14, 4), %xmm0
    195 # LOE rbx r14 r15 r12d r13d xmm0
    197 vmovss %xmm0, 64(%rsp, %r14, 4)

svml_s_sinhf8_core_avx2.S
    158 # LOE rbx r12 r13 r14 r15 edx ymm0 ymm2
    180 # LOE rbx r12 r13 r14 r15 edx ymm0
    183 # LOE rbx r12 r13 r14 r15 eax edx
    194 movq %r14, (%rsp)
    226 movq (%rsp), %r14
    238 # LOE rbx r12 r13 r14 r15 ymm0
    246 vmovss 32(%rsp, %r14, 4), %xmm0
    248 # LOE rbx r14 r15 r12d r13d xmm0
    250 vmovss %xmm0, 64(%rsp, %r14, 4)

svml_s_acosf8_core_avx2.S
    113 # LOE rbx r12 r13 r14 r15 edx ymm0 ymm5
    135 # LOE rbx r12 r13 r14 r15 edx ymm0
    138 # LOE rbx r12 r13 r14 r15 eax edx
    149 movq %r14, (%rsp)
    181 movq (%rsp), %r14
    193 # LOE rbx r12 r13 r14 r15 ymm0
    201 vmovss 32(%rsp, %r14, 4), %xmm0
    203 # LOE rbx r14 r15 r12d r13d xmm0
    205 vmovss %xmm0, 64(%rsp, %r14, 4)

svml_s_asinf8_core_avx2.S
    102 # LOE rbx r12 r13 r14 r15 edx ymm0 ymm4
    124 # LOE rbx r12 r13 r14 r15 edx ymm0
    127 # LOE rbx r12 r13 r14 r15 eax edx
    138 movq %r14, (%rsp)
    170 movq (%rsp), %r14
    182 # LOE rbx r12 r13 r14 r15 ymm0
    190 vmovss 32(%rsp, %r14, 4), %xmm0
    192 # LOE rbx r14 r15 r12d r13d xmm0
    194 vmovss %xmm0, 64(%rsp, %r14, 4)

svml_s_coshf8_core_avx2.S
    157 # LOE rbx r12 r13 r14 r15 edx ymm0 ymm1
    179 # LOE rbx r12 r13 r14 r15 edx ymm0
    182 # LOE rbx r12 r13 r14 r15 eax edx
    193 movq %r14, (%rsp)
    225 movq (%rsp), %r14
    237 # LOE rbx r12 r13 r14 r15 ymm0
    245 vmovss 32(%rsp, %r14, 4), %xmm0
    247 # LOE rbx r14 r15 r12d r13d xmm0
    249 vmovss %xmm0, 64(%rsp, %r14, 4)

svml_d_exp28_core_avx512.S
    141 # LOE rbx r12 r13 r14 r15 edx zmm0 zmm1
    164 # LOE rbx r12 r13 r14 r15 edx zmm1
    167 # LOE rbx r12 r13 r14 r15 eax edx
    178 movq %r14, (%rsp)
    210 movq (%rsp), %r14
    222 # LOE rbx r12 r13 r14 r15 zmm1
    230 vmovsd 64(%rsp, %r14, 8), %xmm0
    232 # LOE rbx r14 r15 r12d r13d xmm0
    234 vmovsd %xmm0, 128(%rsp, %r14, 8)

svml_s_exp10f16_core_avx512.S
    100 # LOE rbx r12 r13 r14 r15 edx zmm0 zmm1
    123 # LOE rbx r12 r13 r14 r15 edx zmm1
    126 # LOE rbx r12 r13 r14 r15 eax edx
    137 movq %r14, (%rsp)
    169 movq (%rsp), %r14
    181 # LOE rbx r12 r13 r14 r15 zmm1
    189 vmovss 64(%rsp, %r14, 4), %xmm0
    191 # LOE rbx r14 r15 r12d r13d xmm0
    193 vmovss %xmm0, 128(%rsp, %r14, 4)