Lines matching refs: d

40 int d, z, z0; in raid6_sse21_gen_syndrome() local
51 for ( d = 0 ; d < bytes ; d += 16 ) { in raid6_sse21_gen_syndrome()
52 asm volatile("prefetchnta %0" : : "m" (dptr[z0][d])); in raid6_sse21_gen_syndrome()
53 asm volatile("movdqa %0,%%xmm2" : : "m" (dptr[z0][d])); /* P[0] */ in raid6_sse21_gen_syndrome()
54 asm volatile("prefetchnta %0" : : "m" (dptr[z0-1][d])); in raid6_sse21_gen_syndrome()
56 asm volatile("movdqa %0,%%xmm6" : : "m" (dptr[z0-1][d])); in raid6_sse21_gen_syndrome()
58 asm volatile("prefetchnta %0" : : "m" (dptr[z][d])); in raid6_sse21_gen_syndrome()
66 asm volatile("movdqa %0,%%xmm6" : : "m" (dptr[z][d])); in raid6_sse21_gen_syndrome()
76 asm volatile("movntdq %%xmm2,%0" : "=m" (p[d])); in raid6_sse21_gen_syndrome()
78 asm volatile("movntdq %%xmm4,%0" : "=m" (q[d])); in raid6_sse21_gen_syndrome()
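The lines above are the inner loop of the single-wide SSE2 syndrome generator: it walks the stripe 16 bytes at a time, keeping the running XOR parity P in xmm2 and the running Reed-Solomon syndrome Q in xmm4 while stepping through the data disks from z0 down to 0. A minimal scalar sketch of the same computation, one byte at a time over GF(2^8) with the 0x11d polynomial (all names here are illustrative, not taken from the file):

#include <stddef.h>
#include <stdint.h>

/* Double one GF(2^8) element (polynomial 0x11d): shift left, then fold
 * in 0x1d if the top bit was set before the shift. */
static inline uint8_t gf_mul2(uint8_t v)
{
	return (uint8_t)((v << 1) ^ ((v & 0x80) ? 0x1d : 0));
}

/* Scalar equivalent of the syndrome generation: dptr[0..z0] are the data
 * disks, p receives the XOR parity, q the Reed-Solomon syndrome. */
static void gen_syndrome_scalar(int z0, size_t bytes,
				uint8_t **dptr, uint8_t *p, uint8_t *q)
{
	for (size_t d = 0; d < bytes; d++) {
		uint8_t wp = dptr[z0][d];	/* running P, like xmm2 */
		uint8_t wq = dptr[z0][d];	/* running Q, like xmm4 */

		for (int z = z0 - 1; z >= 0; z--) {
			wp ^= dptr[z][d];		/* P: plain XOR */
			wq = gf_mul2(wq) ^ dptr[z][d];	/* Q: Horner step */
		}
		p[d] = wp;
		q[d] = wq;
	}
}

The prefetchnta on dptr[z][d] hides the latency of the next disk's data while the doubling of the current Q accumulator is still in flight.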
92 int d, z, z0; in raid6_sse21_xor_syndrome() local
102 for ( d = 0 ; d < bytes ; d += 16 ) { in raid6_sse21_xor_syndrome()
103 asm volatile("movdqa %0,%%xmm4" :: "m" (dptr[z0][d])); in raid6_sse21_xor_syndrome()
104 asm volatile("movdqa %0,%%xmm2" : : "m" (p[d])); in raid6_sse21_xor_syndrome()
113 asm volatile("movdqa %0,%%xmm5" :: "m" (dptr[z][d])); in raid6_sse21_xor_syndrome()
125 asm volatile("pxor %0,%%xmm4" : : "m" (q[d])); in raid6_sse21_xor_syndrome()
127 asm volatile("movdqa %%xmm4,%0" : "=m" (q[d])); in raid6_sse21_xor_syndrome()
128 asm volatile("movdqa %%xmm2,%0" : "=m" (p[d])); in raid6_sse21_xor_syndrome()
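The xor_syndrome variant updates an existing P/Q pair for only a sub-range [start, stop] of the data disks, which is what a partial-stripe read-modify-write needs: disks from z0 = stop down to start are folded in with the same XOR/Horner steps, disks below start only double the Q delta, and the results are XORed into the stored parity (note the plain movdqa write-back at lines 127-128 rather than a streaming store). A hedged scalar sketch of that behaviour, reusing gf_mul2() from above:

/* Scalar equivalent of the partial-stripe update: fold the contribution
 * of data disks [start, stop] into an existing P/Q pair. */
static void xor_syndrome_scalar(int start, int stop, size_t bytes,
				uint8_t **dptr, uint8_t *p, uint8_t *q)
{
	int z0 = stop;			/* highest data disk that changed */

	for (size_t d = 0; d < bytes; d++) {
		uint8_t wp = dptr[z0][d];	/* P delta */
		uint8_t wq = dptr[z0][d];	/* Q delta */

		/* Disks that actually changed: same XOR / Horner steps. */
		for (int z = z0 - 1; z >= start; z--) {
			wp ^= dptr[z][d];
			wq = gf_mul2(wq) ^ dptr[z][d];
		}
		/* Untouched lower disks only shift Q, so just keep doubling. */
		for (int z = start - 1; z >= 0; z--)
			wq = gf_mul2(wq);

		p[d] ^= wp;		/* fold the deltas into stored parity */
		q[d] ^= wq;
	}
}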
150 int d, z, z0; in raid6_sse22_gen_syndrome() local
163 for ( d = 0 ; d < bytes ; d += 32 ) { in raid6_sse22_gen_syndrome()
164 asm volatile("prefetchnta %0" : : "m" (dptr[z0][d])); in raid6_sse22_gen_syndrome()
165 asm volatile("movdqa %0,%%xmm2" : : "m" (dptr[z0][d])); /* P[0] */ in raid6_sse22_gen_syndrome()
166 asm volatile("movdqa %0,%%xmm3" : : "m" (dptr[z0][d+16])); /* P[1] */ in raid6_sse22_gen_syndrome()
170 asm volatile("prefetchnta %0" : : "m" (dptr[z][d])); in raid6_sse22_gen_syndrome()
179 asm volatile("movdqa %0,%%xmm5" : : "m" (dptr[z][d])); in raid6_sse22_gen_syndrome()
180 asm volatile("movdqa %0,%%xmm7" : : "m" (dptr[z][d+16])); in raid6_sse22_gen_syndrome()
188 asm volatile("movntdq %%xmm2,%0" : "=m" (p[d])); in raid6_sse22_gen_syndrome()
189 asm volatile("movntdq %%xmm3,%0" : "=m" (p[d+16])); in raid6_sse22_gen_syndrome()
190 asm volatile("movntdq %%xmm4,%0" : "=m" (q[d])); in raid6_sse22_gen_syndrome()
191 asm volatile("movntdq %%xmm6,%0" : "=m" (q[d+16])); in raid6_sse22_gen_syndrome()
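The sse22 generator is the same algorithm unrolled two-fold: each iteration covers 32 bytes with two independent P accumulators (xmm2/xmm3) and two Q accumulators (xmm4/xmm6), so the Galois-field doubling of one lane can overlap the data load of the other. The doubling itself is the pcmpgtb/paddb/pand/pxor idiom against the 0x1d constant; roughly, in intrinsics form (a sketch, not the kernel's code):

#include <emmintrin.h>

/* Double all 16 GF(2^8) bytes of v at once, the way the assembly does:
 * pcmpgtb builds a 0xff mask for bytes with the top bit set, paddb v,v
 * shifts every byte left by one, and pand/pxor fold in the 0x1d
 * reduction only where the mask is set. */
static inline __m128i gf_mul2_x16(__m128i v)
{
	const __m128i x1d = _mm_set1_epi8(0x1d);
	__m128i mask = _mm_cmpgt_epi8(_mm_setzero_si128(), v);
	__m128i doubled = _mm_add_epi8(v, v);

	return _mm_xor_si128(doubled, _mm_and_si128(mask, x1d));
}

The unrolled loop simply applies this step to xmm4 and xmm6 back to back before XORing in the next disk's two 16-byte chunks.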
203 int d, z, z0; in raid6_sse22_xor_syndrome() local
213 for ( d = 0 ; d < bytes ; d += 32 ) { in raid6_sse22_xor_syndrome()
214 asm volatile("movdqa %0,%%xmm4" :: "m" (dptr[z0][d])); in raid6_sse22_xor_syndrome()
215 asm volatile("movdqa %0,%%xmm6" :: "m" (dptr[z0][d+16])); in raid6_sse22_xor_syndrome()
216 asm volatile("movdqa %0,%%xmm2" : : "m" (p[d])); in raid6_sse22_xor_syndrome()
217 asm volatile("movdqa %0,%%xmm3" : : "m" (p[d+16])); in raid6_sse22_xor_syndrome()
232 asm volatile("movdqa %0,%%xmm5" :: "m" (dptr[z][d])); in raid6_sse22_xor_syndrome()
233 asm volatile("movdqa %0,%%xmm7" :: "m" (dptr[z][d+16])); in raid6_sse22_xor_syndrome()
252 asm volatile("pxor %0,%%xmm4" : : "m" (q[d])); in raid6_sse22_xor_syndrome()
253 asm volatile("pxor %0,%%xmm6" : : "m" (q[d+16])); in raid6_sse22_xor_syndrome()
255 asm volatile("movdqa %%xmm4,%0" : "=m" (q[d])); in raid6_sse22_xor_syndrome()
256 asm volatile("movdqa %%xmm6,%0" : "=m" (q[d+16])); in raid6_sse22_xor_syndrome()
257 asm volatile("movdqa %%xmm2,%0" : "=m" (p[d])); in raid6_sse22_xor_syndrome()
258 asm volatile("movdqa %%xmm3,%0" : "=m" (p[d+16])); in raid6_sse22_xor_syndrome()
282 int d, z, z0; in raid6_sse24_gen_syndrome() local
304 for ( d = 0 ; d < bytes ; d += 64 ) { in raid6_sse24_gen_syndrome()
307 asm volatile("prefetchnta %0" :: "m" (dptr[z][d])); in raid6_sse24_gen_syndrome()
308 asm volatile("prefetchnta %0" :: "m" (dptr[z][d+32])); in raid6_sse24_gen_syndrome()
325 asm volatile("movdqa %0,%%xmm5" :: "m" (dptr[z][d])); in raid6_sse24_gen_syndrome()
326 asm volatile("movdqa %0,%%xmm7" :: "m" (dptr[z][d+16])); in raid6_sse24_gen_syndrome()
327 asm volatile("movdqa %0,%%xmm13" :: "m" (dptr[z][d+32])); in raid6_sse24_gen_syndrome()
328 asm volatile("movdqa %0,%%xmm15" :: "m" (dptr[z][d+48])); in raid6_sse24_gen_syndrome()
342 asm volatile("movntdq %%xmm2,%0" : "=m" (p[d])); in raid6_sse24_gen_syndrome()
344 asm volatile("movntdq %%xmm3,%0" : "=m" (p[d+16])); in raid6_sse24_gen_syndrome()
346 asm volatile("movntdq %%xmm10,%0" : "=m" (p[d+32])); in raid6_sse24_gen_syndrome()
348 asm volatile("movntdq %%xmm11,%0" : "=m" (p[d+48])); in raid6_sse24_gen_syndrome()
350 asm volatile("movntdq %%xmm4,%0" : "=m" (q[d])); in raid6_sse24_gen_syndrome()
352 asm volatile("movntdq %%xmm6,%0" : "=m" (q[d+16])); in raid6_sse24_gen_syndrome()
354 asm volatile("movntdq %%xmm12,%0" : "=m" (q[d+32])); in raid6_sse24_gen_syndrome()
356 asm volatile("movntdq %%xmm14,%0" : "=m" (q[d+48])); in raid6_sse24_gen_syndrome()
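The four-wide sse24 generator processes 64 bytes per iteration and needs the extra xmm8-xmm15 registers, so it is only usable in 64-bit mode; it also prefetches the upcoming disk data at the top of the inner loop (lines 307-308). Results go out with movntdq, a non-temporal store that bypasses the cache, since freshly generated parity is unlikely to be read again soon; such stores are weakly ordered, so the routine finishes with an sfence. Approximately, in intrinsics (a sketch under those assumptions):

#include <emmintrin.h>
#include <stdint.h>

/* Write one 16-byte result with a non-temporal hint (movntdq): the data
 * goes out through write-combining buffers instead of displacing useful
 * cache lines with parity that won't be re-read. */
static inline void stream_out(uint8_t *dst, __m128i v)
{
	_mm_stream_si128((__m128i *)dst, v);
}

/* Once the whole stripe has been streamed out, an sfence makes the
 * weakly ordered non-temporal stores visible before the buffers are reused. */
static inline void stream_done(void)
{
	_mm_sfence();
}

The xor_syndrome paths above instead write P and Q back with plain movdqa: those blocks were read only a few instructions earlier, so they are already in cache and a non-temporal partial-cache-line write would be counterproductive.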
369 int d, z, z0; in raid6_sse24_xor_syndrome() local
379 for ( d = 0 ; d < bytes ; d += 64 ) { in raid6_sse24_xor_syndrome()
380 asm volatile("movdqa %0,%%xmm4" :: "m" (dptr[z0][d])); in raid6_sse24_xor_syndrome()
381 asm volatile("movdqa %0,%%xmm6" :: "m" (dptr[z0][d+16])); in raid6_sse24_xor_syndrome()
382 asm volatile("movdqa %0,%%xmm12" :: "m" (dptr[z0][d+32])); in raid6_sse24_xor_syndrome()
383 asm volatile("movdqa %0,%%xmm14" :: "m" (dptr[z0][d+48])); in raid6_sse24_xor_syndrome()
384 asm volatile("movdqa %0,%%xmm2" : : "m" (p[d])); in raid6_sse24_xor_syndrome()
385 asm volatile("movdqa %0,%%xmm3" : : "m" (p[d+16])); in raid6_sse24_xor_syndrome()
386 asm volatile("movdqa %0,%%xmm10" : : "m" (p[d+32])); in raid6_sse24_xor_syndrome()
387 asm volatile("movdqa %0,%%xmm11" : : "m" (p[d+48])); in raid6_sse24_xor_syndrome()
394 asm volatile("prefetchnta %0" :: "m" (dptr[z][d])); in raid6_sse24_xor_syndrome()
395 asm volatile("prefetchnta %0" :: "m" (dptr[z][d+32])); in raid6_sse24_xor_syndrome()
416 asm volatile("movdqa %0,%%xmm5" :: "m" (dptr[z][d])); in raid6_sse24_xor_syndrome()
417 asm volatile("movdqa %0,%%xmm7" :: "m" (dptr[z][d+16])); in raid6_sse24_xor_syndrome()
418 asm volatile("movdqa %0,%%xmm13" :: "m" (dptr[z][d+32])); in raid6_sse24_xor_syndrome()
419 asm volatile("movdqa %0,%%xmm15" :: "m" (dptr[z][d+48])); in raid6_sse24_xor_syndrome()
429 asm volatile("prefetchnta %0" :: "m" (q[d])); in raid6_sse24_xor_syndrome()
430 asm volatile("prefetchnta %0" :: "m" (q[d+32])); in raid6_sse24_xor_syndrome()
454 asm volatile("movntdq %%xmm2,%0" : "=m" (p[d])); in raid6_sse24_xor_syndrome()
455 asm volatile("movntdq %%xmm3,%0" : "=m" (p[d+16])); in raid6_sse24_xor_syndrome()
456 asm volatile("movntdq %%xmm10,%0" : "=m" (p[d+32])); in raid6_sse24_xor_syndrome()
457 asm volatile("movntdq %%xmm11,%0" : "=m" (p[d+48])); in raid6_sse24_xor_syndrome()
458 asm volatile("pxor %0,%%xmm4" : : "m" (q[d])); in raid6_sse24_xor_syndrome()
459 asm volatile("pxor %0,%%xmm6" : : "m" (q[d+16])); in raid6_sse24_xor_syndrome()
460 asm volatile("pxor %0,%%xmm12" : : "m" (q[d+32])); in raid6_sse24_xor_syndrome()
461 asm volatile("pxor %0,%%xmm14" : : "m" (q[d+48])); in raid6_sse24_xor_syndrome()
462 asm volatile("movntdq %%xmm4,%0" : "=m" (q[d])); in raid6_sse24_xor_syndrome()
463 asm volatile("movntdq %%xmm6,%0" : "=m" (q[d+16])); in raid6_sse24_xor_syndrome()
464 asm volatile("movntdq %%xmm12,%0" : "=m" (q[d+32])); in raid6_sse24_xor_syndrome()
465 asm volatile("movntdq %%xmm14,%0" : "=m" (q[d+48])); in raid6_sse24_xor_syndrome()
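All six routines share the same calling convention as the rest of the RAID-6 code: the pointer array lists the data disks first, then P, then Q, and every buffer must be 16-byte aligned for the movdqa loads and stores above. A rough usage sketch follows; the disk count, stripe size, and the assumption that the routines are externally callable are all illustrative (in the kernel they are file-local and dispatched through a function-pointer table):

#include <stddef.h>
#include <stdint.h>

#define NDISKS  8		/* illustrative: 6 data disks + P + Q */
#define STRIPE  4096		/* bytes per disk, a multiple of 64 */

/* Declarations matching the shapes used above (hypothetical visibility). */
void raid6_sse21_gen_syndrome(int disks, size_t bytes, void **ptrs);
void raid6_sse21_xor_syndrome(int disks, int start, int stop,
			      size_t bytes, void **ptrs);

/* Compute P and Q for a full stripe: ptrs[0..NDISKS-3] are data,
 * ptrs[NDISKS-2] is P, ptrs[NDISKS-1] is Q. */
static void build_parity(void **ptrs)
{
	raid6_sse21_gen_syndrome(NDISKS, STRIPE, ptrs);
}

/* Fold only data disks [first, last] into existing P/Q, e.g. for a
 * partial-stripe read-modify-write. */
static void update_parity(void **ptrs, int first, int last)
{
	raid6_sse21_xor_syndrome(NDISKS, first, last, STRIPE, ptrs);
}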