Lines matching refs: lock
178 #define __arch_exchange_and_add_body(lock, pfx, mem, value) \
182 __asm __volatile (lock "xaddb %b0, %1" \
187 __asm __volatile (lock "xaddw %w0, %1" \
192 __asm __volatile (lock "xaddl %0, %1" \
197 __asm __volatile (lock "xaddq %q0, %1" \
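The xaddb/xaddw/xaddl/xaddq lines above are size-dispatched inline-asm bodies in the style of glibc's x86-64 atomics, with the lock prefix passed in as a macro parameter so the same body can emit a locked or an unlocked instruction. Below is a minimal sketch of the 32-bit case as a standalone function, assuming x86-64 and GCC-style inline asm; the name exchange_and_add_32 is illustrative, not taken from the listed source.

#include <stdint.h>

/* Sketch of the xaddl variant: atomically add VALUE to *MEM and return
   the previous contents of *MEM.  xadd exchanges the register with the
   memory operand while the sum is stored back, so the "+r" register
   ends up holding the old value.  */
static inline int32_t
exchange_and_add_32 (int32_t *mem, int32_t value)
{
  int32_t result = value;
  __asm__ __volatile__ ("lock; xaddl %0, %1"
                        : "+r" (result), "+m" (*mem)
                        :
                        : "memory");
  return result;
}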
217 #define __arch_add_body(lock, pfx, apfx, mem, value) \
224 __asm __volatile (lock "addb %b1, %0" \
229 __asm __volatile (lock "addw %w1, %0" \
234 __asm __volatile (lock "addl %1, %0" \
239 __asm __volatile (lock "addq %q1, %0" \
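The add group follows the same pattern but discards the old value. The sketch below shows the kind of size dispatch such a body performs, picking addb/addw/addl/addq from the width of the memory operand; ATOMIC_ADD is a hypothetical name and the sizeof chain stands in for whatever selection logic the real macro uses.

#include <stdint.h>

/* Illustrative size dispatch for a locked add.  The casts keep every
   branch well typed even though only one branch is live for a given
   MEM; the b/w/q operand modifiers select the matching instruction
   suffix and sub-register name.  */
#define ATOMIC_ADD(mem, value)                                          \
  do {                                                                  \
    if (sizeof (*(mem)) == 1)                                           \
      __asm__ __volatile__ ("lock; addb %b1, %0" : "+m" (*(mem))        \
                            : "iq" ((int8_t) (value)) : "memory");      \
    else if (sizeof (*(mem)) == 2)                                      \
      __asm__ __volatile__ ("lock; addw %w1, %0" : "+m" (*(mem))        \
                            : "ir" ((int16_t) (value)) : "memory");     \
    else if (sizeof (*(mem)) == 4)                                      \
      __asm__ __volatile__ ("lock; addl %1, %0" : "+m" (*(mem))         \
                            : "ir" ((int32_t) (value)) : "memory");     \
    else                                                                \
      __asm__ __volatile__ ("lock; addq %q1, %0" : "+m" (*(mem))        \
                            : "er" ((int64_t) (value)) : "memory");     \
  } while (0)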
306 #define __arch_increment_body(lock, pfx, mem) \
309 __asm __volatile (lock "incb %b0" \
314 __asm __volatile (lock "incw %w0" \
319 __asm __volatile (lock "incl %0" \
324 __asm __volatile (lock "incq %q0" \
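Increment takes no value operand, so the body only needs the memory operand. A sketch of the 64-bit incq case, again assuming GCC inline asm on x86-64 and an illustrative helper name:

#include <stdint.h>

/* Sketch of the incq variant: lock-prefixed increment of a 64-bit
   counter held in memory.  */
static inline void
atomic_increment_64 (uint64_t *mem)
{
  __asm__ __volatile__ ("lock; incq %0"
                        : "+m" (*mem)
                        :
                        : "memory");
}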
364 #define __arch_decrement_body(lock, pfx, mem) \
367 __asm __volatile (lock "decb %b0" \
372 __asm __volatile (lock "decw %w0" \
377 __asm __volatile (lock "decl %0" \
382 __asm __volatile (lock "decq %q0" \
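Decrement mirrors increment instruction for instruction. A hedged sketch of the 16-bit decw case (the helper name is made up for illustration):

#include <stdint.h>

/* Sketch of the decw variant: lock-prefixed decrement of a 16-bit
   value in memory.  */
static inline void
atomic_decrement_16 (uint16_t *mem)
{
  __asm__ __volatile__ ("lock; decw %0"
                        : "+m" (*mem)
                        :
                        : "memory");
}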
470 #define __arch_and_body(lock, mem, mask) \
473 __asm __volatile (lock "andb %b1, %0" \
478 __asm __volatile (lock "andw %w1, %0" \
483 __asm __volatile (lock "andl %1, %0" \
488 __asm __volatile (lock "andq %q1, %0" \
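The and group atomically clears whichever bits are zero in the mask. A minimal 32-bit sketch, with an illustrative name and GCC-style asm:

#include <stdint.h>

/* Sketch of the andl variant: *MEM &= MASK as one locked instruction.  */
static inline void
atomic_and_32 (uint32_t *mem, uint32_t mask)
{
  __asm__ __volatile__ ("lock; andl %1, %0"
                        : "+m" (*mem)
                        : "ir" (mask)
                        : "memory");
}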
504 #define __arch_or_body(lock, mem, mask) \
507 __asm __volatile (lock "orb %b1, %0" \
512 __asm __volatile (lock "orw %w1, %0" \
517 __asm __volatile (lock "orl %1, %0" \
522 __asm __volatile (lock "orq %q1, %0" \
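The or group is the dual of and, atomically setting bits. A byte-wide sketch with a small usage example; the names are hypothetical, not from the listed source:

#include <stdint.h>
#include <stdio.h>

/* Sketch of the orb variant: atomically set the bits of MASK in a
   one-byte flags field.  */
static inline void
atomic_or_8 (uint8_t *mem, uint8_t mask)
{
  __asm__ __volatile__ ("lock; orb %b1, %0"
                        : "+m" (*mem)
                        : "iq" (mask)
                        : "memory");
}

int
main (void)
{
  uint8_t flags = 0x01;
  atomic_or_8 (&flags, 0x04);
  printf ("flags = %#x\n", flags);   /* prints 0x5 */
  return 0;
}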