Lines matching references to the identifier "old". Each match is listed with its line number in the source file; the lines that declare old are tagged (argument) or (local).

In arch_cmpxchg_niai8():
   84  static inline int arch_cmpxchg_niai8(int *lock, int old, int new)    (old: argument)
   86  int expected = old;
   91  : "=d" (old), "=Q" (*lock)
   92  : "0" (old), "d" (new), "Q" (*lock)
   94  return expected == old;
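These matches appear to come from the s390 spinlock code in the Linux kernel (arch/s390/lib/spinlock.c). arch_cmpxchg_niai8() does a compare-and-swap on the lock word in inline assembly (the operand lists at lines 91-92), using the s390 COMPARE AND SWAP instruction with, per the function name, a NIAI access-intent hint; it saves the caller's old value in expected and reports success by comparing it with the value the instruction leaves behind in old. The following stand-in is a minimal, portable sketch of that success/failure pattern using the GCC/Clang __atomic builtin instead of inline assembly; the helper name is invented and this is an illustration, not the kernel implementation.

#include <stdbool.h>

/*
 * Portable sketch: compare-and-swap that returns true on success.
 * The kernel routine above gets the same effect from the s390 "cs"
 * instruction, which leaves the observed lock value in "old" so the
 * caller can compare it against the saved "expected" copy.
 */
static inline bool cmpxchg_bool_sketch(int *lock, int old, int new)
{
	/* On failure the builtin writes the current *lock value into old. */
	return __atomic_compare_exchange_n(lock, &old, new, false,
					   __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}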
In arch_spin_lock_queued():
  120  int lockval, ix, node_id, tail_id, old, new, owner, count;    (old: local)
  131  old = READ_ONCE(lp->lock);
  132  if ((old & _Q_LOCK_CPU_MASK) == 0 &&
  133      (old & _Q_LOCK_STEAL_MASK) != _Q_LOCK_STEAL_MASK) {
  141  new = (old ? (old + _Q_LOCK_STEAL_ADD) : 0) | lockval;
  142  if (__atomic_cmpxchg_bool(&lp->lock, old, new))
  149  new = node_id | (old & _Q_LOCK_MASK);
  150  if (__atomic_cmpxchg_bool(&lp->lock, old, new))
  154  tail_id = old & _Q_TAIL_MASK;
  161  owner = arch_spin_yield_target(old, node);
  173  owner = arch_spin_yield_target(old, node);
  182  old = READ_ONCE(lp->lock);
  183  owner = old & _Q_LOCK_CPU_MASK;
  185  tail_id = old & _Q_TAIL_MASK;
  187  if (__atomic_cmpxchg_bool(&lp->lock, old, new))
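The arch_spin_lock_queued() matches outline the queued slow path: read the lock word (line 131); if no CPU owns the lock and the steal counter has not saturated, take or steal it with a compare-and-swap (lines 132-142); otherwise splice this CPU's wait node into the tail encoded in the lock word (lines 149-150), yield to a preempted owner if needed (lines 161, 173), and finally spin on the lock word itself (lines 182-187). Below is a compact sketch of just the steal-or-enqueue loop in C11 atomics. The function name, mask values, and constants are illustrative stand-ins for the kernel's _Q_LOCK_CPU_MASK, _Q_LOCK_STEAL_MASK, _Q_LOCK_STEAL_ADD and _Q_LOCK_MASK, not the real s390 encoding, and the wait-node handling is reduced to a boolean result.

#include <stdatomic.h>
#include <stdbool.h>

#define Q_LOCK_CPU_MASK   0x0000ffff	/* owner: CPU number + 1 (illustrative) */
#define Q_LOCK_STEAL_MASK 0x00030000	/* steal counter (illustrative) */
#define Q_LOCK_STEAL_ADD  0x00010000	/* one steal (illustrative) */
#define Q_LOCK_MASK       (Q_LOCK_CPU_MASK | Q_LOCK_STEAL_MASK)

/* Returns true if the lock was taken (possibly stolen), false if we enqueued. */
static bool steal_or_enqueue(atomic_int *lock, int lockval, int node_id)
{
	int old, new;

	while (1) {
		old = atomic_load(lock);
		if ((old & Q_LOCK_CPU_MASK) == 0 &&
		    (old & Q_LOCK_STEAL_MASK) != Q_LOCK_STEAL_MASK) {
			/*
			 * Lock is free.  If waiters are queued (old != 0),
			 * count this acquisition as a steal; once the
			 * counter saturates, stealing stops and the head
			 * waiter gets the lock.
			 */
			new = (old ? (old + Q_LOCK_STEAL_ADD) : 0) | lockval;
			if (atomic_compare_exchange_strong(lock, &old, new))
				return true;	/* got the lock */
			continue;		/* lock word changed, retry */
		}
		/* Lock is held: make our wait node the new tail. */
		new = node_id | (old & Q_LOCK_MASK);
		if (atomic_compare_exchange_strong(lock, &old, new))
			return false;		/* enqueued, must wait */
	}
}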
In arch_spin_lock_classic():
  213  int lockval, old, new, owner, count;    (old: local)
  224  old = arch_load_niai4(&lp->lock);
  225  owner = old & _Q_LOCK_CPU_MASK;
  228  new = (old & _Q_TAIL_MASK) | lockval;
  229  if (arch_cmpxchg_niai8(&lp->lock, old, new)) {
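arch_spin_lock_classic() is the plain spin path: arch_load_niai4() reads the lock word with a NIAI-4 access hint, the owner field is tested, and if the lock is free it is taken with arch_cmpxchg_niai8() while preserving any queued-waiter tail bits. The sketch below models that loop with C11 atomics in place of the hinted load and compare-and-swap; the function name and mask values are again invented for illustration, and the spin_retry budget plus yielding to a preempted owner are omitted.

#include <stdatomic.h>

#define Q_LOCK_CPU_MASK 0x0000ffff	/* owner: CPU number + 1 (illustrative) */
#define Q_TAIL_MASK     0x7ffc0000	/* queued-waiter tail bits (illustrative) */

static void spin_lock_classic_sketch(atomic_int *lock, int lockval)
{
	int old, new, owner;

	while (1) {
		old = atomic_load(lock);	/* arch_load_niai4() in the kernel */
		owner = old & Q_LOCK_CPU_MASK;
		if (!owner) {
			/* Lock is free: keep the tail bits, install ourselves as owner. */
			new = (old & Q_TAIL_MASK) | lockval;
			if (atomic_compare_exchange_strong(lock, &old, new))
				return;		/* got the lock */
		}
		/* Owner present or cmpxchg lost the race: spin and retry. */
	}
}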
In arch_write_lock_wait():
  292  int old;    (old: local)
  301  old = READ_ONCE(rw->cnts);
  302  if ((old & 0x1ffff) == 0 &&
  303      __atomic_cmpxchg_bool(&rw->cnts, old, old | 0x10000))
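arch_write_lock_wait() shows the writer side of the s390 rwlock: the combined count word rw->cnts is re-read until its low 17 bits are clear, at which point the write-locked bit 0x10000 is set with a compare-and-swap. Judging only from the masks visible here, bits 0-15 appear to count active readers and bit 16 marks the write lock; the waiting-writer bookkeeping above bit 16 and the internal wait-queue lock do not show up in these matches and are left out of the sketch below, whose function name is invented.

#include <stdatomic.h>

/*
 * Writer wait loop sketch, modelled on lines 301-303 above.
 * Assumption: (cnts & 0x1ffff) == 0 means "no readers, not write locked".
 */
static void write_lock_wait_sketch(atomic_int *cnts)
{
	int old;

	while (1) {
		old = atomic_load(cnts);
		if ((old & 0x1ffff) == 0 &&
		    atomic_compare_exchange_strong(cnts, &old, old | 0x10000))
			break;	/* write lock acquired */
	}
}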