
Searched refs: _Q_LOCKED_VAL (Results 1 – 6 of 6), sorted by relevance

/linux-5.19.10/kernel/locking/
qspinlock_paravirt.h
   91  (cmpxchg_acquire(&lock->locked, 0, _Q_LOCKED_VAL) == 0)) {  in pv_hybrid_queued_unfair_trylock()
  123  _Q_LOCKED_VAL) == _Q_PENDING_VAL);  in trylock_clear_pending()
  145  new = (val & ~_Q_PENDING_MASK) | _Q_LOCKED_VAL;  in trylock_clear_pending()
  462  WRITE_ONCE(lock->locked, _Q_LOCKED_VAL);  in pv_wait_head_or_lock()
  485  return (u32)(atomic_read(&lock->val) | _Q_LOCKED_VAL);  in pv_wait_head_or_lock()
  556  locked = cmpxchg_release(&lock->locked, _Q_LOCKED_VAL, 0);  in __pv_queued_spin_unlock()
  557  if (likely(locked == _Q_LOCKED_VAL))  in __pv_queued_spin_unlock()
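The paravirt hits center on the lock handoff: pv_wait_head_or_lock() takes the lock with a plain byte store once the waiter is at the head of the queue, and __pv_queued_spin_unlock() releases it with a cmpxchg on the locked byte so a halted waiter (which has overwritten the byte with _Q_SLOW_VAL) can be detected and kicked. A minimal sketch of that release fast path in portable C11, using a toy struct rather than the kernel's qspinlock (names and types here are assumptions for illustration only):

    #include <stdatomic.h>
    #include <stdint.h>

    #define _Q_LOCKED_VAL 1U  /* locked byte value while held */

    /* Hypothetical stand-in for the kernel lock's locked byte. */
    struct toy_lock {
        _Atomic uint8_t locked;
    };

    /* Mirrors the shape of the __pv_queued_spin_unlock() fast path:
     * the release succeeds only if the byte still reads _Q_LOCKED_VAL,
     * i.e. no halted waiter has replaced it to request a kick;
     * otherwise a slow path would have to run. */
    static int toy_pv_unlock_fastpath(struct toy_lock *lock)
    {
        uint8_t expected = _Q_LOCKED_VAL;

        /* cmpxchg_release in the kernel: release ordering publishes
         * the critical section before the lock reads as free. */
        return atomic_compare_exchange_strong_explicit(
                &lock->locked, &expected, 0,
                memory_order_release, memory_order_relaxed);
    }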
qspinlock.c
  164  WRITE_ONCE(lock->locked_pending, _Q_LOCKED_VAL);  in clear_pending_set_locked()
  208  atomic_add(-_Q_PENDING_VAL + _Q_LOCKED_VAL, &lock->val);  in clear_pending_set_locked()
  264  WRITE_ONCE(lock->locked, _Q_LOCKED_VAL);  in set_locked()
  539  if (atomic_try_cmpxchg_relaxed(&lock->val, &val, _Q_LOCKED_VAL))  in queued_spin_lock_slowpath()
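The clear_pending_set_locked() hits show the pending-to-locked handoff done in a single operation: with the 5.19 field layout (locked byte at bits 0-7, pending bit at bit 8), adding -_Q_PENDING_VAL + _Q_LOCKED_VAL clears the pending bit and sets the locked byte at once. A small self-contained check of that arithmetic (the constants match the layout; the surrounding program is illustrative):

    #include <assert.h>
    #include <stdint.h>

    #define _Q_LOCKED_OFFSET   0
    #define _Q_LOCKED_VAL      (1U << _Q_LOCKED_OFFSET)   /* 0x001 */
    #define _Q_PENDING_OFFSET  8
    #define _Q_PENDING_VAL     (1U << _Q_PENDING_OFFSET)  /* 0x100 */

    int main(void)
    {
        /* State seen at qspinlock.c:208: we own the pending bit and
         * the previous holder has just cleared the locked byte. */
        uint32_t val = _Q_PENDING_VAL;             /* 0x100 */

        /* One add flips pending -> locked, as clear_pending_set_locked()
         * does with atomic_add(); -256 + 1 = -255 wraps correctly
         * in 32-bit unsigned arithmetic. */
        val += -_Q_PENDING_VAL + _Q_LOCKED_VAL;

        assert(val == _Q_LOCKED_VAL);              /* now 0x001 */
        return 0;
    }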
/linux-5.19.10/include/asm-generic/
qspinlock.h
   97  return likely(atomic_try_cmpxchg_acquire(&lock->val, &val, _Q_LOCKED_VAL));  in queued_spin_trylock()
  111  if (likely(atomic_try_cmpxchg_acquire(&lock->val, &val, _Q_LOCKED_VAL)))  in queued_spin_lock()
qspinlock_types.h
   92  #define _Q_LOCKED_VAL (1U << _Q_LOCKED_OFFSET)  macro
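Line 92 is the lone definition site: _Q_LOCKED_VAL is simply bit 0 of the 32-bit lock word, and both generic entry points above acquire the lock with one try_cmpxchg from 0 to that value. A compilable C11 approximation of the queued_spin_lock() fast path (the toy types and slow-path stub are assumptions, not the kernel API):

    #include <stdatomic.h>
    #include <stdint.h>

    #define _Q_LOCKED_OFFSET  0
    #define _Q_LOCKED_VAL     (1U << _Q_LOCKED_OFFSET)

    struct toy_qspinlock {
        _Atomic uint32_t val;  /* locked byte | pending bit | waiter tail */
    };

    /* Hypothetical stub standing in for queued_spin_lock_slowpath(). */
    void toy_slowpath(struct toy_qspinlock *lock);

    /* Uncontended case: a single acquire cmpxchg, which succeeds
     * exactly when the whole word is 0 (no owner, no pending bit,
     * no queued waiters). */
    static inline void toy_queued_spin_lock(struct toy_qspinlock *lock)
    {
        uint32_t old = 0;

        if (atomic_compare_exchange_strong_explicit(
                &lock->val, &old, _Q_LOCKED_VAL,
                memory_order_acquire, memory_order_relaxed))
            return;  /* acquire pairs with the release in unlock */

        toy_slowpath(lock);
    }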
/linux-5.19.10/arch/powerpc/include/asm/
qspinlock.h
   40  if (likely(arch_atomic_try_cmpxchg_lock(&lock->val, &val, _Q_LOCKED_VAL)))  in queued_spin_lock()
/linux-5.19.10/arch/x86/include/asm/
qspinlock.h
  102  } while (atomic_cmpxchg(&lock->val, 0, _Q_LOCKED_VAL) != 0);  in virt_spin_lock()
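virt_spin_lock() is the fallback used when running as a guest without paravirt spinlock support: rather than queue vCPUs that the hypervisor might preempt, it degrades to an unfair test-and-set loop on the whole word. A simplified sketch of that loop (toy types again; the real code spins with cpu_relax() between reads):

    #include <stdatomic.h>
    #include <stdint.h>

    #define _Q_LOCKED_VAL 1U

    struct toy_qspinlock {
        _Atomic uint32_t val;
    };

    /* Spin until the word reads 0, then try to swing it to
     * _Q_LOCKED_VAL; on a lost race, go back to read-only waiting
     * so the cache line stays shared while the lock is held. */
    static void toy_virt_spin_lock(struct toy_qspinlock *lock)
    {
        uint32_t expected;

        do {
            while (atomic_load_explicit(&lock->val,
                                        memory_order_relaxed) != 0)
                ;  /* the kernel calls cpu_relax() here */
            expected = 0;
        } while (!atomic_compare_exchange_strong_explicit(
                     &lock->val, &expected, _Q_LOCKED_VAL,
                     memory_order_acquire, memory_order_relaxed));
    }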