Home
last modified time | relevance | path

Searched refs:arch_spin_value_unlocked (Results 1 – 8 of 8) sorted by relevance

/linux-6.1.9/arch/powerpc/include/asm/
simple_spinlock.h:34 static __always_inline int arch_spin_value_unlocked(arch_spinlock_t lock) in arch_spin_value_unlocked() function
41 return !arch_spin_value_unlocked(READ_ONCE(*lock)); in arch_spin_is_locked()
/linux-6.1.9/arch/arm/include/asm/
spinlock.h:114 static inline int arch_spin_value_unlocked(arch_spinlock_t lock) in arch_spin_value_unlocked() function
121 return !arch_spin_value_unlocked(READ_ONCE(*lock)); in arch_spin_is_locked()
/linux-6.1.9/include/asm-generic/
spinlock.h:85 static __always_inline int arch_spin_value_unlocked(arch_spinlock_t lock) in arch_spin_value_unlocked() function
qspinlock.h:145 #define arch_spin_value_unlocked(l) queued_spin_value_unlocked(l) macro
/linux-6.1.9/arch/alpha/include/asm/
spinlock.h:19 static inline int arch_spin_value_unlocked(arch_spinlock_t lock) in arch_spin_value_unlocked() function
/linux-6.1.9/arch/s390/include/asm/
spinlock.h:48 static inline int arch_spin_value_unlocked(arch_spinlock_t lock) in arch_spin_value_unlocked() function
/linux-6.1.9/lib/
lockref.c:16 while (likely(arch_spin_value_unlocked(old.lock.rlock.raw_lock))) { \
/linux-6.1.9/arch/ia64/include/asm/
spinlock.h:96 static __always_inline int arch_spin_value_unlocked(arch_spinlock_t lock) in arch_spin_value_unlocked() function