Searched refs:arch_spin_is_locked (Results 1 – 19 of 19) sorted by relevance

/linux-6.1.9/include/linux/
spinlock_up.h:27  #define arch_spin_is_locked(x) ((x)->slock == 0)  [macro]
spinlock_up.h:62  #define arch_spin_is_locked(lock) ((void)(lock), 0)  [macro]
spinlock.h:115  #define raw_spin_is_locked(lock) arch_spin_is_locked(&(lock)->raw_lock)
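
The two spinlock_up.h hits are the uniprocessor fallbacks that raw_spin_is_locked() (spinlock.h:115) ultimately expands to: with CONFIG_DEBUG_SPINLOCK the UP lock keeps a real flag word in ->slock (0 while held), and without it the lock has no state at all, so the macro just evaluates its argument and reports 0. Below is a minimal userspace model of those two definitions, based only on the snippets shown here; every name in it is invented for the sketch.

#include <stdio.h>

/* Userspace model, not kernel code: mirrors the two UP definitions above. */
struct up_debug_lock { volatile unsigned int slock; }; /* CONFIG_DEBUG_SPINLOCK variant */

/* Debug UP variant: ->slock holds 1 when free and 0 while held. */
#define up_debug_is_locked(x)    ((x)->slock == 0)

/* Non-debug UP variant: no lock state; evaluate the argument, report 0. */
#define up_nodebug_is_locked(l)  ((void)(l), 0)

int main(void)
{
	struct up_debug_lock l = { .slock = 1 };   /* 1 == unlocked */

	printf("debug, free:  %d\n", up_debug_is_locked(&l));
	l.slock = 0;                               /* take the lock */
	printf("debug, held:  %d\n", up_debug_is_locked(&l));
	printf("nodebug, any: %d\n", up_nodebug_is_locked(&l));
	return 0;
}
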
/linux-6.1.9/include/asm-generic/
spinlock.h:71  static __always_inline int arch_spin_is_locked(arch_spinlock_t *lock)  [function]
spinlock.h:87  return !arch_spin_is_locked(&lock);  [in arch_spin_value_unlocked()]
qspinlock.h:143  #define arch_spin_is_locked(l) queued_spin_is_locked(l)  [macro]
qrwlock.h:132  return arch_spin_is_locked(&lock->wait_lock);  [in queued_rwlock_is_contended()]
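
The asm-generic entries wire arch_spin_is_locked() either to the generic ticket-based spinlock (spinlock.h:71) or, via qspinlock.h:143, to queued_spin_is_locked(); qrwlock.h reuses it to report contention on the rwlock's wait_lock. A ticket lock reads as held whenever the "next" ticket differs from the "owner" ticket. The following is a minimal userspace model of that check, assuming a 32-bit lock word split into two 16-bit halves; the names (ticket_lock_t, ticket_is_locked, and so on) are invented for the sketch, not the kernel's.

#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

typedef struct { _Atomic uint32_t val; } ticket_lock_t;

static int ticket_is_locked(ticket_lock_t *lock)
{
	uint32_t v = atomic_load(&lock->val);

	return (v >> 16) != (v & 0xffff);       /* next != owner -> held */
}

static void ticket_lock(ticket_lock_t *lock)
{
	/* Take a ticket (bump "next"), then wait until "owner" reaches it. */
	uint32_t ticket = atomic_fetch_add(&lock->val, 1u << 16) >> 16;

	while ((atomic_load(&lock->val) & 0xffff) != ticket)
		;                               /* spin */
}

static void ticket_unlock(ticket_lock_t *lock)
{
	atomic_fetch_add(&lock->val, 1);        /* hand off to the next ticket */
}

int main(void)
{
	ticket_lock_t l = { 0 };

	printf("before lock:  %d\n", ticket_is_locked(&l));
	ticket_lock(&l);
	printf("while held:   %d\n", ticket_is_locked(&l));
	ticket_unlock(&l);
	printf("after unlock: %d\n", ticket_is_locked(&l));
	return 0;
}
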
/linux-6.1.9/tools/include/linux/
spinlock.h:35  static inline bool arch_spin_is_locked(arch_spinlock_t *mutex)  [function]
/linux-6.1.9/arch/hexagon/include/asm/
spinlock.h:156  #define arch_spin_is_locked(x) ((x)->lock != 0)  [macro]
/linux-6.1.9/arch/alpha/include/asm/
spinlock.h:17  #define arch_spin_is_locked(x) ((x)->lock != 0)  [macro]
/linux-6.1.9/arch/sh/include/asm/
spinlock-cas.h:26  #define arch_spin_is_locked(x) ((x)->lock <= 0)  [macro]
spinlock-llsc.h:18  #define arch_spin_is_locked(x) ((x)->lock <= 0)  [macro]
/linux-6.1.9/arch/parisc/include/asm/
spinlock.h:10  static inline int arch_spin_is_locked(arch_spinlock_t *x)  [function]
/linux-6.1.9/arch/sparc/include/asm/
spinlock_32.h:16  #define arch_spin_is_locked(lock) (*((volatile unsigned char *)(lock)) != 0)  [macro]
/linux-6.1.9/arch/s390/include/asm/
spinlock.h:53  static inline int arch_spin_is_locked(arch_spinlock_t *lp)  [function]
/linux-6.1.9/arch/powerpc/include/asm/
simple_spinlock.h:39  static inline int arch_spin_is_locked(arch_spinlock_t *lock)  [function]
/linux-6.1.9/arch/arm/include/asm/
spinlock.h:119  static inline int arch_spin_is_locked(arch_spinlock_t *lock)  [function]
/linux-6.1.9/arch/arc/include/asm/
spinlock.h:13  #define arch_spin_is_locked(x) ((x)->slock != __ARCH_SPIN_LOCK_UNLOCKED__)  [macro]
/linux-6.1.9/arch/ia64/include/asm/
spinlock.h:101  static inline int arch_spin_is_locked(arch_spinlock_t *lock)  [function]
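
Several of the per-architecture definitions above (hexagon, alpha, sparc, arc, and the sh variants) reduce to the same idea: the lock is a single word that holds its unlocked value while free, so "is locked" is one comparison against that value. Here is a small test-and-set userspace model of that pattern, for illustration only; all names are invented for the sketch.

#include <stdatomic.h>
#include <stdio.h>

#define MODEL_SPIN_LOCK_UNLOCKED 0

typedef struct { _Atomic unsigned int lock; } model_spinlock_t;

static int model_spin_is_locked(model_spinlock_t *x)
{
	/* Held whenever the lock word differs from its unlocked value. */
	return atomic_load(&x->lock) != MODEL_SPIN_LOCK_UNLOCKED;
}

static void model_spin_lock(model_spinlock_t *x)
{
	/* Test-and-set: spin until we flip the word from 0 to 1. */
	while (atomic_exchange(&x->lock, 1))
		;
}

static void model_spin_unlock(model_spinlock_t *x)
{
	atomic_store(&x->lock, MODEL_SPIN_LOCK_UNLOCKED);
}

int main(void)
{
	model_spinlock_t l = { MODEL_SPIN_LOCK_UNLOCKED };

	model_spin_lock(&l);
	printf("held:     %d\n", model_spin_is_locked(&l));
	model_spin_unlock(&l);
	printf("released: %d\n", model_spin_is_locked(&l));
	return 0;
}
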
/linux-6.1.9/arch/x86/kernel/
hpet.c:812  if (arch_spin_is_locked(&old.lock))  [in read_hpet()]
hpet.c:844  } while ((new.value == old.value) && arch_spin_is_locked(&new.lock));  [in read_hpet()]
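
The two read_hpet() lines show the HPET clocksource's contention strategy: a reader that observes the shared lock held does not touch the slow HPET MMIO register itself, it spins until the current holder either publishes a fresh value or drops the lock, then returns the published value. The code below is a rough userspace sketch of that pattern, not the kernel code; it is a simplification, and every name in it (struct cached_reading, read_slow_hw, read_counter) is invented for the sketch rather than taken from hpet.c.

#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct cached_reading {
	_Atomic bool     lock;      /* stand-in for the arch_spinlock_t  */
	_Atomic uint64_t value;     /* last value published by a holder  */
};

static struct cached_reading cache;

/* Stand-in for the slow hardware (MMIO) read. */
static uint64_t read_slow_hw(void)
{
	static uint64_t fake_counter;
	return ++fake_counter;
}

static uint64_t read_counter(void)
{
	uint64_t old = atomic_load(&cache.value);

	/* Uncontended path: grab the lock, read the hardware, publish. */
	if (!atomic_exchange(&cache.lock, true)) {
		uint64_t v = read_slow_hw();

		atomic_store(&cache.value, v);
		atomic_store(&cache.lock, false);
		return v;
	}

	/* Contended path, mirroring the while () condition above: wait
	 * until the holder publishes a new value or releases the lock. */
	uint64_t new;
	do {
		new = atomic_load(&cache.value);
	} while (new == old && atomic_load(&cache.lock));

	return new;
}

int main(void)
{
	printf("first read:  %llu\n", (unsigned long long)read_counter());
	printf("second read: %llu\n", (unsigned long long)read_counter());
	return 0;
}
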
/linux-6.1.9/kernel/rcu/
tree.c:774  if (rcu_rdp_cpu_online(rdp) || arch_spin_is_locked(&rcu_state.ofl_lock))  [in rcu_lockdep_current_cpu_online()]
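
In tree.c the check is purely diagnostic: rcu_lockdep_current_cpu_online() treats a CPU as online either when RCU's own bookkeeping says so or when rcu_state.ofl_lock is observed held, i.e. a hotplug transition is in flight and warning would be a false positive. A tiny sketch of that "tolerate an in-flight transition" check follows; it is not the RCU code, and every name in it is invented for illustration.

#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

static _Atomic bool cpu_marked_online[64];    /* per-CPU online flags    */
static _Atomic bool transition_lock;          /* guards hotplug updates  */

/* Is "cpu" safe to treat as online for diagnostic purposes? */
static bool diag_cpu_online(int cpu)
{
	/* Either the flag says online, or a transition is running right
	 * now (lock observed held), in which case do not complain. */
	return atomic_load(&cpu_marked_online[cpu]) ||
	       atomic_load(&transition_lock);
}

int main(void)
{
	atomic_store(&cpu_marked_online[0], true);
	atomic_store(&transition_lock, true);     /* transition in flight */

	printf("cpu0: %d  cpu1: %d\n", diag_cpu_online(0), diag_cpu_online(1));
	return 0;
}
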