Searched refs:arch_spin_is_locked (Results 1 – 19 of 19) sorted by relevance
27 #define arch_spin_is_locked(x) ((x)->slock == 0) macro
62 #define arch_spin_is_locked(lock) ((void)(lock), 0) macro
115 #define raw_spin_is_locked(lock) arch_spin_is_locked(&(lock)->raw_lock)
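The hit above is the generic layer: raw_spin_is_locked() simply forwards to the architecture's arch_spin_is_locked(). A minimal, hedged user-space sketch of that indirection (the lock layouts and field names here are illustrative, not the kernel's exact definitions):

/* Illustrative sketch only: simplified lock layouts, not the kernel's headers. */
#include <stdio.h>

typedef struct { volatile unsigned int lock; } arch_spinlock_t;   /* hypothetical layout */
typedef struct { arch_spinlock_t raw_lock; } raw_spinlock_t;

/* Arch backend: "locked" means the lock word is non-zero (test-and-set style). */
#define arch_spin_is_locked(x) ((x)->lock != 0)

/* Generic layer forwards to the arch backend, as in the hit above. */
#define raw_spin_is_locked(lock) arch_spin_is_locked(&(lock)->raw_lock)

int main(void)
{
        raw_spinlock_t l = { { 0 } };

        printf("%d\n", raw_spin_is_locked(&l));   /* 0: unlocked */
        l.raw_lock.lock = 1;                      /* pretend someone took the lock */
        printf("%d\n", raw_spin_is_locked(&l));   /* 1: locked */
        return 0;
}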
71 static __always_inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
87 return !arch_spin_is_locked(&lock); in arch_spin_value_unlocked()
143 #define arch_spin_is_locked(l) queued_spin_is_locked(l) macro
132 return arch_spin_is_locked(&lock->wait_lock); in queued_rwlock_is_contended()
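The queued_rwlock_is_contended() hit shows the generic qrwlock reusing the spinlock primitive: the rwlock is reported as contended whenever its internal wait_lock is held by a queued waiter. A hedged sketch of that shape (types and field names are illustrative, not the kernel's qrwlock definition):

/* Hedged sketch: a queued rwlock reports contention when its internal
 * wait_lock is held by a queued waiter. */
typedef struct { volatile unsigned int lock; } arch_spinlock_t;
#define arch_spin_is_locked(x) ((x)->lock != 0)

struct queued_rwlock {
        volatile unsigned int cnts;     /* reader/writer counts (unused here) */
        arch_spinlock_t wait_lock;      /* serializes queued waiters */
};

static inline int queued_rwlock_is_contended(struct queued_rwlock *lock)
{
        /* Someone holds wait_lock => at least one waiter is queued behind it. */
        return arch_spin_is_locked(&lock->wait_lock);
}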
35 static inline bool arch_spin_is_locked(arch_spinlock_t *mutex) in arch_spin_is_locked() function
156 #define arch_spin_is_locked(x) ((x)->lock != 0) macro
17 #define arch_spin_is_locked(x) ((x)->lock != 0) macro
26 #define arch_spin_is_locked(x) ((x)->lock <= 0) macro
18 #define arch_spin_is_locked(x) ((x)->lock <= 0) macro
10 static inline int arch_spin_is_locked(arch_spinlock_t *x) in arch_spin_is_locked() function
16 #define arch_spin_is_locked(lock) (*((volatile unsigned char *)(lock)) != 0) macro
53 static inline int arch_spin_is_locked(arch_spinlock_t *lp) in arch_spin_is_locked() function
39 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
119 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
13 #define arch_spin_is_locked(x) ((x)->slock != __ARCH_SPIN_LOCK_UNLOCKED__) macro
101 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
812 if (arch_spin_is_locked(&old.lock)) in read_hpet()
844 } while ((new.value == old.value) && arch_spin_is_locked(&new.lock)); in read_hpet()
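The two read_hpet() hits are the read side of a contention-avoidance scheme: the lock word and the cached counter value share one 64-bit word, and a reader that finds the lock held spins until the holder either publishes a new value or drops the lock, instead of touching the hardware itself. A hedged, simplified sketch of that waiting loop (the union layout, the shared "cached" variable, and read_cached() are illustrative stand-ins):

/* Hedged sketch of the read_hpet() waiting pattern, not the kernel code. */
#include <stdint.h>

typedef struct { volatile unsigned int lock; } arch_spinlock_t;
#define arch_spin_is_locked(x) ((x)->lock != 0)

union cached_counter {
        struct {
                arch_spinlock_t lock;   /* taken by the CPU updating the value */
                uint32_t value;         /* last counter value that CPU published */
        };
        uint64_t lockval;               /* lock + value snapshotted as one word */
};

static volatile union cached_counter cached;

static uint32_t read_cached(void)
{
        union cached_counter old, new;

        old.lockval = cached.lockval;   /* snapshot lock and value together */

        if (!arch_spin_is_locked(&old.lock))
                return old.value;       /* fast path (simplified): no update in flight */

        /* Contended: another CPU is updating. Wait until it publishes a new
         * value or drops the lock, then return whatever value is current. */
        do {
                new.lockval = cached.lockval;
        } while (new.value == old.value && arch_spin_is_locked(&new.lock));

        return new.value;
}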
774 if (rcu_rdp_cpu_online(rdp) || arch_spin_is_locked(&rcu_state.ofl_lock)) in rcu_lockdep_current_cpu_online()
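The rcu_lockdep_current_cpu_online() hit uses the lock's state as information in its own right: if rcu_state.ofl_lock is held, a CPU-hotplug transition is in flight, so the current CPU is still treated as online even though its per-CPU bookkeeping may lag. A hedged, self-contained analogue of that check (all names below are illustrative stand-ins, not the RCU data structures):

/* Hedged analogue: the lock's state is itself the signal. If the "offline"
 * lock is held, a hotplug-style transition is in progress, so the CPU still
 * counts as online even though its per-CPU flag may not have caught up. */
#include <stdbool.h>

typedef struct { volatile unsigned int lock; } arch_spinlock_t;
#define arch_spin_is_locked(x) ((x)->lock != 0)

struct cpu_state {
        bool marked_online;             /* per-CPU bookkeeping */
};

static arch_spinlock_t offline_lock;    /* held while a CPU goes on/offline */

static bool cpu_counts_as_online(const struct cpu_state *cs)
{
        /* Online per the bookkeeping, or a transition holding the offline
         * lock is in progress and the flag may lag behind. */
        return cs->marked_online || arch_spin_is_locked(&offline_lock);
}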