Lines matching refs:s64
12 s64 __aligned(8) counter;
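
These matches sit in a 64-bit atomic counter type. For orientation, a minimal sketch of the surrounding definition, assuming the usual kernel-style layout (s64 written as a plain typedef here, and __aligned(8) expanded to the GCC attribute it wraps):

    typedef long long s64;      /* kernel s64: signed 64-bit */

    typedef struct {
        /* 8-byte alignment lets the hardware access the counter in one go */
        s64 __attribute__((aligned(8))) counter;
    } atomic64_t;
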
17 static inline s64 arch_atomic64_read(const atomic64_t *v) in arch_atomic64_read()
19 s64 val; in arch_atomic64_read()
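
arch_atomic64_read() is a single atomic load of the counter, which the file presumably implements with an arch-specific 64-bit load (hence the local val). A portable sketch of the same semantics using C11 atomics instead:

    #include <stdatomic.h>

    typedef long long s64;
    typedef struct { _Atomic s64 counter; } atomic64_t;

    /* Semantics sketch: one relaxed, atomic 64-bit load. */
    static inline s64 arch_atomic64_read(const atomic64_t *v)
    {
        return atomic_load_explicit(&v->counter, memory_order_relaxed);
    }
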
29 static inline void arch_atomic64_set(atomic64_t *v, s64 a) in arch_atomic64_set()
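
Its counterpart arch_atomic64_set() is the matching atomic store, sketched the same way:

    #include <stdatomic.h>

    typedef long long s64;
    typedef struct { _Atomic s64 counter; } atomic64_t;

    /* Semantics sketch: one relaxed, atomic 64-bit store. */
    static inline void arch_atomic64_set(atomic64_t *v, s64 a)
    {
        atomic_store_explicit(&v->counter, a, memory_order_relaxed);
    }
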
50 static inline void arch_atomic64_##op(s64 a, atomic64_t *v) \
52 s64 val; \
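
The match at line 50 is the body of an ATOMIC64_OP() template that stamps out the void read-modify-write operations (add, sub, and so on). A sketch of the shape it generates, with two stated assumptions: the real template's extra parameters are arch opcodes, replaced here by an illustrative C-operator parameter c_op, and the arch's retry loop is stood in for by a C11 compare-exchange loop:

    #include <stdatomic.h>

    typedef long long s64;
    typedef struct { _Atomic s64 counter; } atomic64_t;

    #define ATOMIC64_OP(op, c_op)                                         \
    static inline void arch_atomic64_##op(s64 a, atomic64_t *v)           \
    {                                                                     \
        s64 val = atomic_load_explicit(&v->counter,                       \
                                       memory_order_relaxed);             \
        /* retry until the read-modify-write lands atomically */          \
        while (!atomic_compare_exchange_weak_explicit(&v->counter,        \
                    &val, val c_op a,                                     \
                    memory_order_relaxed, memory_order_relaxed))          \
            ; /* on failure, val is refreshed; recompute and retry */     \
    }

    ATOMIC64_OP(add, +)
    ATOMIC64_OP(sub, -)
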
67 static inline s64 arch_atomic64_##op##_return_relaxed(s64 a, atomic64_t *v) \
69 s64 val; \
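
The template at line 67 generates the *_return_relaxed variants: same update, but the new value is handed back, with the relaxed ordering the suffix advertises. Same sketch conventions as above (c_op remains an illustrative stand-in):

    #include <stdatomic.h>

    typedef long long s64;
    typedef struct { _Atomic s64 counter; } atomic64_t;

    #define ATOMIC64_OP_RETURN(op, c_op)                                  \
    static inline s64 arch_atomic64_##op##_return_relaxed(s64 a,          \
                                                          atomic64_t *v)  \
    {                                                                     \
        s64 val = atomic_load_explicit(&v->counter,                       \
                                       memory_order_relaxed);             \
        while (!atomic_compare_exchange_weak_explicit(&v->counter,        \
                    &val, val c_op a,                                     \
                    memory_order_relaxed, memory_order_relaxed))          \
            ;                                                             \
        return val c_op a; /* the value after the update */               \
    }

    ATOMIC64_OP_RETURN(add, +)
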
89 static inline s64 arch_atomic64_fetch_##op##_relaxed(s64 a, atomic64_t *v) \
91 s64 val, orig; \
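
The template at line 89 generates the fetch_*_relaxed variants; the extra orig local in the match above reflects their contract of returning the value observed before the update rather than after it. Sketch under the same assumptions:

    #include <stdatomic.h>

    typedef long long s64;
    typedef struct { _Atomic s64 counter; } atomic64_t;

    #define ATOMIC64_FETCH_OP(op, c_op)                                   \
    static inline s64 arch_atomic64_fetch_##op##_relaxed(s64 a,           \
                                                         atomic64_t *v)   \
    {                                                                     \
        s64 orig = atomic_load_explicit(&v->counter,                      \
                                        memory_order_relaxed);            \
        while (!atomic_compare_exchange_weak_explicit(&v->counter,        \
                    &orig, orig c_op a,                                   \
                    memory_order_relaxed, memory_order_relaxed))          \
            ;                                                             \
        return orig; /* the value before the update */                    \
    }

    ATOMIC64_FETCH_OP(and, &)
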
140 static inline s64 in arch_atomic64_cmpxchg()
141 arch_atomic64_cmpxchg(atomic64_t *ptr, s64 expected, s64 new) in arch_atomic64_cmpxchg()
143 s64 prev; in arch_atomic64_cmpxchg()
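
arch_atomic64_cmpxchg() stores new only if the counter still equals expected, and returns whatever value it actually held beforehand. A C11 sketch of those semantics, assuming the full ordering an unsuffixed kernel cmpxchg conventionally provides:

    #include <stdatomic.h>

    typedef long long s64;
    typedef struct { _Atomic s64 counter; } atomic64_t;

    static inline s64 arch_atomic64_cmpxchg(atomic64_t *ptr, s64 expected,
                                            s64 new)
    {
        s64 prev = expected;

        /* on failure, prev is rewritten with the value actually observed */
        atomic_compare_exchange_strong_explicit(&ptr->counter, &prev, new,
                                                memory_order_seq_cst,
                                                memory_order_seq_cst);
        return prev;
    }

A caller learns whether the swap happened by comparing the return value against expected.
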
163 static inline s64 arch_atomic64_xchg(atomic64_t *ptr, s64 new) in arch_atomic64_xchg()
165 s64 prev; in arch_atomic64_xchg()
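
arch_atomic64_xchg() unconditionally swaps in the new value and returns the previous one, sketched the same way:

    #include <stdatomic.h>

    typedef long long s64;
    typedef struct { _Atomic s64 counter; } atomic64_t;

    /* Semantics sketch: unconditional atomic swap, fully ordered. */
    static inline s64 arch_atomic64_xchg(atomic64_t *ptr, s64 new)
    {
        return atomic_exchange_explicit(&ptr->counter, new,
                                        memory_order_seq_cst);
    }
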
191 static inline s64 arch_atomic64_dec_if_positive(atomic64_t *v) in arch_atomic64_dec_if_positive()
193 s64 val; in arch_atomic64_dec_if_positive()
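
arch_atomic64_dec_if_positive() decrements the counter only if the result stays non-negative, and returns the old value minus one whether or not the store happened (a return of -1 therefore means the counter was already 0 and was left alone). A compare-exchange sketch of that contract:

    #include <stdatomic.h>

    typedef long long s64;
    typedef struct { _Atomic s64 counter; } atomic64_t;

    static inline s64 arch_atomic64_dec_if_positive(atomic64_t *v)
    {
        s64 val = atomic_load_explicit(&v->counter, memory_order_relaxed);
        s64 dec;

        do {
            dec = val - 1;
            if (dec < 0)    /* would go negative: leave v untouched */
                break;
        } while (!atomic_compare_exchange_weak_explicit(&v->counter,
                    &val, dec,
                    memory_order_seq_cst, memory_order_relaxed));

        return dec;         /* old value minus one, stored or not */
    }
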
224 static inline s64 arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u) in arch_atomic64_fetch_add_unless()
226 s64 old, temp; in arch_atomic64_fetch_add_unless()
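
Finally, arch_atomic64_fetch_add_unless() adds a to the counter unless it currently equals u, returning the value observed before any addition; comparing that return value with u tells the caller whether the add took place. A sketch of the contract (temp in the match above is presumably scratch for the arch-specific loop and has no counterpart here):

    #include <stdatomic.h>

    typedef long long s64;
    typedef struct { _Atomic s64 counter; } atomic64_t;

    static inline s64 arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a,
                                                     s64 u)
    {
        s64 old = atomic_load_explicit(&v->counter, memory_order_relaxed);

        do {
            if (old == u)   /* hit the excluded value: do nothing */
                break;
        } while (!atomic_compare_exchange_weak_explicit(&v->counter,
                    &old, old + a,
                    memory_order_seq_cst, memory_order_relaxed));

        return old;         /* the value before any addition */
    }
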