1 /*
2 * bitops.c: atomic operations which got too long to be inlined all over
3 * the place.
4 *
5 * Copyright 1999 Philipp Rumpf (prumpf@tux.org)
6 * Copyright 2000 Grant Grundler (grundler@cup.hp.com)
7 */
8
9 #include <linux/config.h>
10 #include <linux/kernel.h>
11 #include <linux/spinlock.h>
12 #include <asm/system.h>
13 #include <asm/atomic.h>
14
#ifdef CONFIG_SMP
/*
 * Array of spinlocks backing the emulated atomic operations in this file.
 * Each function below locks ATOMIC_HASH(ptr), i.e. a lock selected by
 * hashing the target address, so independent atomics on different
 * addresses can proceed in parallel instead of serializing on one lock.
 * GNU range-designator initializer: every slot starts unlocked.
 */
spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] = {
	[0 ... (ATOMIC_HASH_SIZE-1)] = SPIN_LOCK_UNLOCKED
};
#endif

/*
 * NOTE(review): __atomic_lock is not referenced anywhere in this file --
 * presumably used by inline atomic helpers declared in a header; verify
 * before removing.
 */
spinlock_t __atomic_lock = SPIN_LOCK_UNLOCKED;
22
#ifdef __LP64__
/*
 * Atomically exchange the 64-bit value at *ptr with x, returning the
 * previous contents.  Atomicity is emulated by holding the per-address
 * hashed spinlock with interrupts disabled for the read-modify-write.
 */
unsigned long __xchg64(unsigned long x, unsigned long *ptr)
{
	unsigned long prev;
	unsigned long flags;

	SPIN_LOCK_IRQSAVE(ATOMIC_HASH(ptr), flags);
	prev = *ptr;
	*ptr = x;
	SPIN_UNLOCK_IRQRESTORE(ATOMIC_HASH(ptr), flags);

	return prev;
}
#endif
35
/*
 * Atomically exchange the 32-bit value at *ptr with x, returning the
 * previous contents sign-extended to unsigned long.  Atomicity is
 * emulated by holding the hashed spinlock with interrupts disabled.
 *
 * Fix: the original read "(long) temp = (long) *ptr;" -- a cast used as
 * an lvalue, which is not valid standard C (it was a GCC extension,
 * since removed).  Assigning the cast RHS to the plain variable keeps
 * the intended sign extension with identical semantics.
 */
unsigned long __xchg32(int x, int *ptr)
{
	unsigned long flags;
	unsigned long temp;

	SPIN_LOCK_IRQSAVE(ATOMIC_HASH(ptr), flags);
	temp = (long) *ptr;	/* XXX - sign extension wanted? */
	*ptr = x;
	SPIN_UNLOCK_IRQRESTORE(ATOMIC_HASH(ptr), flags);
	return temp;
}
47
48
/*
 * Atomically exchange the byte at *ptr with x, returning the previous
 * contents sign-extended to unsigned long (note: whether plain char is
 * signed is implementation-defined).  Atomicity is emulated by holding
 * the hashed spinlock with interrupts disabled.
 *
 * Fix: the original read "(long) temp = (long) *ptr;" -- a cast used as
 * an lvalue, which is not valid standard C (it was a GCC extension,
 * since removed).  Assigning the cast RHS to the plain variable keeps
 * the intended sign extension with identical semantics.
 */
unsigned long __xchg8(char x, char *ptr)
{
	unsigned long flags;
	unsigned long temp;

	SPIN_LOCK_IRQSAVE(ATOMIC_HASH(ptr), flags);
	temp = (long) *ptr;	/* XXX - sign extension wanted? */
	*ptr = x;
	SPIN_UNLOCK_IRQRESTORE(ATOMIC_HASH(ptr), flags);
	return temp;
}
60
61
#ifdef __LP64__
/*
 * Emulated 64-bit compare-and-swap: if *ptr equals old, store new into
 * *ptr.  Always returns the value *ptr held on entry, so the caller can
 * tell whether the swap happened (return value == old).  The hashed
 * spinlock is held with interrupts disabled across the read and the
 * conditional store.
 */
unsigned long __cmpxchg_u64(volatile unsigned long *ptr, unsigned long old, unsigned long new)
{
	unsigned long prev;
	unsigned long flags;

	SPIN_LOCK_IRQSAVE(ATOMIC_HASH(ptr), flags);
	prev = *ptr;
	if (prev == old)
		*ptr = new;
	SPIN_UNLOCK_IRQRESTORE(ATOMIC_HASH(ptr), flags);

	return prev;
}
#endif
75
/*
 * Emulated 32-bit compare-and-swap: if *ptr equals old, store new into
 * *ptr.  Always returns the value *ptr held on entry (widened to
 * unsigned long), so the caller can tell whether the swap happened.
 * The hashed spinlock is held with interrupts disabled across the read
 * and the conditional store.
 */
unsigned long __cmpxchg_u32(volatile unsigned int *ptr, unsigned int old, unsigned int new)
{
	unsigned int prev;
	unsigned long flags;

	SPIN_LOCK_IRQSAVE(ATOMIC_HASH(ptr), flags);
	prev = *ptr;
	if (prev == old)
		*ptr = new;
	SPIN_UNLOCK_IRQRESTORE(ATOMIC_HASH(ptr), flags);

	return (unsigned long) prev;
}
87