#ifndef __ASM_SH_ATOMIC_IRQ_H
#define __ASM_SH_ATOMIC_IRQ_H

/*
 * Fallback atomic operations for SH CPUs without atomic instructions:
 * each read-modify-write of the counter is made atomic (on UP) by
 * disabling interrupts around it with raw_local_irq_save()/restore().
 */
/*
 * atomic_add - add @i to @v.
 *
 * Atomicity is provided by masking interrupts around the
 * read-modify-write of the counter (uniprocessor-only scheme).
 */
static inline void atomic_add(int i, atomic_t *v)
{
	unsigned long irqstate;

	raw_local_irq_save(irqstate);
	v->counter = v->counter + i;
	raw_local_irq_restore(irqstate);
}

/*
 * atomic_sub - subtract @i from @v.
 *
 * Interrupts are disabled across the update so the
 * read-modify-write cannot be interrupted (UP-only scheme).
 */
static inline void atomic_sub(int i, atomic_t *v)
{
	unsigned long irqstate;

	raw_local_irq_save(irqstate);
	v->counter = v->counter - i;
	raw_local_irq_restore(irqstate);
}

/*
 * atomic_add_return - add @i to @v and return the new value.
 *
 * The accumulation is done in an unsigned long, exactly as the
 * store back to the counter, with interrupts masked around the
 * whole read-modify-write so the returned value is the one stored.
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
	unsigned long irqstate;
	unsigned long result;

	raw_local_irq_save(irqstate);
	result = v->counter;
	result += i;
	v->counter = result;
	raw_local_irq_restore(irqstate);

	return result;
}

/*
 * atomic_sub_return - subtract @i from @v and return the new value.
 *
 * Mirror image of atomic_add_return(): unsigned intermediate,
 * interrupts masked across the read-modify-write, new value
 * returned to the caller.
 */
static inline int atomic_sub_return(int i, atomic_t *v)
{
	unsigned long irqstate;
	unsigned long result;

	raw_local_irq_save(irqstate);
	result = v->counter;
	result -= i;
	v->counter = result;
	raw_local_irq_restore(irqstate);

	return result;
}

/*
 * atomic_clear_mask - clear the bits in @mask from @v.
 *
 * Performs an atomic AND-NOT of the counter by disabling
 * interrupts around the update.
 */
static inline void atomic_clear_mask(unsigned int mask, atomic_t *v)
{
	unsigned long irqstate;

	raw_local_irq_save(irqstate);
	v->counter = v->counter & ~mask;
	raw_local_irq_restore(irqstate);
}

/*
 * atomic_set_mask - set the bits in @mask in @v.
 *
 * Performs an atomic OR of the counter by disabling
 * interrupts around the update.
 */
static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
{
	unsigned long irqstate;

	raw_local_irq_save(irqstate);
	v->counter = v->counter | mask;
	raw_local_irq_restore(irqstate);
}

#endif /* __ASM_SH_ATOMIC_IRQ_H */