/* $Id: atomic.S,v 1.4 2001/11/18 00:12:56 davem Exp $
 * atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999 David S. Miller (davem@redhat.com)
 */

#include <linux/config.h>
#include <asm/asi.h>

	/* On SMP we need memory barriers to ensure correct
	 * memory operation ordering; these are nop'd out for
	 * uniprocessor.
	 */
#ifdef CONFIG_SMP
#define ATOMIC_PRE_BARRIER	membar #StoreLoad | #LoadLoad
#define ATOMIC_POST_BARRIER	membar #StoreLoad | #StoreStore
#else
#define ATOMIC_PRE_BARRIER	nop
#define ATOMIC_POST_BARRIER	nop
#endif
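
	/* For reference: membar #LoadLoad orders earlier loads before
	 * later loads, #StoreStore earlier stores before later stores,
	 * and #StoreLoad earlier stores before later loads.  So the
	 * pre-barrier orders all prior memory operations before the
	 * loads in the loop, and the post-barrier orders the cas
	 * store before all subsequent memory operations.
	 */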

	.text

	.globl	atomic_impl_begin, atomic_impl_end
atomic_impl_begin:
	/* Two versions of the atomic routines: one that does
	 * not return a value and does not perform memory barriers,
	 * and a second that returns a value and does the barriers.
	 */
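	/* For reference, the C-level declarations these implement
	 * look roughly like this (assuming the usual <asm/atomic.h>
	 * declarations, where atomic_t wraps a 32-bit counter):
	 *
	 *	extern void atomic_add(int i, atomic_t *v);
	 *	extern void atomic_sub(int i, atomic_t *v);
	 *	extern int atomic_add_ret(int i, atomic_t *v);
	 *	extern int atomic_sub_ret(int i, atomic_t *v);
	 */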
	.globl	atomic_add
	.type	atomic_add,#function
atomic_add: /* %o0 = increment, %o1 = atomic_ptr */
1:	lduw	[%o1], %g5		! load current counter value
	add	%g5, %o0, %g7		! compute updated value
	cas	[%o1], %g5, %g7		! store iff counter still equals %g5
	cmp	%g5, %g7		! %g7 now holds the old memory value
	bne,pn	%icc, 1b		! if they differ we raced, retry
	 nop
	retl
	 nop
	.size	atomic_add, .-atomic_add

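	/* The loop above is the standard compare-and-swap retry
	 * idiom; in C-like pseudocode (illustrative only, with cas()
	 * returning the old memory value):
	 *
	 *	do {
	 *		old = *ptr;
	 *		new = old + i;
	 *	} while (cas(ptr, old, new) != old);
	 */
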
	.globl	atomic_sub
	.type	atomic_sub,#function
atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
1:	lduw	[%o1], %g5		! load current counter value
	sub	%g5, %o0, %g7		! compute updated value
	cas	[%o1], %g5, %g7		! store iff counter still equals %g5
	cmp	%g5, %g7		! %g7 now holds the old memory value
	bne,pn	%icc, 1b		! if they differ we raced, retry
	 nop
	retl
	 nop
	.size	atomic_sub, .-atomic_sub

	.globl	atomic_add_ret
	.type	atomic_add_ret,#function
atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
	ATOMIC_PRE_BARRIER
1:	lduw	[%o1], %g5		! load current counter value
	add	%g5, %o0, %g7		! compute updated value
	cas	[%o1], %g5, %g7		! store iff counter still equals %g5
	cmp	%g5, %g7		! %g7 now holds the old memory value
	bne,pn	%icc, 1b		! if they differ we raced, retry
	 add	%g7, %o0, %g7		! delay slot: recompute the new value
					! from the cas result (valid on success)
	ATOMIC_POST_BARRIER
	retl
	 sra	%g7, 0, %o0		! sign-extend the 32-bit result
	.size	atomic_add_ret, .-atomic_add_ret

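	/* atomic_add_ret in the same illustrative pseudocode, with
	 * the barriers and return value (atomic_sub_ret is identical
	 * with '-' in place of '+'):
	 *
	 *	membar(pre);
	 *	do {
	 *		old = *ptr;
	 *	} while (cas(ptr, old, old + i) != old);
	 *	membar(post);
	 *	return old + i;
	 */
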
	.globl	atomic_sub_ret
	.type	atomic_sub_ret,#function
atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
	ATOMIC_PRE_BARRIER
1:	lduw	[%o1], %g5		! load current counter value
	sub	%g5, %o0, %g7		! compute updated value
	cas	[%o1], %g5, %g7		! store iff counter still equals %g5
	cmp	%g5, %g7		! %g7 now holds the old memory value
	bne,pn	%icc, 1b		! if they differ we raced, retry
	 sub	%g7, %o0, %g7		! delay slot: recompute the new value
					! from the cas result (valid on success)
	ATOMIC_POST_BARRIER
	retl
	 sra	%g7, 0, %o0		! sign-extend the 32-bit result
	.size	atomic_sub_ret, .-atomic_sub_ret
atomic_impl_end: