1/* atomic.S: Move this stuff here for better ICACHE hit rates.
2 *
3 * Copyright (C) 1996 David S. Miller (davem@caipfs.rutgers.edu)
4 */
5
6#include <linux/config.h>
7#include <asm/cprefix.h>
8#include <asm/ptrace.h>
9#include <asm/psr.h>
10
11	.text
12	.align	4
13
	/* Start-of-range marker paired with __atomic_end at the bottom of
	 * this file, bracketing all the atomic routines as one contiguous
	 * chunk (the header comment cites ICACHE hit rates as the reason
	 * for grouping them).  NOTE(review): the actual consumer of these
	 * bracket symbols lives outside this file -- confirm before moving
	 * code across the markers. */
	.globl  __atomic_begin
__atomic_begin:
16
#ifndef CONFIG_SMP
	/* ___xchg32_sun4c: atomically exchange %g2 with the word at [%g1],
	 * for sun4c machines (which presumably lack a usable SWAP
	 * instruction -- hence the load/store emulation; TODO confirm).
	 * Atomicity is obtained by raising the processor interrupt level
	 * (PIL) around the load/store pair, which is only safe on UP --
	 * hence the !CONFIG_SMP guard around both routines here.
	 *
	 * In:  %g1 = pointer to word, %g2 = new value,
	 *      %g4 = caller's saved %o7
	 * Out: %g2 = previous value of the word
	 * Clobbers: %g3, %g7, icc; %psr PIL field transiently.
	 *
	 * NOTE: non-standard return convention -- jumps to %o7 + 8 while
	 * restoring %o7 from %g4 in the delay slot; the callers set this up.
	 */
	.globl	___xchg32_sun4c
___xchg32_sun4c:
	rd	%psr, %g3		! %g3 = PSR on entry
	andcc	%g3, PSR_PIL, %g0	! interrupts already disabled?
	bne	1f			! yes -> PIL already raised, skip
	 nop
	wr	%g3, PSR_PIL, %psr	! WRPSR writes rs1 XOR rs2: PIL bits
					! tested zero above, so this sets them
	nop; nop; nop			! mandatory 3-insn WRPSR settling delay
1:
	andcc	%g3, PSR_PIL, %g0	! re-test: was PIL raised by the caller?
	ld	[%g1], %g7		! %g7 = old value (the load half...)
	bne	1f			! caller owned PIL -> leave it raised
	 st	%g2, [%g1]		! (...store half of the exchange,
					!  done in the branch delay slot)
	wr	%g3, 0x0, %psr		! restore entry PSR (re-enable ints)
	nop; nop; nop			! WRPSR settling delay again
1:
	mov	%g7, %g2		! hand the old value back in %g2
	jmpl	%o7 + 8, %g0		! return (note the +8 -- see header)
	 mov	%g4, %o7		! restore caller's %o7 in delay slot

	/* ___xchg32_sun4md: same register contract as above, but sun4m/
	 * sun4d have an atomic SWAP instruction, so no PIL games needed. */
	.globl	___xchg32_sun4md
___xchg32_sun4md:
	swap	[%g1], %g2		! atomically exchange %g2 <-> [%g1]
	jmpl	%o7 + 8, %g0		! return (same +8 convention)
	 mov	%g4, %o7		! restore caller's %o7 in delay slot
#endif
44
45	/* Read asm-sparc/atomic.h carefully to understand how this works for SMP.
46	 * Really, some things here for SMP are overly clever, go read the header.
47	 */
	/* ___atomic_add: atomically add %g2 to the counter at [%g1] and
	 * return the new value.
	 *
	 * In:  %g1 = pointer to the atomic_t
	 *      %g2 = signed amount to add
	 *      %g4 = caller's saved %o7
	 * Out: %g2 = new counter value
	 * Clobbers: %g3, %g7 (and icc on SMP); %psr PIL field transiently.
	 *
	 * UP: atomicity comes from raising PIL around load/add/store.
	 * SMP: additionally, the byte at offset 3 (the word's LSB -- SPARC
	 * is big-endian) is a ldstub spin lock, and the counter proper is
	 * kept shifted left by 8 in the upper 24 bits.  Storing the newly
	 * shifted value writes a zero low byte, which drops the lock in the
	 * same store -- see asm-sparc/atomic.h for the full scheme.
	 *
	 * NOTE: returns via "jmpl %o7, %g0" -- NOT the usual %o7 + 8 --
	 * because the callers in atomic.h rely on it; do not "fix" this.
	 */
	.globl	___atomic_add
___atomic_add:
	rd	%psr, %g3		! Keep the code small, old way was stupid
	nop; nop; nop;			! Let the bits set
	or	%g3, PSR_PIL, %g7	! Disable interrupts
	wr	%g7, 0x0, %psr		! Set %psr
	nop; nop; nop;			! Let the bits set
#ifdef CONFIG_SMP
1:	ldstub	[%g1 + 3], %g7		! Spin on the byte lock for SMP.
	orcc	%g7, 0x0, %g0		! Did we get it?
	bne	1b			! Nope...
	 ld	[%g1], %g7		! Load locked atomic_t
	sra	%g7, 8, %g7		! Get signed 24-bit integer
	add	%g7, %g2, %g2		! Add in argument
	sll	%g2, 8, %g7		! Transpose back to atomic_t
	st	%g7, [%g1]		! Clever: This releases the lock as well.
#else
	ld	[%g1], %g7		! Load locked atomic_t
	add	%g7, %g2, %g2		! Add in argument
	st	%g2, [%g1]		! Store it back
#endif
	wr	%g3, 0x0, %psr		! Restore original PSR_PIL
	nop; nop; nop;			! Let the bits set
	jmpl	%o7, %g0		! NOTE: not + 8, see callers in atomic.h
	 mov	%g4, %o7		! Restore %o7
73
	/* ___atomic_sub: atomically subtract %g2 from the counter at [%g1]
	 * and return the new value.
	 *
	 * In:  %g1 = pointer to the atomic_t
	 *      %g2 = signed amount to subtract
	 *      %g4 = caller's saved %o7
	 * Out: %g2 = new counter value
	 * Clobbers: %g3, %g7 (and icc on SMP); %psr PIL field transiently.
	 *
	 * UP: atomicity comes from raising PIL around load/sub/store.
	 * SMP: additionally, the byte at offset 3 (the word's LSB -- SPARC
	 * is big-endian) is a ldstub spin lock, and the counter proper is
	 * kept shifted left by 8 in the upper 24 bits.  Storing the newly
	 * shifted value writes a zero low byte, which drops the lock in the
	 * same store -- see asm-sparc/atomic.h for the full scheme.
	 *
	 * NOTE: returns via "jmpl %o7, %g0" -- NOT the usual %o7 + 8 --
	 * because the callers in atomic.h rely on it; do not "fix" this.
	 */
	.globl	___atomic_sub
___atomic_sub:
	rd	%psr, %g3		! Keep the code small, old way was stupid
	nop; nop; nop;			! Let the bits set
	or	%g3, PSR_PIL, %g7	! Disable interrupts
	wr	%g7, 0x0, %psr		! Set %psr
	nop; nop; nop;			! Let the bits set
#ifdef CONFIG_SMP
1:	ldstub	[%g1 + 3], %g7		! Spin on the byte lock for SMP.
	orcc	%g7, 0x0, %g0		! Did we get it?
	bne	1b			! Nope...
	 ld	[%g1], %g7		! Load locked atomic_t
	sra	%g7, 8, %g7		! Get signed 24-bit integer
	sub	%g7, %g2, %g2		! Subtract argument
	sll	%g2, 8, %g7		! Transpose back to atomic_t
	st	%g7, [%g1]		! Clever: This releases the lock as well
#else
	ld	[%g1], %g7		! Load locked atomic_t
	sub	%g7, %g2, %g2		! Subtract argument
	st	%g2, [%g1]		! Store it back
#endif
	wr	%g3, 0x0, %psr		! Restore original PSR_PIL
	nop; nop; nop;			! Let the bits set
	jmpl	%o7, %g0		! NOTE: not + 8, see callers in atomic.h
	 mov	%g4, %o7		! Restore %o7
99
	/* End-of-range marker paired with __atomic_begin at the top of this
	 * file; together they bound the atomic routines as one contiguous
	 * chunk.  NOTE(review): the consumer of these bracket symbols is
	 * outside this file -- confirm before placing code after this label. */
	.globl  __atomic_end
__atomic_end:
102