/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2007  Maciej W. Rozycki
 */
#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>

#if LONGSIZE == 4
#define LONG_S_L swl
#define LONG_S_R swr
#else
#define LONG_S_L sdl
#define LONG_S_R sdr
#endif

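/*
 * EX wraps a single store so that a fault in it can be recovered from:
 * the store gets a local label and a matching __ex_table entry that
 * sends the fault handler to `handler'.
 */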
#define EX(insn,reg,addr,handler)			\
9:	insn	reg, addr;				\
	.section __ex_table,"a"; 			\
	PTR	9b, handler; 				\
	.previous

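/*
 * f_fill64 stores 64 bytes of \val at \offset(\dst): sixteen word
 * stores with LONGSIZE == 4, eight doubleword stores with LONGSIZE == 8.
 * Every store is EX-protected so a fault lands in \fixup.
 */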
	.macro	f_fill64 dst, offset, val, fixup
	EX(LONG_S, \val, (\offset +  0 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  1 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  2 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  3 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  4 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  5 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  6 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  7 * LONGSIZE)(\dst), \fixup)
#if LONGSIZE == 4
	EX(LONG_S, \val, (\offset +  8 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  9 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 10 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 11 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 12 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 13 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 14 * LONGSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 15 * LONGSIZE)(\dst), \fixup)
#endif
	.endm

/*
 * memset(void *s, int c, size_t n)
 *
 * a0: start of area to clear
 * a1: char to fill with
 * a2: size of area to clear
 */
	.set	noreorder
	.align	5
LEAF(memset)
	beqz		a1, 1f
	 move		v0, a0			/* result */

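/*
 * Replicate the fill byte across the whole register by shift-and-or,
 * e.g. 0xa5 -> 0xa5a5a5a5 (0xa5a5a5a5a5a5a5a5 on 64-bit kernels).
 */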
	andi		a1, 0xff		/* spread fillword */
	LONG_SLL		t1, a1, 8
	or		a1, t1
	LONG_SLL		t1, a1, 16
#if LONGSIZE == 8
	or		a1, t1
	LONG_SLL		t1, a1, 32
#endif
	or		a1, t1
1:

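/*
 * __bzero(void *s, size_t n) enters here: its callers pass zero as the
 * fill value, so the byte-spreading above is not needed.
 */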
FEXPORT(__bzero)
	sltiu		t0, a2, LONGSIZE	/* very small region? */
	bnez		t0, .Lsmall_memset
	 andi		t0, a0, LONGMASK	/* aligned? */

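/*
 * t0 holds the misalignment of a0.  Turn it into minus the number of
 * bytes needed to reach the next long boundary, store that head with an
 * unaligned store (swl/swr or sdl/sdr, picked by endianness), then
 * advance a0 and shrink a2 to match.  The CONFIG_CPU_DADDI_WORKAROUNDS
 * variant does the subtract through AT to avoid an immediate form.
 */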
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
	beqz		t0, 1f
	 PTR_SUBU	t0, LONGSIZE		/* alignment in bytes */
#else
	.set		noat
	li		AT, LONGSIZE
	beqz		t0, 1f
	 PTR_SUBU	t0, AT			/* alignment in bytes */
	.set		at
#endif

	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_L, a1, (a0), .Lfirst_fixup)	/* make word/dword aligned */
#endif
#ifdef __MIPSEL__
	EX(LONG_S_R, a1, (a0), .Lfirst_fixup)	/* make word/dword aligned */
#endif
	PTR_SUBU	a0, t0			/* long align ptr */
	PTR_ADDU	a2, t0			/* correct size */

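/*
 * t1 = a2 rounded down to a multiple of 64, i.e. the bytes covered by
 * full 64-byte blocks; t0 keeps the leftover bytes that still form
 * whole longs.  The loop below clears one 64-byte block per iteration.
 */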
1:	ori		t1, a2, 0x3f		/* # of full blocks */
	xori		t1, 0x3f
	beqz		t1, .Lmemset_partial	/* no block to fill */
	 andi		t0, a2, 0x40-LONGSIZE

	PTR_ADDU	t1, a0			/* end address */
	.set		reorder
1:	PTR_ADDIU	a0, 64
	R10KCBARRIER(0(ra))
	f_fill64 a0, -64, a1, .Lfwd_fixup
	bne		t1, a0, 1b
	.set		noreorder

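/*
 * Store the leftover whole longs by jumping into the middle of the
 * unrolled f_fill64 below: the entry point is backed off from label 2
 * by one store instruction per remaining long (hence the halved offset
 * on 64-bit, where each store covers 8 bytes instead of 4).
 */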
.Lmemset_partial:
	R10KCBARRIER(0(ra))
	PTR_LA		t1, 2f			/* where to start */
#if LONGSIZE == 4
	PTR_SUBU	t1, t0
#else
	.set		noat
	LONG_SRL		AT, t0, 1
	PTR_SUBU	t1, AT
	.set		at
#endif
	jr		t1
	 PTR_ADDU	a0, t0			/* dest ptr */

	.set		push
	.set		noreorder
	.set		nomacro
	f_fill64 a0, -64, a1, .Lpartial_fixup	/* ... but first do longs ... */
2:	.set		pop
	andi		a2, LONGMASK		/* At most one long to go */

	beqz		a2, 1f
	 PTR_ADDU	a0, a2			/* What's left */
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_R, a1, -1(a0), .Llast_fixup)
#endif
#ifdef __MIPSEL__
	EX(LONG_S_L, a1, -1(a0), .Llast_fixup)
#endif
1:	jr		ra
	 move		a2, zero

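/*
 * Regions shorter than one long are filled one byte at a time.
 */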
.Lsmall_memset:
	beqz		a2, 2f
	 PTR_ADDU	t1, a0, a2

1:	PTR_ADDIU	a0, 1			/* fill bytewise */
	R10KCBARRIER(0(ra))
	bne		t1, a0, 1b
	 sb		a1, -1(a0)

2:	jr		ra			/* done */
	 move		a2, zero
	END(memset)

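/*
 * Fault fixups: one of the EX-protected stores above faulted.  Each
 * handler returns to the caller with a2 set to the number of bytes
 * that were not cleared; the fwd/partial cases recompute it from the
 * faulting address saved for the current task (THREAD_BUADDR).
 */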
.Lfirst_fixup:
	jr	ra
	 nop

.Lfwd_fixup:
	PTR_L		t0, TI_TASK($28)
	andi		a2, 0x3f
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, t1
	jr		ra
	 LONG_SUBU	a2, t0

.Lpartial_fixup:
	PTR_L		t0, TI_TASK($28)
	andi		a2, LONGMASK
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, a0			/* a0 points past the longs being filled */
	jr		ra
	 LONG_SUBU	a2, t0

.Llast_fixup:
	jr		ra
	 nop					/* a2 already holds the bytes left */
