/*
 * arch/ppc/boot/common/util.S
 *
 * Useful bootup functions, which are more easily done in asm than C.
 *
 * NOTE:  Be very very careful about the registers you use here.
 *	We don't follow any ABI calling convention among the
 *	assembler functions that call each other, especially early
 *	in the initialization.  Please preserve at least r3 and r4
 *	for these early functions, as they often contain information
 *	passed from boot roms into the C decompress function.
 *
 * Author: Tom Rini
 *	   trini@mvista.com
 * Derived from arch/ppc/boot/prep/head.S (Cort Dougan, many others).
 *
 * 2001-2003 (c) MontaVista, Software, Inc.  This file is licensed under
 * the terms of the GNU General Public License version 2.  This program
 * is licensed "as is" without any warranty of any kind, whether express
 * or implied.
 */

#include <asm/processor.h>
#include <asm/cache.h>
#include <asm/ppc_asm.h>


	.text

	.globl	disable_6xx_mmu
disable_6xx_mmu:
	/* Establish default MSR value, exception prefix 0xFFF.
	 * If necessary, this function must fix up the LR if we
	 * return to a different address space once the MMU is
	 * disabled.
	 */
	li	r8,MSR_IP|MSR_FP
	mtmsr	r8

	/* Clear BATs */
	li	r8,0
	mtspr	DBAT0U,r8
	mtspr	DBAT0L,r8
	mtspr	DBAT1U,r8
	mtspr	DBAT1L,r8
	mtspr	DBAT2U,r8
	mtspr	DBAT2L,r8
	mtspr	DBAT3U,r8
	mtspr	DBAT3L,r8
	mtspr	IBAT0U,r8
	mtspr	IBAT0L,r8
	mtspr	IBAT1U,r8
	mtspr	IBAT1L,r8
	mtspr	IBAT2U,r8
	mtspr	IBAT2L,r8
	mtspr	IBAT3U,r8
	mtspr	IBAT3L,r8
	isync
	sync
	sync

	/* Set segment registers */
	li	r8,16		/* load up segment register values */
	mtctr	r8		/* for context 0 */
	lis	r8,0x2000	/* Ku = 1, VSID = 0 */
	li	r10,0
3:	mtsrin	r8,r10
	addi	r8,r8,0x111	/* increment VSID */
	addis	r10,r10,0x1000	/* address of next segment */
	bdnz	3b
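
	/* A rough C sketch of the loop above, which loads all 16 segment
	 * registers for context 0.  set_sr() is a hypothetical stand-in
	 * for mtsrin, shown only for illustration:
	 *
	 *	unsigned long ea = 0, sr = 0x20000000;	// Ku = 1, VSID = 0
	 *	for (int seg = 0; seg < 16; seg++) {
	 *		set_sr(ea, sr);		// mtsrin: SR index = ea >> 28
	 *		sr += 0x111;		// increment VSID
	 *		ea += 0x10000000;	// next 256MB segment
	 *	}
	 */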

	.globl	disable_6xx_l1cache
disable_6xx_l1cache:
	/* Enable, invalidate and then disable the L1 icache/dcache. */
	li	r8,0
	ori	r8,r8,(HID0_ICE|HID0_DCE|HID0_ICFI|HID0_DCI)
	mfspr	r11,HID0
	or	r11,r11,r8
	andc	r10,r11,r8
	isync
	mtspr	HID0,r8
	sync
	isync
	mtspr	HID0,r10
	sync
	isync
	blr
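
/*
 * What disable_6xx_l1cache does, sketched in C.  mfspr()/mtspr() are
 * hypothetical accessors used only for illustration:
 *
 *	unsigned long bits = HID0_ICE | HID0_DCE | HID0_ICFI | HID0_DCI;
 *	unsigned long old  = mfspr(HID0);
 *	mtspr(HID0, bits);		// enable + invalidate I and D caches
 *	mtspr(HID0, old & ~bits);	// restore HID0 with both caches off
 */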

	.globl	_setup_L2CR
_setup_L2CR:
/*
 * We should be skipping this section on CPUs where this results in an
 * illegal instruction.  If not, please send trini@kernel.crashing.org
 * the PVR of your CPU.
 */
	/* Invalidate/disable L2 cache */
	sync
	isync
	mfspr	r8,L2CR
	rlwinm	r8,r8,0,1,31
	oris	r8,r8,L2CR_L2I@h
	sync
	isync
	mtspr	L2CR,r8
	sync
	isync

	/* Wait for the invalidation to complete */
	mfspr	r8,PVR
	srwi	r8,r8,16
	cmpli	cr0,r8,0x8000			/* 7450 */
	cmpli	cr1,r8,0x8001			/* 7455 */
	cmpli	cr2,r8,0x8002			/* 7457 */
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq	/* Now test if any are true. */
	cror	4*cr0+eq,4*cr0+eq,4*cr2+eq
	bne	2f

1:	mfspr	r8,L2CR		/* On 745x, poll L2I bit (bit 10) */
	rlwinm.	r9,r8,0,10,10
	bne	1b
	b	3f

2:	mfspr	r8,L2CR		/* On 75x & 74[01]0, poll L2IP bit (bit 31) */
	rlwinm.	r9,r8,0,31,31
	bne	2b

3:	rlwinm	r8,r8,0,11,9	/* Turn off L2I bit */
	sync
	isync
	mtspr	L2CR,r8
	sync
	isync
	blr
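
/*
 * The L2 invalidate sequence above, sketched in C for clarity.  The
 * mfspr()/mtspr() accessors and the is_745x() test are hypothetical and
 * shown only for illustration; L2CR_L2IP names the low-order
 * "invalidate in progress" bit (bit 31) polled on 75x/74[01]0 parts:
 *
 *	unsigned long l2cr = mfspr(L2CR) & 0x7fffffff;	// clear L2E
 *	mtspr(L2CR, l2cr | L2CR_L2I);			// start global invalidate
 *	if (is_745x(mfspr(PVR) >> 16))			// 0x8000/0x8001/0x8002
 *		while (mfspr(L2CR) & L2CR_L2I)		// 745x: L2I clears itself
 *			;
 *	else
 *		while (mfspr(L2CR) & L2CR_L2IP)		// others: wait for L2IP
 *			;
 *	mtspr(L2CR, mfspr(L2CR) & ~L2CR_L2I);		// clear L2I, leave L2 off
 */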

	.globl	_setup_L3CR
_setup_L3CR:
	/* Invalidate/disable L3 cache */
	sync
	isync
	mfspr	r8,L3CR
	rlwinm	r8,r8,0,1,31
	ori	r8,r8,L3CR_L3I@l
	sync
	isync
	mtspr	L3CR,r8
	sync
	isync

	/* Wait for the invalidation to complete */
1:	mfspr	r8,L3CR
	rlwinm.	r9,r8,0,21,21
	bne	1b

	rlwinm	r8,r8,0,22,20		/* Turn off L3I bit */
	sync
	isync
	mtspr	L3CR,r8
	sync
	isync
	blr


/* udelay (on non-601 processors) needs to know the period of the
 * timebase in nanoseconds.  This used to be hardcoded to 60ns
 * (the timebase period at 66MHz/4).  It is now a variable, initialized
 * to 60 for backward compatibility, which can be overridden as needed
 * with code like the following:
 *    extern unsigned long timebase_period_ns;
 *    timebase_period_ns = 1000000000 / bd->bi_tbfreq;
 */
	.data
	.globl timebase_period_ns
timebase_period_ns:
	.long	60
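
/* For example, a 16.6MHz timebase (66MHz bus / 4) gives
 * 1000000000 / 16666666 = 60, hence the default above, while a 25MHz
 * timebase would give 1000000000 / 25000000 = 40.
 */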

	.text
/*
 * Delay for a number of microseconds
 */
	.globl	udelay
udelay:
	mfspr	r4,PVR
	srwi	r4,r4,16
	cmpi	0,r4,1		/* 601 ? */
	bne	.udelay_not_601
00:	li	r0,86	/* Instructions / microsecond? */
	mtctr	r0
10:	addi	r0,r0,0 /* NOP */
	bdnz	10b
	subic.	r3,r3,1
	bne	00b
	blr

.udelay_not_601:
	mulli	r4,r3,1000	/* nanoseconds */
	/*  Change r4 to be the number of ticks using:
	 *	(nanoseconds + (timebase_period_ns - 1)) / timebase_period_ns
	 *  timebase_period_ns defaults to 60 (16.6MHz) */
	lis	r5,timebase_period_ns@ha
	lwz	r5,timebase_period_ns@l(r5)
	add	r4,r4,r5
	addi	r4,r4,-1
	divw	r4,r4,r5	/* BUS ticks */
1:	mftbu	r5
	mftb	r6
	mftbu	r7
	cmp	0,r5,r7
	bne	1b		/* Get [synced] base time */
	addc	r9,r6,r4	/* Compute end time */
	addze	r8,r5
2:	mftbu	r5
	cmp	0,r5,r8
	blt	2b
	bgt	3f
	mftb	r6
	cmp	0,r6,r9
	blt	2b
3:	blr
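
/*
 * A rough C equivalent of the non-601 path above, for illustration
 * only.  get_tb() is a hypothetical helper standing in for the
 * mftbu/mftb read sequence and is assumed to return the full 64-bit
 * timebase:
 *
 *	void udelay(unsigned long us)
 *	{
 *		unsigned long ns = us * 1000;
 *		// round up to whole timebase ticks
 *		unsigned long ticks =
 *			(ns + timebase_period_ns - 1) / timebase_period_ns;
 *		unsigned long long end = get_tb() + ticks;
 *
 *		while (get_tb() < end)
 *			;
 *	}
 */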

	.section ".relocate_code","xa"
/*
 * Flush and enable instruction cache
 * First, flush the data cache in case it was enabled and may be
 * holding instructions for copy back.
 */
_GLOBAL(flush_instruction_cache)
	mflr	r6
	bl	flush_data_cache

#ifdef CONFIG_8xx
	lis	r3, IDC_INVALL@h
	mtspr	IC_CST, r3
	lis	r3, IDC_ENABLE@h
	mtspr	IC_CST, r3
	lis	r3, IDC_DISABLE@h
	mtspr	DC_CST, r3
#elif defined(CONFIG_4xx)
	lis	r3,start@h		# r3 = &_start
	lis	r4,_etext@ha
	addi	r4,r4,_etext@l		# r4 = &_etext
1:	dcbf	r0,r3			# Flush the data cache
	icbi	r0,r3			# Invalidate the instruction cache
	addi	r3,r3,0x10		# Increment by one cache line
	cmplwi	cr0,r3,r4		# Are we at the end yet?
	blt	1b			# No, keep flushing and invalidating
#else
	/* Enable, invalidate and then disable the L1 icache/dcache. */
	li	r3,0
	ori	r3,r3,(HID0_ICE|HID0_DCE|HID0_ICFI|HID0_DCI)
	mfspr	r4,HID0
	or	r5,r4,r3
	isync
	mtspr	HID0,r5
	sync
	isync
	ori	r5,r4,HID0_ICE	/* Enable cache */
	mtspr	HID0,r5
	sync
	isync
#endif
	mtlr	r6
	blr
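
/*
 * On 4xx the loop above walks the image from start to _etext (the same
 * linker symbols used by the assembly) one 16-byte cache line at a
 * time, flushing each dcache line and invalidating the matching icache
 * line.  Sketched in C, with flush_dcache_line()/invalidate_icache_line()
 * as hypothetical wrappers around dcbf/icbi:
 *
 *	extern char start[], _etext[];
 *	for (unsigned long p = (unsigned long)start;
 *	     p < (unsigned long)_etext; p += 16) {
 *		flush_dcache_line(p);		// dcbf
 *		invalidate_icache_line(p);	// icbi
 *	}
 */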

#define NUM_CACHE_LINES 128*8
#define cache_flush_buffer 0x1000

/*
 * Flush data cache
 * Do this by just reading lots of stuff into the cache.
 */
_GLOBAL(flush_data_cache)
	lis	r3,cache_flush_buffer@h
	ori	r3,r3,cache_flush_buffer@l
	li	r4,NUM_CACHE_LINES
	mtctr	r4
00:	lwz	r4,0(r3)
	addi	r3,r3,L1_CACHE_BYTES	/* Next line, please */
	bdnz	00b
10:	blr
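
/*
 * In C terms, flush_data_cache simply reads NUM_CACHE_LINES consecutive
 * cache lines starting at cache_flush_buffer, forcing any dirty lines
 * in the (much smaller) L1 data cache to be cast out.  Illustration
 * only; the volatile read stands in for the lwz above:
 *
 *	volatile char *p = (volatile char *)cache_flush_buffer;
 *	for (int i = 0; i < NUM_CACHE_LINES; i++)
 *		(void)p[i * L1_CACHE_BYTES];	// touch each line once
 */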

	.previous
