1/*
2	L2CR functions
	Copyright (C) 1997-1998 by PowerLogix R & D, Inc.
4
5	This program is free software; you can redistribute it and/or modify
6	it under the terms of the GNU General Public License as published by
7	the Free Software Foundation; either version 2 of the License, or
8	(at your option) any later version.
9
10	This program is distributed in the hope that it will be useful,
11	but WITHOUT ANY WARRANTY; without even the implied warranty of
12	MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
13	GNU General Public License for more details.
14
15	You should have received a copy of the GNU General Public License
16	along with this program; if not, write to the Free Software
17	Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
18*/
19/*
20	Thur, Dec. 12, 1998.
21	- First public release, contributed by PowerLogix.
22	***********
23	Sat, Aug. 7, 1999.
24	- Terry: Made sure code disabled interrupts before running. (Previously
25			it was assumed interrupts were already disabled).
26	- Terry: Updated for tentative G4 support.  4MB of memory is now flushed
27			instead of 2MB.  (Prob. only 3 is necessary).
28	- Terry: Updated for workaround to HID0[DPM] processor bug
29			during global invalidates.
30	***********
31	Thu, July 13, 2000.
32	- Terry: Added isync to correct for an errata.
33
34	22 August 2001.
35	- DanM: Finally added the 7450 patch I've had for the past
36		several months.  The L2CR is similar, but I'm going
		to assume the user of these functions knows what they
38		are doing.
39
40	Author:	Terry Greeniaus (tgree@phys.ualberta.ca)
41	Please e-mail updates to this file to me, thanks!
42*/
43#include <linux/config.h>
44#include <asm/processor.h>
45#include <asm/cputable.h>
46#include <asm/ppc_asm.h>
47#include <asm/cache.h>
48
49/* Usage:
50
51	When setting the L2CR register, you must do a few special
52	things.  If you are enabling the cache, you must perform a
53	global invalidate.  If you are disabling the cache, you must
54	flush the cache contents first.  This routine takes care of
55	doing these things.  When first enabling the cache, make sure
56	you pass in the L2CR you want, as well as passing in the
57	global invalidate bit set.  A global invalidate will only be
58	performed if the L2I bit is set in applyThis.  When enabling
59	the cache, you should also set the L2E bit in applyThis.  If
60	you want to modify the L2CR contents after the cache has been
61	enabled, the recommended procedure is to first call
62	__setL2CR(0) to disable the cache and then call it again with
63	the new values for L2CR.  Examples:
64
65	_setL2CR(0)		- disables the cache
66	_setL2CR(0xB3A04000)	- enables my G3 upgrade card:
67				- L2E set to turn on the cache
68				- L2SIZ set to 1MB
69				- L2CLK set to 1:1
70				- L2RAM set to pipelined synchronous late-write
71				- L2I set to perform a global invalidation
72				- L2OH set to 0.5 nS
73				- L2DF set because this upgrade card
74				  requires it
75
76	A similar call should work for your card.  You need to know
77	the correct setting for your card and then place them in the
78	fields I have outlined above.  Other fields support optional
79	features, such as L2DO which caches only data, or L2TS which
80	causes cache pushes from the L1 cache to go to the L2 cache
81	instead of to main memory.
82
83IMPORTANT:
84	Starting with the 7450, the bits in this register have moved
85	or behave differently.  The Enable, Parity Enable, Size,
86	and L2 Invalidate are the only bits that have not moved.
87	The size is read-only for these processors with internal L2
88	cache, and the invalidate is a control as well as status.
89		-- Dan
90
91*/
92/*
93 * Summary: this procedure ignores the L2I bit in the value passed in,
94 * flushes the cache if it was already enabled, always invalidates the
95 * cache, then enables the cache if the L2E bit is set in the value
96 * passed in.
97 *   -- paulus.
98 */
/*
 * _set_L2CR(new_L2CR_value)
 *
 * In:  r3 = requested L2CR value (the L2I bit in it is ignored;
 *           the L2E bit selects whether the cache is re-enabled).
 * Out: r3 = resulting L2CR value, or -1 if this CPU has no L2CR.
 *
 * Runs with interrupts and data translation disabled (saved and
 * restored here).  Clobbers r0, r4, r5, r7, r8, r9, ctr, cr0.
 */
_GLOBAL(_set_L2CR)
	/* Make sure this is a 750 or 7400 chip */
BEGIN_FTR_SECTION
	li	r3,-1
	blr
END_FTR_SECTION_IFCLR(CPU_FTR_L2CR)

	mflr	r9		/* Save LR: we bl to the L1 helpers below */

	/* Stop DST streams */
BEGIN_FTR_SECTION
	DSSALL
	sync
END_FTR_SECTION_IFSET(CPU_FTR_ALTIVEC)

	/* Turn off interrupts and data relocation. */
	mfmsr	r7		/* Save MSR in r7 */
	rlwinm	r4,r7,0,17,15	/* Turn off EE bit */
	rlwinm	r4,r4,0,28,26	/* Turn off DR bit */
	sync
	mtmsr	r4
	isync

	/* Before we perform the global invalidation, we must disable dynamic
	 * power management via HID0[DPM] to work around a processor bug where
	 * DPM can possibly interfere with the state machine in the processor
	 * that invalidates the L2 cache tags.
	 */
	mfspr	r8,HID0			/* Save HID0 in r8 */
	rlwinm	r4,r8,0,12,10		/* Turn off HID0[DPM] */
	sync
	mtspr	HID0,r4			/* Disable DPM */
	sync

	/* Flush & disable L1 (helper clobbers r3, so park it in r5) */
	mr	r5,r3
	bl	__flush_disable_L1
	mr	r3,r5

	/* Get the current enable bit of the L2CR into r4 */
	mfspr	r4,L2CR

	/* Tweak some bits */
	rlwinm	r5,r3,0,0,0		/* r5 contains the new enable bit */
	rlwinm	r3,r3,0,11,9		/* Turn off the invalidate bit */
	rlwinm	r3,r3,0,1,31		/* Turn off the enable bit */

	/* Check to see if we need to flush (only if L2 was enabled) */
	rlwinm.	r4,r4,0,0,0
	beq	2f

	/* Flush the cache. First, read the first 4MB of memory (physical) to
	 * put new data in the cache.  (Actually we only need
	 * the size of the L2 cache plus the size of the L1 cache, but 4MB will
	 * cover everything just to be safe).
	 */

	 /**** Might be a good idea to set L2DO here - to prevent instructions
	       from getting into the cache.  But since we invalidate
	       the next time we enable the cache it doesn't really matter.
	       Don't do this unless you accommodate all processor variations.
	       The bit moved on the 7450.....
	  ****/

	/* TODO: use HW flush assist when available */

	lis	r4,0x0002		/* ctr = 0x20000 lines = 4MB / 32B */
	mtctr	r4
	li	r4,0
1:
	lwzx	r0,r0,r4
	addi	r4,r4,32		/* Go to start of next cache line */
	bdnz	1b
	isync

	/* Now, flush the first 4MB of memory */
	lis	r4,0x0002
	mtctr	r4
	li	r4,0
	sync
1:
	dcbf	0,r4
	addi	r4,r4,32		/* Go to start of next cache line */
	bdnz	1b

2:
	/* Set up the L2CR configuration bits (and switch L2 off) */
	/* CPU errata: Make sure the mtspr below is already in the
	 * L1 icache
	 */
	b	20f
	.balign	L1_CACHE_LINE_SIZE
22:
	sync
	mtspr	L2CR,r3
	sync
	b	23f
20:
	b	21f
21:	sync
	isync
	b	22b

23:
	/* Perform a global invalidation (set L2I) */
	oris	r3,r3,0x0020
	sync
	mtspr	L2CR,r3
	sync
	isync				/* For errata */

BEGIN_FTR_SECTION
	/* On the 7450, we wait for the L2I bit to clear......
	*/
10:	mfspr	r3,L2CR
	andis.	r4,r3,0x0020
	bne	10b
	b	11f
END_FTR_SECTION_IFSET(CPU_FTR_SPEC7450)

	/* Wait for the invalidation to complete (750/7400: poll L2IP) */
3:	mfspr	r3,L2CR
	rlwinm.	r4,r3,0,31,31
	bne	3b

11:	rlwinm	r3,r3,0,11,9		/* Turn off the L2I bit */
	sync
	mtspr	L2CR,r3
	sync

	/* See if we need to enable the cache */
	cmplwi	r5,0
	beq	4f

	/* Enable the cache (set L2E) */
	oris	r3,r3,0x8000
	mtspr	L2CR,r3
	sync

4:
	bl	__inval_enable_L1

	/* Restore HID0[DPM] to whatever it was before */
	sync
	mtspr	HID0,r8
	sync

	/* Restore MSR (restores EE and DR bits to original state) */
	SYNC
	mtmsr	r7
	isync

	mtlr	r9
	blr
253
_GLOBAL(_get_L2CR)
	/* Return the L2CR contents in r3, or 0 if this CPU has no
	 * L2CR register (the mfspr is patched out by the feature
	 * section when CPU_FTR_L2CR is clear).
	 */
	li	r3,0
BEGIN_FTR_SECTION
	mfspr	r3,L2CR
END_FTR_SECTION_IFSET(CPU_FTR_L2CR)
	blr
261
262
263/*
264 * Here is a similar routine for dealing with the L3 cache
265 * on the 745x family of chips
266 */
267
/*
 * _set_L3CR(new_L3CR_value)
 *
 * In:  r3 = requested L3CR value (L3I ignored; L3E selects enable).
 * Out: r3 = resulting L3CR value, or -1 if this CPU has no L3CR.
 *
 * Runs with interrupts and data translation disabled (saved and
 * restored here).  Clobbers r0, r4, r5, r7, ctr, cr0.
 *
 * NOTE(review): the statement ordering below (reserved bit, clock
 * enable, stabilize delays, invalidate, clock toggle) follows the
 * 745x L3 init sequence and must not be rearranged.
 */
_GLOBAL(_set_L3CR)
	/* Make sure this is a 745x chip */
BEGIN_FTR_SECTION
	li	r3,-1
	blr
END_FTR_SECTION_IFCLR(CPU_FTR_L3CR)

	/* Stop DST streams (no feature section needed: every chip
	 * with an L3CR also has AltiVec).
	 */
	DSSALL
	sync

	/* Turn off interrupts and data relocation. */
	mfmsr	r7		/* Save MSR in r7 */
	rlwinm	r4,r7,0,17,15	/* Turn off EE bit */
	rlwinm	r4,r4,0,28,26	/* Turn off DR bit */
	sync
	mtmsr	r4
	isync

	/* Get the current enable bit of the L3CR into r4 */
	mfspr	r4,SPRN_L3CR

	/* Tweak some bits */
	rlwinm	r5,r3,0,0,0		/* r5 contains the new enable bit */
	rlwinm	r3,r3,0,22,20		/* Turn off the invalidate bit */
	rlwinm	r3,r3,0,1,31		/* Turn off the enable bit */
	rlwinm	r3,r3,0,5,3		/* Turn off the clken bit */
	/* Check to see if we need to flush (only if L3 was enabled) */
	rlwinm.	r4,r4,0,0,0
	beq	2f

	/* Flush the cache: load then flush 0x80000 32-byte lines
	 * (16MB of physical memory), enough to displace the L3.
	 */

	/* TODO: use HW flush assist */

	lis	r4,0x0008
	mtctr	r4
	li	r4,0
1:
	lwzx	r0,r0,r4
	dcbf	0,r4
	addi	r4,r4,32		/* Go to start of next cache line */
	bdnz	1b

2:
	/* Set up the L3CR configuration bits (and switch L3 off) */
	sync
	mtspr	SPRN_L3CR,r3
	sync

	oris	r3,r3,L3CR_L3RES@h		/* Set reserved bit 5 */
	mtspr	SPRN_L3CR,r3
	sync
	oris	r3,r3,L3CR_L3CLKEN@h		/* Set clken */
	mtspr	SPRN_L3CR,r3
	sync

	/* Wait for the L3 clock to stabilize (short spin) */
	li	r0,256
	mtctr	r0
1:	bdnz	1b

	/* Perform a global invalidation (set L3I) */
	ori	r3,r3,0x0400
	sync
	mtspr	SPRN_L3CR,r3
	sync
	isync

	/* We wait for the L3I bit to clear...... */
10:	mfspr	r3,SPRN_L3CR
	andi.	r4,r3,0x0400
	bne	10b

	/* Clear CLKEN before re-enabling */
	rlwinm	r3,r3,0,5,3		/* Turn off the clken bit */
	mtspr	SPRN_L3CR,r3
	sync

	/* Wait for stabilize */
	li	r0,256
	mtctr	r0
1:	bdnz	1b

	/* See if we need to enable the cache */
	cmplwi	r5,0
	beq	4f

	/* Enable the cache (L3E together with the clock enable) */
	oris	r3,r3,(L3CR_L3E | L3CR_L3CLKEN)@h
	mtspr	SPRN_L3CR,r3
	sync

	/* Wait for stabilize */
	li	r0,256
	mtctr	r0
1:	bdnz	1b

	/* Restore MSR (restores EE and DR bits to original state) */
4:	SYNC
	mtmsr	r7
	isync
	blr
372
_GLOBAL(_get_L3CR)
	/* Return the L3CR contents in r3, or 0 if this CPU has no
	 * L3CR register (the mfspr is patched out by the feature
	 * section when CPU_FTR_L3CR is clear).
	 */
	li	r3,0
BEGIN_FTR_SECTION
	mfspr	r3,SPRN_L3CR
END_FTR_SECTION_IFSET(CPU_FTR_L3CR)
	blr
380
381/* --- End of PowerLogix code ---
382 */
383
384
/* flush_disable_L1()	- Flush and disable L1 cache
 *
 * Displaces the L1 contents by reading 4MB of physical memory,
 * flushes those lines back, then clears HID0[DCE] and HID0[ICE].
 *
 * clobbers r0, r3, ctr, cr0
 *
 */
 	.globl	__flush_disable_L1
 __flush_disable_L1:

	/* Stop pending altivec streams and memory accesses */
BEGIN_FTR_SECTION
	DSSALL
END_FTR_SECTION_IFSET(CPU_FTR_ALTIVEC)
 	sync

	/* Load counter to 0x20000 cache lines (4MB / 32B) and fill
	 * the cache by reading each line.  (The commented-out value
	 * below is the 128kB alternative: 0x1000 lines.)
	 */
	lis	r3,0x0002
//	li	r3,0x1000	/* 128kB / 32B */
	mtctr	r3
	li	r3, 0
1:
	lwz	r0,0(r3)
	addi	r3,r3,0x0020	/* Go to start of next cache line */
	bdnz	1b
	isync
	sync

	/* Now flush those cache lines */
	lis	r3,0x0002
//	li	r3,0x1000	/* 128kB / 32B */
	mtctr	r3
	li	r3, 0
1:
	dcbf	0,r3
	addi	r3,r3,0x0020	/* Go to start of next cache line */
	bdnz	1b
	sync

	/* We can now disable the L1 cache (HID0:DCE, HID0:ICE) */
	mfspr	r3,SPRN_HID0
	rlwinm	r3,r3,0,18,15	/* Clear bits 16 (ICE) and 17 (DCE) */
	mtspr	SPRN_HID0,r3
	sync
	isync
 	blr
431
/* inval_enable_L1	- Invalidate and enable L1 cache
 *
 * Assumes L1 is already disabled and MSR:EE is off
 *
 * clobbers r3
 */
 	.globl	__inval_enable_L1
 __inval_enable_L1:
	/* Enable and then flash-invalidate the instruction & data
	 * caches: set ICE/DCE together with the ICFI/DCI invalidate
	 * bits, then explicitly clear the two invalidate bits again.
	 */
	mfspr	r3,SPRN_HID0
	ori	r3,r3, HID0_ICE|HID0_ICFI|HID0_DCE|HID0_DCI
	sync
	isync
	mtspr	SPRN_HID0,r3
	xori	r3,r3, HID0_ICFI|HID0_DCI	/* drop ICFI/DCI, keep ICE/DCE */
	mtspr	SPRN_HID0,r3
	sync

 	blr
451
452
453