/* $Id: spitfire.h,v 1.18 2001/11/29 16:42:10 kanoj Exp $
 * spitfire.h: SpitFire/BlackBird/Cheetah inline MMU operations.
 *
 * Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu)
 */
6 
7 #ifndef _SPARC64_SPITFIRE_H
8 #define _SPARC64_SPITFIRE_H
9 
10 #include <asm/asi.h>
11 
/* The following register addresses are accessible via ASI_DMMU
 * and ASI_IMMU, that is there is a distinct and unique copy of
 * each these registers for each TLB.
 */
#define TSB_TAG_TARGET		0x0000000000000000 /* All chips				*/
#define TLB_SFSR		0x0000000000000018 /* All chips				*/
#define TSB_REG			0x0000000000000028 /* All chips				*/
#define TLB_TAG_ACCESS		0x0000000000000030 /* All chips				*/
#define TSB_EXTENSION_P		0x0000000000000048 /* Ultra-III and later		*/
#define TSB_EXTENSION_S		0x0000000000000050 /* Ultra-III and later, D-TLB only	*/
#define TSB_EXTENSION_N		0x0000000000000058 /* Ultra-III and later		*/
#define TLB_TAG_ACCESS_EXT	0x0000000000000060 /* Ultra-III+ and later		*/

/* These registers only exist as one entity, and are accessed
 * via ASI_DMMU only.  (The watchpoint registers were previously
 * also listed in the per-TLB group above; that duplicate pair of
 * identical definitions has been removed.)
 */
#define PRIMARY_CONTEXT		0x0000000000000008
#define SECONDARY_CONTEXT	0x0000000000000010
#define DMMU_SFAR		0x0000000000000020
#define VIRT_WATCHPOINT		0x0000000000000038
#define PHYS_WATCHPOINT		0x0000000000000040

/* Spitfire TLBs hold 64 entries; the highest-numbered entry is the
 * one used for locked translations.
 */
#define SPITFIRE_HIGHEST_LOCKED_TLBENT	(64 - 1)
37 
#ifndef __ASSEMBLY__

/* Which UltraSPARC MMU/TLB flavor this kernel is running on.
 * The variable itself is defined elsewhere in the kernel.
 */
enum ultra_tlb_layout {
	spitfire = 0,
	cheetah = 1,
	cheetah_plus = 2,
};

extern enum ultra_tlb_layout tlb_type;

/* Cheetah's locked TLBs hold 16 entries each (see the 4-TLB layout
 * comment further down in this file).
 */
#define CHEETAH_HIGHEST_LOCKED_TLBENT	(16 - 1)

/* L1 D-cache size in bytes (0x4000 = 16K). */
#define L1DCACHE_SIZE		0x4000

/* Highest TLB entry index usable for locked translations on the
 * current CPU; cheetah and cheetah_plus share the cheetah value.
 */
#define sparc64_highest_locked_tlbent()	\
	(tlb_type == spitfire ? \
	 SPITFIRE_HIGHEST_LOCKED_TLBENT : \
	 CHEETAH_HIGHEST_LOCKED_TLBENT)
56 
/* Read the I-MMU Synchronous Fault Status Register (ldxa via ASI_IMMU). */
extern __inline__ unsigned long spitfire_get_isfsr(void)
{
	unsigned long ret;

	__asm__ __volatile__("ldxa	[%1] %2, %0"
			     : "=r" (ret)
			     : "r" (TLB_SFSR), "i" (ASI_IMMU));
	return ret;
}
66 
/* Read the D-MMU Synchronous Fault Status Register (ldxa via ASI_DMMU). */
extern __inline__ unsigned long spitfire_get_dsfsr(void)
{
	unsigned long ret;

	__asm__ __volatile__("ldxa	[%1] %2, %0"
			     : "=r" (ret)
			     : "r" (TLB_SFSR), "i" (ASI_DMMU));
	return ret;
}
76 
/* Read the D-MMU Synchronous Fault Address Register (DMMU only). */
extern __inline__ unsigned long spitfire_get_sfar(void)
{
	unsigned long ret;

	__asm__ __volatile__("ldxa	[%1] %2, %0"
			     : "=r" (ret)
			     : "r" (DMMU_SFAR), "i" (ASI_DMMU));
	return ret;
}
86 
/* Write the I-MMU SFSR.  The membar #Sync orders the internal-register
 * store before any following instruction.
 */
extern __inline__ void spitfire_put_isfsr(unsigned long sfsr)
{
	__asm__ __volatile__("stxa	%0, [%1] %2\n\t"
			     "membar	#Sync"
			     : /* no outputs */
			     : "r" (sfsr), "r" (TLB_SFSR), "i" (ASI_IMMU));
}
94 
/* Write the D-MMU SFSR.  The membar #Sync orders the internal-register
 * store before any following instruction.
 */
extern __inline__ void spitfire_put_dsfsr(unsigned long sfsr)
{
	__asm__ __volatile__("stxa	%0, [%1] %2\n\t"
			     "membar	#Sync"
			     : /* no outputs */
			     : "r" (sfsr), "r" (TLB_SFSR), "i" (ASI_DMMU));
}
102 
/* Read the primary address-space context register. */
extern __inline__ unsigned long spitfire_get_primary_context(void)
{
	unsigned long ctx;

	__asm__ __volatile__("ldxa	[%1] %2, %0"
			     : "=r" (ctx)
			     : "r" (PRIMARY_CONTEXT), "i" (ASI_DMMU));
	return ctx;
}
112 
/* Set the primary address-space context register.  Only the low 10
 * bits of @ctx are written (ctx & 0x3ff).  The second membar carries
 * a "memory" clobber, acting as a compiler barrier so no memory
 * access is moved across the context switch.
 */
extern __inline__ void spitfire_set_primary_context(unsigned long ctx)
{
	__asm__ __volatile__("stxa	%0, [%1] %2\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (ctx & 0x3ff),
			       "r" (PRIMARY_CONTEXT), "i" (ASI_DMMU));
	__asm__ __volatile__ ("membar #Sync" : : : "memory");
}
122 
/* Read the secondary address-space context register. */
extern __inline__ unsigned long spitfire_get_secondary_context(void)
{
	unsigned long ctx;

	__asm__ __volatile__("ldxa	[%1] %2, %0"
			     : "=r" (ctx)
			     : "r" (SECONDARY_CONTEXT), "i" (ASI_DMMU));
	return ctx;
}
132 
/* Set the secondary address-space context register.  Only the low 10
 * bits of @ctx are written (ctx & 0x3ff); the trailing membar with a
 * "memory" clobber keeps the compiler from reordering memory accesses
 * across the context switch.
 */
extern __inline__ void spitfire_set_secondary_context(unsigned long ctx)
{
	__asm__ __volatile__("stxa	%0, [%1] %2\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (ctx & 0x3ff),
			       "r" (SECONDARY_CONTEXT), "i" (ASI_DMMU));
	__asm__ __volatile__ ("membar #Sync" : : : "memory");
}
142 
/* The data cache is write through, so this just invalidates the
 * specified line.  @addr indexes the D-cache tag array via the
 * ASI_DCACHE_TAG diagnostic ASI; @tag is the value stored there.
 */
extern __inline__ void spitfire_put_dcache_tag(unsigned long addr, unsigned long tag)
{
	__asm__ __volatile__("stxa	%0, [%1] %2\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (tag), "r" (addr), "i" (ASI_DCACHE_TAG));
	/* Extra membar with "memory" clobber: compiler barrier too. */
	__asm__ __volatile__ ("membar #Sync" : : : "memory");
}
154 
/* The instruction cache lines are flushed with this, but note that
 * this does not flush the pipeline.  It is possible for a line to
 * get flushed but stale instructions to still be in the pipeline,
 * a flush instruction (to any address) is sufficient to handle
 * this issue after the line is invalidated.
 */
extern __inline__ void spitfire_put_icache_tag(unsigned long addr, unsigned long tag)
{
	/* Diagnostic store into the I-cache tag array via ASI_IC_TAG. */
	__asm__ __volatile__("stxa	%0, [%1] %2\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (tag), "r" (addr), "i" (ASI_IC_TAG));
}
168 
/* Read the TTE data word of D-TLB entry @entry via the diagnostic
 * data-access ASI.  Each entry's register lives at (entry << 3).
 */
extern __inline__ unsigned long spitfire_get_dtlb_data(int entry)
{
	unsigned long data;

	__asm__ __volatile__("ldxa	[%1] %2, %0"
			     : "=r" (data)
			     : "r" (entry << 3), "i" (ASI_DTLB_DATA_ACCESS));

	/* Clear TTE diag bits. */
	data &= ~0x0003fe0000000000UL;

	return data;
}
182 
/* Read the tag word of D-TLB entry @entry (register at entry << 3). */
extern __inline__ unsigned long spitfire_get_dtlb_tag(int entry)
{
	unsigned long tag;

	__asm__ __volatile__("ldxa	[%1] %2, %0"
			     : "=r" (tag)
			     : "r" (entry << 3), "i" (ASI_DTLB_TAG_READ));
	return tag;
}
192 
/* Write @data as the TTE data word of D-TLB entry @entry; membar
 * #Sync orders the diagnostic store before following instructions.
 */
extern __inline__ void spitfire_put_dtlb_data(int entry, unsigned long data)
{
	__asm__ __volatile__("stxa	%0, [%1] %2\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (data), "r" (entry << 3),
			       "i" (ASI_DTLB_DATA_ACCESS));
}
201 
/* Read the TTE data word of I-TLB entry @entry via the diagnostic
 * data-access ASI (register at entry << 3).
 */
extern __inline__ unsigned long spitfire_get_itlb_data(int entry)
{
	unsigned long data;

	__asm__ __volatile__("ldxa	[%1] %2, %0"
			     : "=r" (data)
			     : "r" (entry << 3), "i" (ASI_ITLB_DATA_ACCESS));

	/* Clear TTE diag bits. */
	data &= ~0x0003fe0000000000UL;

	return data;
}
215 
/* Read the tag word of I-TLB entry @entry (register at entry << 3). */
extern __inline__ unsigned long spitfire_get_itlb_tag(int entry)
{
	unsigned long tag;

	__asm__ __volatile__("ldxa	[%1] %2, %0"
			     : "=r" (tag)
			     : "r" (entry << 3), "i" (ASI_ITLB_TAG_READ));
	return tag;
}
225 
/* Write @data as the TTE data word of I-TLB entry @entry; membar
 * #Sync orders the diagnostic store before following instructions.
 */
extern __inline__ void spitfire_put_itlb_data(int entry, unsigned long data)
{
	__asm__ __volatile__("stxa	%0, [%1] %2\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (data), "r" (entry << 3),
			       "i" (ASI_ITLB_DATA_ACCESS));
}
234 
/* Spitfire hardware assisted TLB flushes. */

/* Context level flushes. */

/* Demap all D-TLB entries for the primary context.  The demap
 * operation is triggered by storing %g0 to the demap ASI; the
 * VA encodes the demap type/context (0x40 here).
 */
extern __inline__ void spitfire_flush_dtlb_primary_context(void)
{
	__asm__ __volatile__("stxa	%%g0, [%0] %1\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (0x40), "i" (ASI_DMMU_DEMAP));
}
245 
/* Demap all I-TLB entries for the primary context (VA encoding 0x40). */
extern __inline__ void spitfire_flush_itlb_primary_context(void)
{
	__asm__ __volatile__("stxa	%%g0, [%0] %1\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (0x40), "i" (ASI_IMMU_DEMAP));
}
253 
/* Demap all D-TLB entries for the secondary context (VA encoding 0x50). */
extern __inline__ void spitfire_flush_dtlb_secondary_context(void)
{
	__asm__ __volatile__("stxa	%%g0, [%0] %1\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (0x50), "i" (ASI_DMMU_DEMAP));
}
261 
/* Demap all I-TLB entries for the secondary context (VA encoding 0x50). */
extern __inline__ void spitfire_flush_itlb_secondary_context(void)
{
	__asm__ __volatile__("stxa	%%g0, [%0] %1\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (0x50), "i" (ASI_IMMU_DEMAP));
}
269 
/* Demap all D-TLB entries for the nucleus context (VA encoding 0x60). */
extern __inline__ void spitfire_flush_dtlb_nucleus_context(void)
{
	__asm__ __volatile__("stxa	%%g0, [%0] %1\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (0x60), "i" (ASI_DMMU_DEMAP));
}
277 
/* Demap all I-TLB entries for the nucleus context (VA encoding 0x60). */
extern __inline__ void spitfire_flush_itlb_nucleus_context(void)
{
	__asm__ __volatile__("stxa	%%g0, [%0] %1\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (0x60), "i" (ASI_IMMU_DEMAP));
}
285 
/* Page level flushes. */

/* Demap the D-TLB entry mapping @page in the primary context.
 * The page address itself (low type bits zero) selects a
 * primary-context page demap.
 */
extern __inline__ void spitfire_flush_dtlb_primary_page(unsigned long page)
{
	__asm__ __volatile__("stxa	%%g0, [%0] %1\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (page), "i" (ASI_DMMU_DEMAP));
}
294 
/* Demap the I-TLB entry mapping @page in the primary context. */
extern __inline__ void spitfire_flush_itlb_primary_page(unsigned long page)
{
	__asm__ __volatile__("stxa	%%g0, [%0] %1\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (page), "i" (ASI_IMMU_DEMAP));
}
302 
/* Demap the D-TLB entry mapping @page in the secondary context
 * (the 0x10 bit in the demap VA selects secondary).
 */
extern __inline__ void spitfire_flush_dtlb_secondary_page(unsigned long page)
{
	__asm__ __volatile__("stxa	%%g0, [%0] %1\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (page | 0x10), "i" (ASI_DMMU_DEMAP));
}
310 
/* Demap the I-TLB entry mapping @page in the secondary context
 * (the 0x10 bit in the demap VA selects secondary).
 */
extern __inline__ void spitfire_flush_itlb_secondary_page(unsigned long page)
{
	__asm__ __volatile__("stxa	%%g0, [%0] %1\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (page | 0x10), "i" (ASI_IMMU_DEMAP));
}
318 
/* Demap the D-TLB entry mapping @page in the nucleus context
 * (the 0x20 bit in the demap VA selects nucleus).
 */
extern __inline__ void spitfire_flush_dtlb_nucleus_page(unsigned long page)
{
	__asm__ __volatile__("stxa	%%g0, [%0] %1\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (page | 0x20), "i" (ASI_DMMU_DEMAP));
}
326 
/* Demap the I-TLB entry mapping @page in the nucleus context
 * (the 0x20 bit in the demap VA selects nucleus).
 */
extern __inline__ void spitfire_flush_itlb_nucleus_page(unsigned long page)
{
	__asm__ __volatile__("stxa	%%g0, [%0] %1\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (page | 0x20), "i" (ASI_IMMU_DEMAP));
}
334 
/* Cheetah has "all non-locked" tlb flushes. */

/* Demap every non-locked D-TLB entry (VA encoding 0x80). */
extern __inline__ void cheetah_flush_dtlb_all(void)
{
	__asm__ __volatile__("stxa	%%g0, [%0] %1\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (0x80), "i" (ASI_DMMU_DEMAP));
}
343 
/* Demap every non-locked I-TLB entry (VA encoding 0x80). */
extern __inline__ void cheetah_flush_itlb_all(void)
{
	__asm__ __volatile__("stxa	%%g0, [%0] %1\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (0x80), "i" (ASI_IMMU_DEMAP));
}
351 
/* Cheetah has a 4-tlb layout so direct access is a bit different.
 * The first two TLBs are fully associative, hold 16 entries, and are
 * used only for locked and >8K sized translations.  One exists for
 * data accesses and one for instruction accesses.
 *
 * The third TLB is for data accesses to 8K non-locked translations, is
 * 2 way associative, and holds 512 entries.  The fourth TLB is for
 * instruction accesses to 8K non-locked translations, is 2 way
 * associative, and holds 128 entries.
 *
 * Cheetah has some bug where bogus data can be returned from
 * ASI_{D,I}TLB_DATA_ACCESS loads, doing the load twice fixes
 * the problem for me. -DaveM
 */
/* Read the data word of locked D-TLB (TLB select 0) entry @entry.
 * The first load to %g0 works around the bogus-data erratum noted
 * in the comment above; the TLB number sits in bits [17:16] of the
 * diagnostic VA, the entry index in bits starting at 3.
 */
extern __inline__ unsigned long cheetah_get_ldtlb_data(int entry)
{
	unsigned long data;

	__asm__ __volatile__("ldxa	[%1] %2, %%g0\n\t"
			     "ldxa	[%1] %2, %0"
			     : "=r" (data)
			     : "r" ((0 << 16) | (entry << 3)),
			     "i" (ASI_DTLB_DATA_ACCESS));

	return data;
}
378 
/* Read the data word of locked I-TLB (TLB select 0) entry @entry;
 * double load works around the bogus-data erratum noted above.
 */
extern __inline__ unsigned long cheetah_get_litlb_data(int entry)
{
	unsigned long data;

	__asm__ __volatile__("ldxa	[%1] %2, %%g0\n\t"
			     "ldxa	[%1] %2, %0"
			     : "=r" (data)
			     : "r" ((0 << 16) | (entry << 3)),
			     "i" (ASI_ITLB_DATA_ACCESS));

	return data;
}
391 
/* Read the tag word of locked D-TLB (TLB select 0) entry @entry. */
extern __inline__ unsigned long cheetah_get_ldtlb_tag(int entry)
{
	unsigned long tag;

	__asm__ __volatile__("ldxa	[%1] %2, %0"
			     : "=r" (tag)
			     : "r" ((0 << 16) | (entry << 3)),
			     "i" (ASI_DTLB_TAG_READ));

	return tag;
}
403 
/* Read the tag word of locked I-TLB (TLB select 0) entry @entry. */
extern __inline__ unsigned long cheetah_get_litlb_tag(int entry)
{
	unsigned long tag;

	__asm__ __volatile__("ldxa	[%1] %2, %0"
			     : "=r" (tag)
			     : "r" ((0 << 16) | (entry << 3)),
			     "i" (ASI_ITLB_TAG_READ));

	return tag;
}
415 
/* Write @data into locked D-TLB (TLB select 0) entry @entry. */
extern __inline__ void cheetah_put_ldtlb_data(int entry, unsigned long data)
{
	__asm__ __volatile__("stxa	%0, [%1] %2\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (data),
			       "r" ((0 << 16) | (entry << 3)),
			       "i" (ASI_DTLB_DATA_ACCESS));
}
425 
/* Write @data into locked I-TLB (TLB select 0) entry @entry. */
extern __inline__ void cheetah_put_litlb_data(int entry, unsigned long data)
{
	__asm__ __volatile__("stxa	%0, [%1] %2\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (data),
			       "r" ((0 << 16) | (entry << 3)),
			       "i" (ASI_ITLB_DATA_ACCESS));
}
435 
/* Read the data word of entry @entry in D-TLB number @tlb (caller
 * picks which of the D-TLBs via bits [17:16] of the diagnostic VA).
 * Double load works around the bogus-data erratum noted above.
 */
extern __inline__ unsigned long cheetah_get_dtlb_data(int entry, int tlb)
{
	unsigned long data;

	__asm__ __volatile__("ldxa	[%1] %2, %%g0\n\t"
			     "ldxa	[%1] %2, %0"
			     : "=r" (data)
			     : "r" ((tlb << 16) | (entry << 3)), "i" (ASI_DTLB_DATA_ACCESS));

	return data;
}
447 
/* Read the tag word of entry @entry in D-TLB number @tlb. */
extern __inline__ unsigned long cheetah_get_dtlb_tag(int entry, int tlb)
{
	unsigned long tag;

	__asm__ __volatile__("ldxa	[%1] %2, %0"
			     : "=r" (tag)
			     : "r" ((tlb << 16) | (entry << 3)), "i" (ASI_DTLB_TAG_READ));
	return tag;
}
457 
/* Write @data into entry @entry of D-TLB number @tlb. */
extern __inline__ void cheetah_put_dtlb_data(int entry, unsigned long data, int tlb)
{
	__asm__ __volatile__("stxa	%0, [%1] %2\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (data),
			       "r" ((tlb << 16) | (entry << 3)),
			       "i" (ASI_DTLB_DATA_ACCESS));
}
467 
/* Read the data word of entry @entry in the non-locked I-TLB
 * (TLB select 2).  Double load works around the bogus-data
 * erratum noted above.
 */
extern __inline__ unsigned long cheetah_get_itlb_data(int entry)
{
	unsigned long data;

	__asm__ __volatile__("ldxa	[%1] %2, %%g0\n\t"
			     "ldxa	[%1] %2, %0"
			     : "=r" (data)
			     : "r" ((2 << 16) | (entry << 3)),
                               "i" (ASI_ITLB_DATA_ACCESS));

	return data;
}
480 
/* Read the tag word of entry @entry in the non-locked I-TLB (select 2). */
extern __inline__ unsigned long cheetah_get_itlb_tag(int entry)
{
	unsigned long tag;

	__asm__ __volatile__("ldxa	[%1] %2, %0"
			     : "=r" (tag)
			     : "r" ((2 << 16) | (entry << 3)), "i" (ASI_ITLB_TAG_READ));
	return tag;
}
490 
/* Write @data into entry @entry of the non-locked I-TLB (select 2). */
extern __inline__ void cheetah_put_itlb_data(int entry, unsigned long data)
{
	__asm__ __volatile__("stxa	%0, [%1] %2\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (data), "r" ((2 << 16) | (entry << 3)),
			       "i" (ASI_ITLB_DATA_ACCESS));
}
499 
500 #endif /* !(__ASSEMBLY__) */
501 
502 #endif /* !(_SPARC64_SPITFIRE_H) */
503