1/* $Id: winfixup.S,v 1.29 2000/03/26 09:13:48 davem Exp $
2 *
3 * winfixup.S: Handle cases where user stack pointer is found to be bogus.
4 *
5 * Copyright (C) 1997 David S. Miller (davem@caip.rutgers.edu)
6 */
7
8#include <asm/asi.h>
9#include <asm/head.h>
10#include <asm/page.h>
11#include <asm/ptrace.h>
12#include <asm/processor.h>
13#include <asm/spitfire.h>
14#include <asm/asm_offsets.h>
15
16	.text
17	.align	32
18
19	/* Here are the rules, pay attention.
20	 *
21	 * The kernel is disallowed from touching user space while
22	 * the trap level is greater than zero, except for from within
23	 * the window spill/fill handlers.  This must be followed
24	 * so that we can easily detect the case where we tried to
25	 * spill/fill with a bogus (or unmapped) user stack pointer.
26	 *
	 * These are laid out in a special way for cache reasons,
	 * don't touch...
29	 */
	.globl	fill_fixup, spill_fixup

	/* fill_fixup: a register-window fill trap faulted because the
	 * user's stack pointer was bogus or unmapped.  Record the fault
	 * information in the thread struct and get to do_sparc64_fault().
	 */
fill_fixup:
	rdpr		%tstate, %g1			! Trapped-state register.
	andcc		%g1, TSTATE_PRIV, %g0		! Fill taken from privileged mode?
	or		%g4, FAULT_CODE_WINFIXUP, %g4	! Mark fault as a window fixup.
	be,pt		%xcc, window_scheisse_from_user_common	! User mode: easy common path.
	 and		%g1, TSTATE_CWP, %g1		! (delay) Extract trapped CWP.

	/* This is the extremely complex case, but it does happen from
	 * time to time if things are just right.  Essentially the restore
	 * done in rtrap right before going back to user mode, with tl=1
	 * and that level's trap stack registers all setup, took a fill trap,
	 * the user stack was not mapped in the tlb, and tlb miss occurred,
	 * the pte found was not valid, and a simple ref bit watch update
	 * could not satisfy the miss, so we got here.
	 *
	 * We must carefully unwind the state so we get back to tl=0, preserve
	 * all the register values we were going to give to the user.  Luckily
	 * most things are where they need to be, we also have the address
	 * which triggered the fault handy as well.
	 *
	 * Also note that we must preserve %l5 and %l6.  If the user was
	 * returning from a system call, we must make it look this way
	 * after we process the fill fault on the user's stack.
	 *
	 * First, get into the window where the original restore was executed.
	 */

	rdpr		%wstate, %g2			! Grab user mode wstate.
	wrpr		%g1, %cwp			! Get into the right window.
	sll		%g2, 3, %g2			! NORMAL-->OTHER

	wrpr		%g0, 0x0, %canrestore		! Standard etrap stuff.
	wrpr		%g2, 0x0, %wstate		! This must be consistent.
	wrpr		%g0, 0x0, %otherwin		! We know this.
	mov		PRIMARY_CONTEXT, %g1		! Change contexts...
	stxa		%g0, [%g1] ASI_DMMU		! Back into the nucleus.
	flush		%g6				! Flush instruction buffers
	rdpr		%pstate, %l1			! Prepare to change globals.
	mov		%g6, %o7			! Get current.

	andn		%l1, PSTATE_MM, %l1		! We want to be in RMO
	stb		%g4, [%g6 + AOFF_task_thread + AOFF_thread_fault_code]	! Record fault code.
	stx		%g5, [%g6 + AOFF_task_thread + AOFF_thread_fault_address]	! Record fault address.
	wrpr		%g0, 0x0, %tl			! Out of trap levels.
	wrpr		%l1, (PSTATE_IE | PSTATE_AG | PSTATE_RMO), %pstate
	sethi		%uhi(PAGE_OFFSET), %g4		! Prepare page_offset global reg
	mov		%o7, %g6			! Restore current after global switch.
	sllx		%g4, 32, %g4			! and finish it...

	/* This is the same as below, except we handle this a bit special
	 * since we must preserve %l5 and %l6, see comment above.
	 */
	call		do_sparc64_fault
	 add		%sp, PTREGS_OFF, %o0		! (delay) arg0 = pt_regs pointer.
	ba,pt		%xcc, rtrap
	 nop						! yes, nop is correct
87
	/* Be very careful about usage of the alternate globals here.
	 * You cannot touch %g4/%g5 as that has the fault information
	 * should this be from usermode.  Also be careful for the case
	 * where we get here from the save instruction in etrap.S when
	 * coming from either user or kernel (does not matter which, it
	 * is the same problem in both cases).  Essentially this means
	 * do not touch %g7 or %g2 so we handle the two cases fine.
	 */
	/* spill_fixup: a window spill trap could not write the window to
	 * the user stack.  Stash the window into the thread's in-kernel
	 * save area (thread->reg_window / rwbuf_stkptrs) instead, bump
	 * w_saved, then either retry (privileged) or take the common
	 * user-mode fault path.
	 */
spill_fixup:
	ldub		[%g6 + AOFF_task_thread + AOFF_thread_flags], %g1
	andcc		%g1, SPARC_FLAG_32BIT, %g0	! 32-bit task?  (cc consumed by bne below)
	ldub		[%g6 + AOFF_task_thread + AOFF_thread_w_saved], %g1	! %g1 = windows already saved.

	sll		%g1, 3, %g3			! Index into rwbuf_stkptrs (8 bytes per entry).
	add		%g6, %g3, %g3
	stx		%sp, [%g3 + AOFF_task_thread + AOFF_thread_rwbuf_stkptrs]	! Remember target stack ptr.
	sll		%g1, 7, %g3			! Index into reg_window (128 bytes per window).
	bne,pt		%xcc, 1f			! 32-bit task: use 32-bit save format.
	 add		%g6, %g3, %g3
	stx		%l0, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x00]	! 64-bit format: full locals+ins.
	stx		%l1, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x08]

	stx		%l2, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x10]
	stx		%l3, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x18]
	stx		%l4, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x20]
	stx		%l5, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x28]
	stx		%l6, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x30]
	stx		%l7, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x38]
	stx		%i0, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x40]
	stx		%i1, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x48]

	stx		%i2, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x50]
	stx		%i3, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x58]
	stx		%i4, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x60]
	stx		%i5, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x68]
	stx		%i6, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x70]
	b,pt		%xcc, 2f
	 stx		%i7, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x78]
1:	stw		%l0, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x00]	! 32-bit format: low words only.

	stw		%l1, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x04]
	stw		%l2, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x08]
	stw		%l3, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x0c]
	stw		%l4, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x10]
	stw		%l5, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x14]
	stw		%l6, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x18]
	stw		%l7, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x1c]
	stw		%i0, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x20]

	stw		%i1, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x24]
	stw		%i2, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x28]
	stw		%i3, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x2c]
	stw		%i4, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x30]
	stw		%i5, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x34]
	stw		%i6, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x38]
	stw		%i7, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x3c]
2:	add		%g1, 1, %g1			! One more window saved in the thread.

	stb		%g1, [%g6 + AOFF_task_thread + AOFF_thread_w_saved]
	rdpr		%tstate, %g1
	andcc		%g1, TSTATE_PRIV, %g0		! Spill taken from privileged mode?
	saved						! Tell the cpu this window is now saved.
	and		%g1, TSTATE_CWP, %g1		! Extract trapped CWP.
	be,pn		%xcc, window_scheisse_from_user_common	! User mode: report the fault.
	 mov		FAULT_CODE_WRITE | FAULT_CODE_DTLB | FAULT_CODE_WINFIXUP, %g4
	retry						! Privileged: resume the trapped save.
154
	/* Common user-mode path for bogus-stack spill/fill faults.
	 * On entry: %g1 = trapped CWP, %g4 = fault code, %g5 = fault address.
	 * Record the fault in the thread struct, take the normal kernel
	 * entry (etrap), and hand off to do_sparc64_fault().
	 */
window_scheisse_from_user_common:
	stb		%g4, [%g6 + AOFF_task_thread + AOFF_thread_fault_code]		! Record fault code.
	stx		%g5, [%g6 + AOFF_task_thread + AOFF_thread_fault_address]	! Record fault address.
	wrpr		%g1, %cwp			! Back into the trapped window.
	ba,pt		%xcc, etrap
	 rd		%pc, %g7			! (delay) %g7 = return address base for etrap.
	call		do_sparc64_fault
	 add		%sp, PTREGS_OFF, %o0		! (delay) arg0 = pt_regs pointer.
	ba,a,pt		%xcc, rtrap_clr_l6
164
	.globl		winfix_mna, fill_fixup_mna, spill_fixup_mna

	/* winfix_mna: a mem-address-not-aligned trap hit inside a window
	 * spill/fill handler.  %g3 presumably holds a PC inside the
	 * 128-byte trap-table slot (set up by the caller -- TODO confirm);
	 * redirect %tnpc to offset 0x78 of that slot so that "done"
	 * resumes at the fixup branch there.
	 */
winfix_mna:
	andn		%g3, 0x7f, %g3			! Round down to 128-byte slot base.
	add		%g3, 0x78, %g3			! Offset of the fixup entry in the slot.
	wrpr		%g3, %tnpc			! Resume there when we "done".
	done
	/* fill_fixup_mna: a window fill faulted with a mis-aligned user
	 * stack pointer.  Same unwinding dance as fill_fixup, but the
	 * final C handler is mem_address_unaligned().
	 */
fill_fixup_mna:
	rdpr		%tstate, %g1			! Trapped-state register.
	andcc		%g1, TSTATE_PRIV, %g0		! Fill taken from privileged mode?
	be,pt		%xcc, window_mna_from_user_common	! User mode: easy common path.
	 and		%g1, TSTATE_CWP, %g1		! (delay) Extract trapped CWP.

	/* Please, see fill_fixup commentary about why we must preserve
	 * %l5 and %l6 to preserve absolute correct semantics.
	 */
	rdpr		%wstate, %g2			! Grab user mode wstate.
	wrpr		%g1, %cwp			! Get into the right window.
	sll		%g2, 3, %g2			! NORMAL-->OTHER
	wrpr		%g0, 0x0, %canrestore		! Standard etrap stuff.

	wrpr		%g2, 0x0, %wstate		! This must be consistent.
	wrpr		%g0, 0x0, %otherwin		! We know this.
	mov		PRIMARY_CONTEXT, %g1		! Change contexts...
	stxa		%g0, [%g1] ASI_DMMU		! Back into the nucleus.
	flush		%g6				! Flush instruction buffers
	rdpr		%pstate, %l1			! Prepare to change globals.
	mov		%g4, %o2			! Setup args for
	mov		%g5, %o1			! final call to mem_address_unaligned.
	andn		%l1, PSTATE_MM, %l1		! We want to be in RMO

	mov		%g6, %o7			! Stash away current.
	wrpr		%g0, 0x0, %tl			! Out of trap levels.
	wrpr		%l1, (PSTATE_IE | PSTATE_AG | PSTATE_RMO), %pstate
	sethi		%uhi(PAGE_OFFSET), %g4		! Set page_offset global reg.
	mov		%o7, %g6			! Get current back.
	sllx		%g4, 32, %g4			! Finish it.
	call		mem_address_unaligned
	 add		%sp, PTREGS_OFF, %o0		! (delay) arg0 = pt_regs pointer.

	b,pt		%xcc, rtrap
	 nop						! yes, the nop is correct
	/* spill_fixup_mna: a window spill trap hit a mis-aligned user
	 * stack pointer.  Save the window into the thread's in-kernel
	 * save area (like spill_fixup), then retry or go to the common
	 * user-mode unaligned-access path.
	 */
spill_fixup_mna:
	ldub		[%g6 + AOFF_task_thread + AOFF_thread_flags], %g1
	andcc		%g1, SPARC_FLAG_32BIT, %g0	! 32-bit task?  (cc consumed by bne below)
	ldub		[%g6 + AOFF_task_thread + AOFF_thread_w_saved], %g1	! %g1 = windows already saved.
	sll		%g1, 3, %g3			! Index into rwbuf_stkptrs (8 bytes per entry).
	add		%g6, %g3, %g3
	stx		%sp, [%g3 + AOFF_task_thread + AOFF_thread_rwbuf_stkptrs]	! Remember target stack ptr.

	sll		%g1, 7, %g3			! Index into reg_window (128 bytes per window).
	bne,pt		%xcc, 1f			! 32-bit task: use 32-bit save format.
	 add		%g6, %g3, %g3
	stx		%l0, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x00]	! 64-bit format: full locals+ins.
	stx		%l1, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x08]
	stx		%l2, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x10]
	stx		%l3, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x18]
	stx		%l4, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x20]

	stx		%l5, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x28]
	stx		%l6, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x30]
	stx		%l7, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x38]
	stx		%i0, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x40]
	stx		%i1, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x48]
	stx		%i2, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x50]
	stx		%i3, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x58]
	stx		%i4, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x60]

	stx		%i5, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x68]
	stx		%i6, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x70]
	stx		%i7, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x78]
	b,pt		%xcc, 2f
	 add		%g1, 1, %g1			! (delay) One more window saved.
1:	std		%l0, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x00]	! 32-bit format via register pairs.
	std		%l2, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x08]
	std		%l4, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x10]

	std		%l6, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x18]
	std		%i0, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x20]
	std		%i2, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x28]
	std		%i4, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x30]
	std		%i6, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x38]
	add		%g1, 1, %g1			! One more window saved.
2:	stb		%g1, [%g6 + AOFF_task_thread + AOFF_thread_w_saved]
	rdpr		%tstate, %g1

	andcc		%g1, TSTATE_PRIV, %g0		! Spill taken from privileged mode?
	saved						! Tell the cpu this window is now saved.
	be,pn		%xcc, window_mna_from_user_common	! User mode: report the fault.
	 and		%g1, TSTATE_CWP, %g1		! (delay) Extract trapped CWP.
	retry						! Privileged: resume the trapped save.
	/* Common user-mode path for mis-aligned spill/fill faults.
	 * On entry: %g1 = trapped CWP, %g4/%g5 = fault info (picked up
	 * from %l4/%l5 after etrap).  Hand off to mem_address_unaligned().
	 */
window_mna_from_user_common:
	wrpr		%g1, %cwp			! Back into the trapped window.
	sethi		%hi(109f), %g7			! %g7 = return address for etrap.
	ba,pt		%xcc, etrap
109:	 or		%g7, %lo(109b), %g7
	mov		%l4, %o2			! Setup args for
	mov		%l5, %o1			! mem_address_unaligned.
	call		mem_address_unaligned
	 add		%sp, PTREGS_OFF, %o0		! (delay) arg0 = pt_regs pointer.
	ba,pt		%xcc, rtrap
	 clr		%l6
266
	/* These are only needed for 64-bit mode processes which
	 * put their stack pointer into the VPTE area and there
	 * happens to be a VPTE tlb entry mapped there during
	 * a spill/fill trap to that stack frame.
	 */
	.globl		winfix_dax, fill_fixup_dax, spill_fixup_dax

	/* winfix_dax: like winfix_mna, but for a data access exception
	 * inside a spill/fill handler.  %g3 presumably holds a PC inside
	 * the 128-byte trap-table slot (TODO confirm against callers);
	 * redirect %tnpc to offset 0x74 of that slot and resume there.
	 */
winfix_dax:
	andn		%g3, 0x7f, %g3			! Round down to 128-byte slot base.
	add		%g3, 0x74, %g3			! Offset of the fixup entry in the slot.
	wrpr		%g3, %tnpc			! Resume there when we "done".
	done
	/* fill_fixup_dax: a window fill took a data access exception.
	 * Same unwinding dance as fill_fixup, but the final C handler
	 * is spitfire_data_access_exception().
	 */
fill_fixup_dax:
	rdpr		%tstate, %g1			! Trapped-state register.
	andcc		%g1, TSTATE_PRIV, %g0		! Fill taken from privileged mode?
	be,pt		%xcc, window_dax_from_user_common	! User mode: easy common path.
	 and		%g1, TSTATE_CWP, %g1		! (delay) Extract trapped CWP.

	/* Please, see fill_fixup commentary about why we must preserve
	 * %l5 and %l6 to preserve absolute correct semantics.
	 */
	rdpr		%wstate, %g2			! Grab user mode wstate.
	wrpr		%g1, %cwp			! Get into the right window.
	sll		%g2, 3, %g2			! NORMAL-->OTHER
	wrpr		%g0, 0x0, %canrestore		! Standard etrap stuff.

	wrpr		%g2, 0x0, %wstate		! This must be consistent.
	wrpr		%g0, 0x0, %otherwin		! We know this.
	mov		PRIMARY_CONTEXT, %g1		! Change contexts...
	stxa		%g0, [%g1] ASI_DMMU		! Back into the nucleus.
	flush		%g6				! Flush instruction buffers
	rdpr		%pstate, %l1			! Prepare to change globals.
	mov		%g4, %o1			! Setup args for
	mov		%g5, %o2			! final call to spitfire_data_access_exception.
	andn		%l1, PSTATE_MM, %l1		! We want to be in RMO

	mov		%g6, %o7			! Stash away current.
	wrpr		%g0, 0x0, %tl			! Out of trap levels.
	wrpr		%l1, (PSTATE_IE | PSTATE_AG | PSTATE_RMO), %pstate
	sethi		%uhi(PAGE_OFFSET), %g4		! Set page_offset global reg.
	mov		%o7, %g6			! Get current back.
	sllx		%g4, 32, %g4			! Finish it.
	call		spitfire_data_access_exception
	 add		%sp, PTREGS_OFF, %o0		! (delay) arg0 = pt_regs pointer.

	b,pt		%xcc, rtrap
	 nop						! yes, the nop is correct
	/* spill_fixup_dax: a window spill trap took a data access
	 * exception.  Save the window into the thread's in-kernel save
	 * area (like spill_fixup), then retry or go to the common
	 * user-mode data-access-exception path.
	 */
spill_fixup_dax:
	ldub		[%g6 + AOFF_task_thread + AOFF_thread_flags], %g1
	andcc		%g1, SPARC_FLAG_32BIT, %g0	! 32-bit task?  (cc consumed by bne below)
	ldub		[%g6 + AOFF_task_thread + AOFF_thread_w_saved], %g1	! %g1 = windows already saved.
	sll		%g1, 3, %g3			! Index into rwbuf_stkptrs (8 bytes per entry).
	add		%g6, %g3, %g3
	stx		%sp, [%g3 + AOFF_task_thread + AOFF_thread_rwbuf_stkptrs]	! Remember target stack ptr.

	sll		%g1, 7, %g3			! Index into reg_window (128 bytes per window).
	bne,pt		%xcc, 1f			! 32-bit task: use 32-bit save format.
	 add		%g6, %g3, %g3
	stx		%l0, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x00]	! 64-bit format: full locals+ins.
	stx		%l1, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x08]
	stx		%l2, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x10]
	stx		%l3, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x18]
	stx		%l4, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x20]

	stx		%l5, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x28]
	stx		%l6, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x30]
	stx		%l7, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x38]
	stx		%i0, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x40]
	stx		%i1, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x48]
	stx		%i2, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x50]
	stx		%i3, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x58]
	stx		%i4, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x60]

	stx		%i5, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x68]
	stx		%i6, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x70]
	stx		%i7, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x78]
	b,pt		%xcc, 2f
	 add		%g1, 1, %g1			! (delay) One more window saved.
1:	std		%l0, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x00]	! 32-bit format via register pairs.
	std		%l2, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x08]
	std		%l4, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x10]

	std		%l6, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x18]
	std		%i0, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x20]
	std		%i2, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x28]
	std		%i4, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x30]
	std		%i6, [%g3 + AOFF_task_thread + AOFF_thread_reg_window + 0x38]
	add		%g1, 1, %g1			! One more window saved.
2:	stb		%g1, [%g6 + AOFF_task_thread + AOFF_thread_w_saved]
	rdpr		%tstate, %g1

	andcc		%g1, TSTATE_PRIV, %g0		! Spill taken from privileged mode?
	saved						! Tell the cpu this window is now saved.
	be,pn		%xcc, window_dax_from_user_common	! User mode: report the fault.
	 and		%g1, TSTATE_CWP, %g1		! (delay) Extract trapped CWP.
	retry						! Privileged: resume the trapped save.
	/* Common user-mode path for data-access-exception spill/fill
	 * faults.  On entry: %g1 = trapped CWP, %g4/%g5 = fault info
	 * (picked up from %l4/%l5 after etrap).  Hand off to
	 * spitfire_data_access_exception().
	 */
window_dax_from_user_common:
	wrpr		%g1, %cwp			! Back into the trapped window.
	sethi		%hi(109f), %g7			! %g7 = return address for etrap.
	ba,pt		%xcc, etrap
109:	 or		%g7, %lo(109b), %g7
	mov		%l4, %o1			! Setup args for
	mov		%l5, %o2			! spitfire_data_access_exception.
	call		spitfire_data_access_exception
	 add		%sp, PTREGS_OFF, %o0		! (delay) arg0 = pt_regs pointer.
	ba,pt		%xcc, rtrap
	 clr		%l6
373
374