/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994, 1995, 1996, 2001 Ralf Baechle
 * Copyright (C) 1994, 1995, 1996 Paul M. Antoine.
 *
 * Macros for saving/restoring a struct pt_regs exception frame on the
 * stack (sp always points at the frame while these run).
 */
#ifndef __ASM_STACKFRAME_H
#define __ASM_STACKFRAME_H

#include <linux/config.h>
#include <asm/addrspace.h>
#include <asm/mipsregs.h>
#include <asm/processor.h>
#include <asm/asm.h>
#include <asm/offset.h>

/*
 * Save the assembler temporary ($1/at) into the frame.  .set noat is
 * required so gas accepts an explicit reference to $1.
 */
#define SAVE_AT                                         \
		.set	push;                           \
		.set	noat;                           \
		sw	$1, PT_R1(sp);                  \
		.set	pop

/*
 * Save the temporaries $8-$15, $24 plus the HI/LO multiply/divide
 * result registers.  The mfhi/mflo results are stored a couple of
 * instructions after being read — presumably to hide the HI/LO access
 * latency on older pipelines (TODO confirm intent).
 */
#define SAVE_TEMP                                       \
		mfhi	v1;                             \
		sw	$8, PT_R8(sp);                  \
		sw	$9, PT_R9(sp);                  \
		sw	v1, PT_HI(sp);                  \
		mflo	v1;                             \
		sw	$10,PT_R10(sp);                 \
		sw	$11, PT_R11(sp);                \
		sw	v1, PT_LO(sp);                  \
		sw	$12, PT_R12(sp);                \
		sw	$13, PT_R13(sp);                \
		sw	$14, PT_R14(sp);                \
		sw	$15, PT_R15(sp);                \
		sw	$24, PT_R24(sp)

/*
 * Save the callee-saved registers $16-$23 (s0-s7) and $30 (s8/fp).
 */
#define SAVE_STATIC                                     \
		sw	$16, PT_R16(sp);                \
		sw	$17, PT_R17(sp);                \
		sw	$18, PT_R18(sp);                \
		sw	$19, PT_R19(sp);                \
		sw	$20, PT_R20(sp);                \
		sw	$21, PT_R21(sp);                \
		sw	$22, PT_R22(sp);                \
		sw	$23, PT_R23(sp);                \
		sw	$30, PT_R30(sp)

#ifdef CONFIG_SMP
/*
 * Load this CPU's saved kernel stack pointer into k1: the upper bits of
 * CP0 Context (srl 23) are used as an index — scaled to a word offset
 * (sll 2) — into the kernelsp array.  (Presumably the CPU number is
 * kept in those Context bits; verify against the Context setup code.)
 * Clobbers k0.
 */
# define GET_SAVED_SP                                   \
		mfc0	k0, CP0_CONTEXT;                \
		lui	k1, %hi(kernelsp);              \
		srl	k0, k0, 23;                     \
		sll	k0, k0, 2;                      \
		addu	k1, k0;                         \
		lw	k1, %lo(kernelsp)(k1);

#else
/* Uniprocessor: kernelsp is a single variable; load it into k1. */
# define GET_SAVED_SP                                   \
		lui	k1, %hi(kernelsp);              \
		lw	k1, %lo(kernelsp)(k1);
#endif

/*
 * Save the scratch state: $2-$7, $25, $28, $31, the old sp and CP0
 * Status/Cause/EPC.  Switches to the kernel stack when the exception
 * came from user mode; Status.CU0 set is used as the marker that we
 * were already running on the kernel stack.
 */
#define SAVE_SOME                                       \
		.set	push;                           \
		.set	reorder;                        \
		mfc0	k0, CP0_STATUS;                 \
		sll	k0, 3;     /* extract cu0 bit */ \
		.set	noreorder;                      \
		bltz	k0, 8f;    /* CU0 set: already on kernel stack */ \
		 move	k1, sp;    /* delay slot: keep current sp as base */ \
		.set	reorder;                        \
		/* Called from user mode, new stack.
		   */                                   \
		GET_SAVED_SP                            \
8:                                                      \
		move	k0, sp;                         \
		subu	sp, k1, PT_SIZE;  /* carve the pt_regs frame */ \
		sw	k0, PT_R29(sp);   /* original sp, saved via k0 */ \
		sw	$3, PT_R3(sp);                  \
		sw	$0, PT_R0(sp);                  \
		mfc0	v1, CP0_STATUS;                 \
		sw	$2, PT_R2(sp);                  \
		sw	v1, PT_STATUS(sp);              \
		sw	$4, PT_R4(sp);                  \
		mfc0	v1, CP0_CAUSE;                  \
		sw	$5, PT_R5(sp);                  \
		sw	v1, PT_CAUSE(sp);               \
		sw	$6, PT_R6(sp);                  \
		mfc0	v1, CP0_EPC;                    \
		sw	$7, PT_R7(sp);                  \
		sw	v1, PT_EPC(sp);                 \
		sw	$25, PT_R25(sp);                \
		sw	$28, PT_R28(sp);                \
		sw	$31, PT_R31(sp);                \
		ori	$28, sp, 0x1fff;  /* gp = sp rounded down to the */ \
		xori	$28, 0x1fff;      /* 8kB stack base (current task — TODO confirm layout) */ \
		.set	pop

/* Save the complete register state: scratch, at, temporaries and statics. */
#define SAVE_ALL                                        \
		SAVE_SOME;                              \
		SAVE_AT;                                \
		SAVE_TEMP;                              \
		SAVE_STATIC

/* Restore $1/at; .set noat needed for the explicit $1 reference. */
#define RESTORE_AT                                      \
		.set	push;                           \
		.set	noat;                           \
		lw	$1, PT_R1(sp);                  \
		.set	pop;

/*
 * Restore $8-$15, $24 and HI/LO.  mtlo/mthi are interleaved with
 * unrelated loads, presumably to overlap the HI/LO write latency.
 * $24 doubles as the scratch register and is restored last.
 */
#define RESTORE_TEMP                                    \
		lw	$24, PT_LO(sp);                 \
		lw	$8, PT_R8(sp);                  \
		lw	$9, PT_R9(sp);                  \
		mtlo	$24;                            \
		lw	$24, PT_HI(sp);                 \
		lw	$10,PT_R10(sp);                 \
		lw	$11, PT_R11(sp);                \
		mthi	$24;                            \
		lw	$12, PT_R12(sp);                \
		lw	$13, PT_R13(sp);                \
		lw	$14, PT_R14(sp);                \
		lw	$15, PT_R15(sp);                \
		lw	$24, PT_R24(sp)

/* Restore the callee-saved registers $16-$23 and $30. */
#define RESTORE_STATIC                                  \
		lw	$16, PT_R16(sp);                \
		lw	$17, PT_R17(sp);                \
		lw	$18, PT_R18(sp);                \
		lw	$19, PT_R19(sp);                \
		lw	$20, PT_R20(sp);                \
		lw	$21, PT_R21(sp);                \
		lw	$22, PT_R22(sp);                \
		lw	$23, PT_R23(sp);                \
		lw	$30, PT_R30(sp)

#if defined(CONFIG_CPU_R3000) || defined(CONFIG_CPU_TX39XX)

/*
 * R3000/TX39 flavour.  Rebuild CP0 Status from the saved frame, but
 * keep the interrupt-mask bits 15:8 of the *live* Status (the 0xff00
 * mask/nor/merge below) so mask changes made while in the kernel
 * survive the return.  The low five mode bits of the live Status are
 * cleared first via the ori/xori 0x1f pair.  The actual return to EPC
 * happens in RESTORE_SP_AND_RET.
 */
#define RESTORE_SOME                                    \
		.set	push;                           \
		.set	reorder;                        \
		mfc0	t0, CP0_STATUS;                 \
		.set	pop;                            \
		ori	t0, 0x1f;                       \
		xori	t0, 0x1f;                       \
		mtc0	t0, CP0_STATUS;                 \
		li	v1, 0xff00;                     \
		and	t0, v1;       /* t0 = live interrupt mask */ \
		lw	v0, PT_STATUS(sp);              \
		nor	v1, $0, v1;                     \
		and	v0, v1;       /* v0 = saved Status sans mask */ \
		or	v0, t0;                         \
		mtc0	v0, CP0_STATUS;                 \
		lw	$31, PT_R31(sp);                \
		lw	$28, PT_R28(sp);                \
		lw	$25, PT_R25(sp);                \
		lw	$7, PT_R7(sp);                  \
		lw	$6, PT_R6(sp);                  \
		lw	$5, PT_R5(sp);                  \
		lw	$4, PT_R4(sp);                  \
		lw	$3, PT_R3(sp);                  \
		lw	$2, PT_R2(sp)

/*
 * Pop sp and jump back to the saved EPC; rfe in the jr delay slot
 * restores the pre-exception KU/IE state on R3000-class CPUs.
 */
#define RESTORE_SP_AND_RET                              \
		.set	push;                           \
		.set	noreorder;                      \
		lw	k0, PT_EPC(sp);                 \
		lw	sp, PT_R29(sp);                 \
		jr	k0;                             \
		 rfe;          /* delay slot */         \
		.set	pop

#else

/*
 * R4000-class flavour.  Status is merged exactly as in the R3000 path
 * above (live interrupt-mask bits 15:8 kept, everything else from the
 * saved frame); the return address is written back into CP0 EPC here
 * and the actual return is the eret in RESTORE_SP_AND_RET.
 */
#define RESTORE_SOME                                    \
		.set	push;                           \
		.set	reorder;                        \
		mfc0	t0, CP0_STATUS;                 \
		.set	pop;                            \
		ori	t0, 0x1f;                       \
		xori	t0, 0x1f;                       \
		mtc0	t0, CP0_STATUS;                 \
		li	v1, 0xff00;                     \
		and	t0, v1;       /* t0 = live interrupt mask */ \
		lw	v0, PT_STATUS(sp);              \
		nor	v1, $0, v1;                     \
		and	v0, v1;       /* v0 = saved Status sans mask */ \
		or	v0, t0;                         \
		mtc0	v0, CP0_STATUS;                 \
		lw	v1, PT_EPC(sp);                 \
		mtc0	v1, CP0_EPC;                    \
		lw	$31, PT_R31(sp);                \
		lw	$28, PT_R28(sp);                \
		lw	$25, PT_R25(sp);                \
		lw	$7, PT_R7(sp);                  \
		lw	$6, PT_R6(sp);                  \
		lw	$5, PT_R5(sp);                  \
		lw	$4, PT_R4(sp);                  \
		lw	$3, PT_R3(sp);                  \
		lw	$2, PT_R2(sp)

/*
 * Pop sp and return from exception.  eret is a MIPS III instruction,
 * hence the temporary .set mips3 / .set mips0 bracket.
 */
#define RESTORE_SP_AND_RET                              \
		lw	sp, PT_R29(sp);                 \
		.set	mips3;                          \
		eret;                                   \
		.set	mips0

#endif

/* Pop the saved stack pointer without returning from the exception. */
#define RESTORE_SP                                      \
		lw	sp, PT_R29(sp);                 \

/* Undo SAVE_ALL, leaving the return to the caller. */
#define RESTORE_ALL                                     \
		RESTORE_SOME;                           \
		RESTORE_AT;                             \
		RESTORE_TEMP;                           \
		RESTORE_STATIC;                         \
		RESTORE_SP

/* Undo SAVE_ALL and return from the exception. */
#define RESTORE_ALL_AND_RET                             \
		RESTORE_SOME;                           \
		RESTORE_AT;                             \
		RESTORE_TEMP;                           \
		RESTORE_STATIC;                         \
		RESTORE_SP_AND_RET


/*
 * Move to kernel mode and disable interrupts.
 * Set cp0 enable bit as sign that we're running on the kernel stack
 */
#define CLI                                             \
		mfc0	t0,CP0_STATUS;                  \
		li	t1,ST0_CU0|0x1f;                \
		or	t0,t1;                          \
		xori	t0,0x1f;  /* clear bits 4:0 incl. IE */ \
		mtc0	t0,CP0_STATUS

/*
 * Move to kernel mode and enable interrupts.
 * Set cp0 enable bit as sign that we're running on the kernel stack
 */
#define STI                                             \
		mfc0	t0,CP0_STATUS;                  \
		li	t1,ST0_CU0|0x1f;                \
		or	t0,t1;                          \
		xori	t0,0x1e;  /* clear bits 4:1, leave IE (bit 0) set */ \
		mtc0	t0,CP0_STATUS

/*
 * Just move to kernel mode and leave interrupts as they are.
 * Set cp0 enable bit as sign that we're running on the kernel stack
 */
#define KMODE                                           \
		mfc0	t0,CP0_STATUS;                  \
		li	t1,ST0_CU0|0x1e;                \
		or	t0,t1;                          \
		xori	t0,0x1e;  /* clear bits 4:1; IE (bit 0) untouched */ \
		mtc0	t0,CP0_STATUS

#endif /* __ASM_STACKFRAME_H */