1 /* SPDX-License-Identifier: GPL-2.0 */
2 /*
3 * Author: Huacai Chen <chenhuacai@loongson.cn>
4 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
5 */
6 #ifndef _ASM_FPU_H
7 #define _ASM_FPU_H
8
9 #include <linux/sched.h>
10 #include <linux/sched/task_stack.h>
11 #include <linux/ptrace.h>
12 #include <linux/thread_info.h>
13 #include <linux/bitops.h>
14
15 #include <asm/cpu.h>
16 #include <asm/cpu-features.h>
17 #include <asm/current.h>
18 #include <asm/loongarch.h>
19 #include <asm/processor.h>
20 #include <asm/ptrace.h>
21
22 struct sigcontext;
23
24 extern void kernel_fpu_begin(void);
25 extern void kernel_fpu_end(void);
26
27 extern void _init_fpu(unsigned int);
28 extern void _save_fp(struct loongarch_fpu *);
29 extern void _restore_fp(struct loongarch_fpu *);
30
31 extern void _save_lsx(struct loongarch_fpu *fpu);
32 extern void _restore_lsx(struct loongarch_fpu *fpu);
33 extern void _init_lsx_upper(void);
34 extern void _restore_lsx_upper(struct loongarch_fpu *fpu);
35
36 extern void _save_lasx(struct loongarch_fpu *fpu);
37 extern void _restore_lasx(struct loongarch_fpu *fpu);
38 extern void _init_lasx_upper(void);
39 extern void _restore_lasx_upper(struct loongarch_fpu *fpu);
40
41 static inline void enable_lsx(void);
42 static inline void disable_lsx(void);
43 static inline void save_lsx(struct task_struct *t);
44 static inline void restore_lsx(struct task_struct *t);
45
46 static inline void enable_lasx(void);
47 static inline void disable_lasx(void);
48 static inline void save_lasx(struct task_struct *t);
49 static inline void restore_lasx(struct task_struct *t);
50
51 /*
52 * Mask the FCSR Cause bits according to the Enable bits, observing
53 * that Unimplemented is always enabled.
54 */
mask_fcsr_x(unsigned long fcsr)55 static inline unsigned long mask_fcsr_x(unsigned long fcsr)
56 {
57 return fcsr & ((fcsr & FPU_CSR_ALL_E) <<
58 (ffs(FPU_CSR_ALL_X) - ffs(FPU_CSR_ALL_E)));
59 }
60
is_fp_enabled(void)61 static inline int is_fp_enabled(void)
62 {
63 return (csr_read32(LOONGARCH_CSR_EUEN) & CSR_EUEN_FPEN) ?
64 1 : 0;
65 }
66
is_lsx_enabled(void)67 static inline int is_lsx_enabled(void)
68 {
69 if (!cpu_has_lsx)
70 return 0;
71
72 return (csr_read32(LOONGARCH_CSR_EUEN) & CSR_EUEN_LSXEN) ?
73 1 : 0;
74 }
75
is_lasx_enabled(void)76 static inline int is_lasx_enabled(void)
77 {
78 if (!cpu_has_lasx)
79 return 0;
80
81 return (csr_read32(LOONGARCH_CSR_EUEN) & CSR_EUEN_LASXEN) ?
82 1 : 0;
83 }
84
/* Return nonzero if any SIMD unit (LSX or LASX) is currently enabled. */
static inline int is_simd_enabled(void)
{
	int lsx = is_lsx_enabled();
	int lasx = is_lasx_enabled();

	/* Bitwise OR keeps both checks unconditionally evaluated. */
	return lsx | lasx;
}
89
/* Set the FP-enable bit in the current CPU's EUEN CSR. */
#define enable_fpu() set_csr_euen(CSR_EUEN_FPEN)

/* Clear the FP-enable bit in the current CPU's EUEN CSR. */
#define disable_fpu() clear_csr_euen(CSR_EUEN_FPEN)

/* Mark the current task as no longer owning the live FPU state. */
#define clear_fpu_owner() clear_thread_flag(TIF_USEDFPU)
95
/* Does the current task own the live FPU register state? */
static inline int is_fpu_owner(void)
{
	return test_thread_flag(TIF_USEDFPU);
}
100
/*
 * Unconditionally claim the FPU for the current task: enable the unit
 * in hardware, flag the task as FPU owner, and record the enable bit
 * in the task's saved EUEN.
 */
static inline void __own_fpu(void)
{
	enable_fpu();
	set_thread_flag(TIF_USEDFPU);
	KSTK_EUEN(current) |= CSR_EUEN_FPEN;
}
107
own_fpu_inatomic(int restore)108 static inline void own_fpu_inatomic(int restore)
109 {
110 if (cpu_has_fpu && !is_fpu_owner()) {
111 __own_fpu();
112 if (restore)
113 _restore_fp(¤t->thread.fpu);
114 }
115 }
116
/* Preemption-safe wrapper around own_fpu_inatomic(). */
static inline void own_fpu(int restore)
{
	preempt_disable();
	own_fpu_inatomic(restore);
	preempt_enable();
}
123
/*
 * Give up FPU/SIMD ownership for @tsk; the caller must have preemption
 * disabled.  If @save is nonzero, the live register state is written
 * back to tsk->thread.fpu first.  The task's saved EUEN is always
 * cleared of all FP/SIMD enable bits, even if it was not the owner.
 */
static inline void lose_fpu_inatomic(int save, struct task_struct *tsk)
{
	if (is_fpu_owner()) {
		if (!is_simd_enabled()) {
			if (save)
				_save_fp(&tsk->thread.fpu);
			disable_fpu();
		} else {
			/* Save only the widest enabled SIMD context. */
			if (save) {
				if (!is_lasx_enabled())
					save_lsx(tsk);
				else
					save_lasx(tsk);
			}
			disable_fpu();
			disable_lsx();
			disable_lasx();
			clear_tsk_thread_flag(tsk, TIF_USEDSIMD);
		}
		clear_tsk_thread_flag(tsk, TIF_USEDFPU);
	}
	KSTK_EUEN(tsk) &= ~(CSR_EUEN_FPEN | CSR_EUEN_LSXEN | CSR_EUEN_LASXEN);
}
147
/* Preemption-safe wrapper: drop FPU/SIMD ownership for the current task. */
static inline void lose_fpu(int save)
{
	preempt_disable();
	lose_fpu_inatomic(save, current);
	preempt_enable();
}
154
init_fpu(void)155 static inline void init_fpu(void)
156 {
157 unsigned int fcsr = current->thread.fpu.fcsr;
158
159 __own_fpu();
160 _init_fpu(fcsr);
161 set_used_math();
162 }
163
save_fp(struct task_struct * tsk)164 static inline void save_fp(struct task_struct *tsk)
165 {
166 if (cpu_has_fpu)
167 _save_fp(&tsk->thread.fpu);
168 }
169
restore_fp(struct task_struct * tsk)170 static inline void restore_fp(struct task_struct *tsk)
171 {
172 if (cpu_has_fpu)
173 _restore_fp(&tsk->thread.fpu);
174 }
175
save_fpu_regs(struct task_struct * tsk)176 static inline void save_fpu_regs(struct task_struct *tsk)
177 {
178 unsigned int euen;
179
180 if (tsk == current) {
181 preempt_disable();
182
183 euen = csr_read32(LOONGARCH_CSR_EUEN);
184
185 #ifdef CONFIG_CPU_HAS_LASX
186 if (euen & CSR_EUEN_LASXEN)
187 _save_lasx(¤t->thread.fpu);
188 else
189 #endif
190 #ifdef CONFIG_CPU_HAS_LSX
191 if (euen & CSR_EUEN_LSXEN)
192 _save_lsx(¤t->thread.fpu);
193 else
194 #endif
195 if (euen & CSR_EUEN_FPEN)
196 _save_fp(¤t->thread.fpu);
197
198 preempt_enable();
199 }
200 }
201
/* Does the current task own the live SIMD (LSX/LASX) register state? */
static inline int is_simd_owner(void)
{
	return test_thread_flag(TIF_USEDSIMD);
}
206
207 #ifdef CONFIG_CPU_HAS_LSX
208
enable_lsx(void)209 static inline void enable_lsx(void)
210 {
211 if (cpu_has_lsx)
212 csr_xchg32(CSR_EUEN_LSXEN, CSR_EUEN_LSXEN, LOONGARCH_CSR_EUEN);
213 }
214
disable_lsx(void)215 static inline void disable_lsx(void)
216 {
217 if (cpu_has_lsx)
218 csr_xchg32(0, CSR_EUEN_LSXEN, LOONGARCH_CSR_EUEN);
219 }
220
save_lsx(struct task_struct * t)221 static inline void save_lsx(struct task_struct *t)
222 {
223 if (cpu_has_lsx)
224 _save_lsx(&t->thread.fpu);
225 }
226
restore_lsx(struct task_struct * t)227 static inline void restore_lsx(struct task_struct *t)
228 {
229 if (cpu_has_lsx)
230 _restore_lsx(&t->thread.fpu);
231 }
232
init_lsx_upper(void)233 static inline void init_lsx_upper(void)
234 {
235 if (cpu_has_lsx)
236 _init_lsx_upper();
237 }
238
restore_lsx_upper(struct task_struct * t)239 static inline void restore_lsx_upper(struct task_struct *t)
240 {
241 if (cpu_has_lsx)
242 _restore_lsx_upper(&t->thread.fpu);
243 }
244
245 #else
/* LSX support not configured: all LSX helpers become no-ops. */
static inline void enable_lsx(void) {}
static inline void disable_lsx(void) {}
static inline void save_lsx(struct task_struct *t) {}
static inline void restore_lsx(struct task_struct *t) {}
static inline void init_lsx_upper(void) {}
static inline void restore_lsx_upper(struct task_struct *t) {}
252 #endif
253
254 #ifdef CONFIG_CPU_HAS_LASX
255
enable_lasx(void)256 static inline void enable_lasx(void)
257 {
258
259 if (cpu_has_lasx)
260 csr_xchg32(CSR_EUEN_LASXEN, CSR_EUEN_LASXEN, LOONGARCH_CSR_EUEN);
261 }
262
disable_lasx(void)263 static inline void disable_lasx(void)
264 {
265 if (cpu_has_lasx)
266 csr_xchg32(0, CSR_EUEN_LASXEN, LOONGARCH_CSR_EUEN);
267 }
268
save_lasx(struct task_struct * t)269 static inline void save_lasx(struct task_struct *t)
270 {
271 if (cpu_has_lasx)
272 _save_lasx(&t->thread.fpu);
273 }
274
restore_lasx(struct task_struct * t)275 static inline void restore_lasx(struct task_struct *t)
276 {
277 if (cpu_has_lasx)
278 _restore_lasx(&t->thread.fpu);
279 }
280
init_lasx_upper(void)281 static inline void init_lasx_upper(void)
282 {
283 if (cpu_has_lasx)
284 _init_lasx_upper();
285 }
286
restore_lasx_upper(struct task_struct * t)287 static inline void restore_lasx_upper(struct task_struct *t)
288 {
289 if (cpu_has_lasx)
290 _restore_lasx_upper(&t->thread.fpu);
291 }
292
293 #else
/* LASX support not configured: all LASX helpers become no-ops. */
static inline void enable_lasx(void) {}
static inline void disable_lasx(void) {}
static inline void save_lasx(struct task_struct *t) {}
static inline void restore_lasx(struct task_struct *t) {}
static inline void init_lasx_upper(void) {}
static inline void restore_lasx_upper(struct task_struct *t) {}
300 #endif
301
thread_lsx_context_live(void)302 static inline int thread_lsx_context_live(void)
303 {
304 if (!cpu_has_lsx)
305 return 0;
306
307 return test_thread_flag(TIF_LSX_CTX_LIVE);
308 }
309
thread_lasx_context_live(void)310 static inline int thread_lasx_context_live(void)
311 {
312 if (!cpu_has_lasx)
313 return 0;
314
315 return test_thread_flag(TIF_LASX_CTX_LIVE);
316 }
317
318 #endif /* _ASM_FPU_H */
319