• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /* SPDX-License-Identifier: GPL-2.0 */
2 /*
3  * Author: Huacai Chen <chenhuacai@loongson.cn>
4  * Copyright (C) 2020 Loongson Technology Corporation Limited
5  */
6 #ifndef _ASM_FPU_H
7 #define _ASM_FPU_H
8 
9 #include <linux/sched.h>
10 #include <linux/sched/task_stack.h>
11 #include <linux/ptrace.h>
12 #include <linux/thread_info.h>
13 #include <linux/bitops.h>
14 
15 #include <asm/cpu.h>
16 #include <asm/cpu-features.h>
17 #include <asm/current.h>
18 #include <asm/inst.h>
19 #include <asm/loongarchregs.h>
20 #include <asm/ptrace.h>
21 #include <asm/processor.h>
22 
23 struct sigcontext;
24 
25 extern void kernel_fpu_begin(void);
26 extern void kernel_fpu_end(void);
27 
28 extern void _init_fpu(unsigned int);
29 extern void _save_fp(struct loongarch_fpu *);
30 extern void _restore_fp(struct loongarch_fpu *);
31 
32 extern void _save_lsx(struct loongarch_fpu *fpu);
33 extern void _restore_lsx(struct loongarch_fpu *fpu);
34 extern void _init_lsx_upper(void);
35 extern void _restore_lsx_upper(struct loongarch_fpu *fpu);
36 
37 extern void _save_lasx(struct loongarch_fpu *fpu);
38 extern void _restore_lasx(struct loongarch_fpu *fpu);
39 extern void _init_lasx_upper(void);
40 extern void _restore_lasx_upper(struct loongarch_fpu *fpu);
41 
42 static inline void enable_lsx(void);
43 static inline void disable_lsx(void);
44 static inline void save_lsx(struct task_struct *t);
45 static inline void restore_lsx(struct task_struct *t);
46 
47 static inline void enable_lasx(void);
48 static inline void disable_lasx(void);
49 static inline void save_lasx(struct task_struct *t);
50 static inline void restore_lasx(struct task_struct *t);
51 
52 /*
53  * Mask the FCSR Cause bits according to the Enable bits, observing
54  * that Unimplemented is always enabled.
55  */
mask_fcsr_x(unsigned long fcsr)56 static inline unsigned long mask_fcsr_x(unsigned long fcsr)
57 {
58 	return fcsr & ((fcsr & FPU_CSR_ALL_E) <<
59 			(ffs(FPU_CSR_ALL_X) - ffs(FPU_CSR_ALL_E)));
60 }
61 
is_fp_enabled(void)62 static inline int is_fp_enabled(void)
63 {
64 	return (csr_read32(LOONGARCH_CSR_EUEN) & CSR_EUEN_FPEN) ?
65 		1 : 0;
66 }
67 
is_lsx_enabled(void)68 static inline int is_lsx_enabled(void)
69 {
70 	if (!cpu_has_lsx)
71 		return 0;
72 
73 	return (csr_read32(LOONGARCH_CSR_EUEN) & CSR_EUEN_LSXEN) ?
74 		1 : 0;
75 }
76 
is_lasx_enabled(void)77 static inline int is_lasx_enabled(void)
78 {
79 	if (!cpu_has_lasx)
80 		return 0;
81 
82 	return (csr_read32(LOONGARCH_CSR_EUEN) & CSR_EUEN_LASXEN) ?
83 		1 : 0;
84 }
85 
/* Return nonzero if either SIMD unit (LSX or LASX) is currently enabled. */
static inline int is_simd_enabled(void)
{
	int lsx = is_lsx_enabled();
	int lasx = is_lasx_enabled();

	/* Bitwise OR (not ||) so both CSR checks are always performed. */
	return lsx | lasx;
}
90 
/* Enable the scalar FPU on this CPU by setting CSR.EUEN.FPEN. */
#define enable_fpu()						\
do {								\
	set_csr_euen(CSR_EUEN_FPEN);				\
} while (0)

/* Disable the scalar FPU on this CPU by clearing CSR.EUEN.FPEN. */
#define disable_fpu()						\
do {								\
	clear_csr_euen(CSR_EUEN_FPEN);				\
} while (0)

/* Mark the current task as no longer owning the hardware FPU state. */
#define clear_fpu_owner()	clear_thread_flag(TIF_USEDFPU)
102 
is_fpu_owner(void)103 static inline int is_fpu_owner(void)
104 {
105 	return test_thread_flag(TIF_USEDFPU);
106 }
107 
/*
 * Claim the hardware FPU for the current task: turn the FPU on,
 * flag the task as its owner, and record FPEN in the task's saved
 * EUEN so the enable survives a context switch.  NOTE(review):
 * callers appear to hold off preemption around this (see own_fpu()).
 */
static inline void __own_fpu(void)
{
	enable_fpu();
	set_thread_flag(TIF_USEDFPU);
	KSTK_EUEN(current) |= CSR_EUEN_FPEN;
}
114 
own_fpu_inatomic(int restore)115 static inline void own_fpu_inatomic(int restore)
116 {
117 	if (cpu_has_fpu && !is_fpu_owner()) {
118 		__own_fpu();
119 		if (restore)
120 			_restore_fp(&current->thread.fpu);
121 	}
122 }
123 
/*
 * Claim the FPU for the current task, bracketing the operation with
 * preemption disabled.  @restore: nonzero to reload saved FP context.
 */
static inline void own_fpu(int restore)
{
	preempt_disable();
	own_fpu_inatomic(restore);
	preempt_enable();
}
130 
/*
 * Give up the hardware FPU/SIMD state for @tsk (preemption must be
 * off; see lose_fpu()).  If @save is nonzero, the live registers are
 * written back to tsk->thread.fpu first, using the widest enabled
 * unit: LASX if enabled, else LSX, else scalar FP.  All FP/SIMD
 * enables are then cleared in hardware and in the task's saved EUEN.
 */
static inline void lose_fpu_inatomic(int save, struct task_struct *tsk)
{
	if (is_fpu_owner()) {
		if (!is_simd_enabled()) {
			/* Scalar-FP-only owner: save and disable just the FPU. */
			if (save)
				_save_fp(&tsk->thread.fpu);
			disable_fpu();
		} else {
			/* SIMD registers overlay FP registers, so one save suffices. */
			if (save) {
				if (!is_lasx_enabled())
					save_lsx(tsk);
				else
					save_lasx(tsk);
			}
			disable_fpu();
			disable_lsx();
			disable_lasx();
			clear_tsk_thread_flag(tsk, TIF_USEDSIMD);
		}
		clear_tsk_thread_flag(tsk, TIF_USEDFPU);
	}
	/* Clear saved enables even for non-owners, so a switch-in stays off. */
	KSTK_EUEN(tsk) &= ~(CSR_EUEN_FPEN | CSR_EUEN_LSXEN | CSR_EUEN_LASXEN);
}
154 
/*
 * Give up the FPU/SIMD state for the current task, with preemption
 * disabled around the operation.  @save: nonzero to write the live
 * registers back to current->thread.fpu first.
 */
static inline void lose_fpu(int save)
{
	preempt_disable();
	lose_fpu_inatomic(save, current);
	preempt_enable();
}
161 
/*
 * First-use FPU initialization for the current task: claim the FPU,
 * initialize the hardware with the task's saved FCSR value, and mark
 * the task as having used math.
 */
static inline void init_fpu(void)
{
	unsigned int fcsr = current->thread.fpu.fcsr;

	__own_fpu();
	_init_fpu(fcsr);
	set_used_math();
}
170 
save_fp(struct task_struct * tsk)171 static inline void save_fp(struct task_struct *tsk)
172 {
173 	if (cpu_has_fpu)
174 		_save_fp(&tsk->thread.fpu);
175 }
176 
restore_fp(struct task_struct * tsk)177 static inline void restore_fp(struct task_struct *tsk)
178 {
179 	if (cpu_has_fpu)
180 		_restore_fp(&tsk->thread.fpu);
181 }
182 
/*
 * Save the live FP/SIMD registers of @tsk into its thread struct,
 * choosing the widest currently-enabled unit (LASX > LSX > scalar FP)
 * based on CSR.EUEN.  Only acts when @tsk is the current task; for
 * any other task this is a no-op.
 */
static inline void save_fpu_regs(struct task_struct *tsk)
{
	unsigned int euen;

	if (tsk == current) {
		/* Keep EUEN read and register save on the same CPU. */
		preempt_disable();

		euen = csr_read32(LOONGARCH_CSR_EUEN);

#ifdef CONFIG_CPU_HAS_LASX
		if (euen & CSR_EUEN_LASXEN)
			_save_lasx(&current->thread.fpu);
		else
#endif
#ifdef CONFIG_CPU_HAS_LSX
		if (euen & CSR_EUEN_LSXEN)
			_save_lsx(&current->thread.fpu);
		else
#endif
		if (euen & CSR_EUEN_FPEN)
			_save_fp(&current->thread.fpu);

		preempt_enable();
	}
}
208 
is_simd_owner(void)209 static inline int is_simd_owner(void)
210 {
211 	return test_thread_flag(TIF_USEDSIMD);
212 }
213 
214 #ifdef CONFIG_CPU_HAS_LSX
215 
enable_lsx(void)216 static inline void enable_lsx(void)
217 {
218 	if (cpu_has_lsx)
219 		csr_xchg32(CSR_EUEN_LSXEN, CSR_EUEN_LSXEN, LOONGARCH_CSR_EUEN);
220 }
221 
disable_lsx(void)222 static inline void disable_lsx(void)
223 {
224 	if (cpu_has_lsx)
225 		csr_xchg32(0, CSR_EUEN_LSXEN, LOONGARCH_CSR_EUEN);
226 }
227 
save_lsx(struct task_struct * t)228 static inline void save_lsx(struct task_struct *t)
229 {
230 	if (cpu_has_lsx)
231 		_save_lsx(&t->thread.fpu);
232 }
233 
restore_lsx(struct task_struct * t)234 static inline void restore_lsx(struct task_struct *t)
235 {
236 	if (cpu_has_lsx)
237 		_restore_lsx(&t->thread.fpu);
238 }
239 
init_lsx_upper(void)240 static inline void init_lsx_upper(void)
241 {
242 	if (cpu_has_lsx)
243 		_init_lsx_upper();
244 }
245 
restore_lsx_upper(struct task_struct * t)246 static inline void restore_lsx_upper(struct task_struct *t)
247 {
248 	if (cpu_has_lsx)
249 		_restore_lsx_upper(&t->thread.fpu);
250 }
251 
#else
/* No-op stubs for kernels built without LSX support. */
static inline void enable_lsx(void) {}
static inline void disable_lsx(void) {}
static inline void save_lsx(struct task_struct *t) {}
static inline void restore_lsx(struct task_struct *t) {}
static inline void init_lsx_upper(void) {}
static inline void restore_lsx_upper(struct task_struct *t) {}
#endif
260 
261 #ifdef CONFIG_CPU_HAS_LASX
262 
enable_lasx(void)263 static inline void enable_lasx(void)
264 {
265 
266 	if (cpu_has_lasx)
267 		csr_xchg32(CSR_EUEN_LASXEN, CSR_EUEN_LASXEN, LOONGARCH_CSR_EUEN);
268 }
269 
disable_lasx(void)270 static inline void disable_lasx(void)
271 {
272 	if (cpu_has_lasx)
273 		csr_xchg32(0, CSR_EUEN_LASXEN, LOONGARCH_CSR_EUEN);
274 }
275 
save_lasx(struct task_struct * t)276 static inline void save_lasx(struct task_struct *t)
277 {
278 	if (cpu_has_lasx)
279 		_save_lasx(&t->thread.fpu);
280 }
281 
restore_lasx(struct task_struct * t)282 static inline void restore_lasx(struct task_struct *t)
283 {
284 	if (cpu_has_lasx)
285 		_restore_lasx(&t->thread.fpu);
286 }
287 
init_lasx_upper(void)288 static inline void init_lasx_upper(void)
289 {
290 	if (cpu_has_lasx)
291 		_init_lasx_upper();
292 }
293 
restore_lasx_upper(struct task_struct * t)294 static inline void restore_lasx_upper(struct task_struct *t)
295 {
296 	if (cpu_has_lasx)
297 		_restore_lasx_upper(&t->thread.fpu);
298 }
299 
#else
/* No-op stubs for kernels built without LASX support. */
static inline void enable_lasx(void) {}
static inline void disable_lasx(void) {}
static inline void save_lasx(struct task_struct *t) {}
static inline void restore_lasx(struct task_struct *t) {}
static inline void init_lasx_upper(void) {}
static inline void restore_lasx_upper(struct task_struct *t) {}
#endif
308 
thread_lsx_context_live(void)309 static inline int thread_lsx_context_live(void)
310 {
311 	if (!cpu_has_lsx)
312 		return 0;
313 
314 	return test_thread_flag(TIF_LSX_CTX_LIVE);
315 }
316 
thread_lasx_context_live(void)317 static inline int thread_lasx_context_live(void)
318 {
319 	if (!cpu_has_lasx)
320 		return 0;
321 
322 	return test_thread_flag(TIF_LASX_CTX_LIVE);
323 }
324 
325 #endif /* _ASM_FPU_H */
326