• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2013 Imagination Technologies
3  * Author: Paul Burton <paul.burton@imgtec.com>
4  *
5  * This program is free software; you can redistribute it and/or modify it
6  * under the terms of the GNU General Public License as published by the
7  * Free Software Foundation;  either version 2 of the  License, or (at your
8  * option) any later version.
9  */
10 #ifndef _ASM_MSA_H
11 #define _ASM_MSA_H
12 
13 #include <asm/mipsregs.h>
14 
15 #ifndef __ASSEMBLY__
16 
17 #include <asm/inst.h>
18 
19 extern void _save_msa(struct task_struct *);
20 extern void _restore_msa(struct task_struct *);
21 extern void _init_msa_upper(void);
22 
23 extern void read_msa_wr_b(unsigned idx, union fpureg *to);
24 extern void read_msa_wr_h(unsigned idx, union fpureg *to);
25 extern void read_msa_wr_w(unsigned idx, union fpureg *to);
26 extern void read_msa_wr_d(unsigned idx, union fpureg *to);
27 
28 /**
29  * read_msa_wr() - Read a single MSA vector register
30  * @idx:	The index of the vector register to read
31  * @to:		The FPU register union to store the registers value in
32  * @fmt:	The format of the data in the vector register
33  *
34  * Read the value of MSA vector register idx into the FPU register
35  * union to, using the format fmt.
36  */
read_msa_wr(unsigned idx,union fpureg * to,enum msa_2b_fmt fmt)37 static inline void read_msa_wr(unsigned idx, union fpureg *to,
38 			       enum msa_2b_fmt fmt)
39 {
40 	switch (fmt) {
41 	case msa_fmt_b:
42 		read_msa_wr_b(idx, to);
43 		break;
44 
45 	case msa_fmt_h:
46 		read_msa_wr_h(idx, to);
47 		break;
48 
49 	case msa_fmt_w:
50 		read_msa_wr_w(idx, to);
51 		break;
52 
53 	case msa_fmt_d:
54 		read_msa_wr_d(idx, to);
55 		break;
56 
57 	default:
58 		BUG();
59 	}
60 }
61 
62 extern void write_msa_wr_b(unsigned idx, union fpureg *from);
63 extern void write_msa_wr_h(unsigned idx, union fpureg *from);
64 extern void write_msa_wr_w(unsigned idx, union fpureg *from);
65 extern void write_msa_wr_d(unsigned idx, union fpureg *from);
66 
67 /**
68  * write_msa_wr() - Write a single MSA vector register
69  * @idx:	The index of the vector register to write
70  * @from:	The FPU register union to take the registers value from
71  * @fmt:	The format of the data in the vector register
72  *
73  * Write the value from the FPU register union from into MSA vector
74  * register idx, using the format fmt.
75  */
write_msa_wr(unsigned idx,union fpureg * from,enum msa_2b_fmt fmt)76 static inline void write_msa_wr(unsigned idx, union fpureg *from,
77 				enum msa_2b_fmt fmt)
78 {
79 	switch (fmt) {
80 	case msa_fmt_b:
81 		write_msa_wr_b(idx, from);
82 		break;
83 
84 	case msa_fmt_h:
85 		write_msa_wr_h(idx, from);
86 		break;
87 
88 	case msa_fmt_w:
89 		write_msa_wr_w(idx, from);
90 		break;
91 
92 	case msa_fmt_d:
93 		write_msa_wr_d(idx, from);
94 		break;
95 
96 	default:
97 		BUG();
98 	}
99 }
100 
enable_msa(void)101 static inline void enable_msa(void)
102 {
103 	if (cpu_has_msa) {
104 		set_c0_config5(MIPS_CONF5_MSAEN);
105 		enable_fpu_hazard();
106 	}
107 }
108 
disable_msa(void)109 static inline void disable_msa(void)
110 {
111 	if (cpu_has_msa) {
112 		clear_c0_config5(MIPS_CONF5_MSAEN);
113 		disable_fpu_hazard();
114 	}
115 }
116 
is_msa_enabled(void)117 static inline int is_msa_enabled(void)
118 {
119 	if (!cpu_has_msa)
120 		return 0;
121 
122 	return read_c0_config5() & MIPS_CONF5_MSAEN;
123 }
124 
/*
 * thread_msa_context_live() - Test whether the current task has live MSA
 * vector context (the TIF_MSA_CTX_LIVE thread flag).
 */
static inline int thread_msa_context_live(void)
{
	/*
	 * Check cpu_has_msa only if it's a constant. This will allow the
	 * compiler to optimise out code for CPUs without MSA without adding
	 * an extra redundant check for CPUs with MSA.
	 */
	if (__builtin_constant_p(cpu_has_msa) && !cpu_has_msa)
		return 0;

	return test_thread_flag(TIF_MSA_CTX_LIVE);
}
137 
save_msa(struct task_struct * t)138 static inline void save_msa(struct task_struct *t)
139 {
140 	if (cpu_has_msa)
141 		_save_msa(t);
142 }
143 
restore_msa(struct task_struct * t)144 static inline void restore_msa(struct task_struct *t)
145 {
146 	if (cpu_has_msa)
147 		_restore_msa(t);
148 }
149 
/*
 * init_msa_upper() - Initialise the upper halves of the MSA vector
 * registers via _init_msa_upper(), skipped entirely when the kernel is
 * built for CPUs known not to have MSA.
 */
static inline void init_msa_upper(void)
{
	/*
	 * Check cpu_has_msa only if it's a constant. This will allow the
	 * compiler to optimise out code for CPUs without MSA without adding
	 * an extra redundant check for CPUs with MSA.
	 */
	if (__builtin_constant_p(cpu_has_msa) && !cpu_has_msa)
		return;

	_init_msa_upper();
}
162 
163 #ifdef TOOLCHAIN_SUPPORTS_MSA
164 
/*
 * __BUILD_MSA_CTL_REG() - Generate read_msa_<name>()/write_msa_<name>()
 * accessors for MSA control register number cs, using the cfcmsa/ctcmsa
 * instructions which this toolchain can assemble natively (".set msa").
 */
#define __BUILD_MSA_CTL_REG(name, cs)				\
static inline unsigned int read_msa_##name(void)		\
{								\
	unsigned int reg;					\
	__asm__ __volatile__(					\
	"	.set	push\n"					\
	"	.set	msa\n"					\
	"	cfcmsa	%0, $" #cs "\n"				\
	"	.set	pop\n"					\
	: "=r"(reg));						\
	return reg;						\
}								\
								\
static inline void write_msa_##name(unsigned int val)		\
{								\
	__asm__ __volatile__(					\
	"	.set	push\n"					\
	"	.set	msa\n"					\
	"	ctcmsa	$" #cs ", %0\n"				\
	"	.set	pop\n"					\
	: : "r"(val));						\
}
187 
188 #else /* !TOOLCHAIN_SUPPORTS_MSA */
189 
190 /*
191  * Define functions using .word for the c[ft]cmsa instructions in order to
192  * allow compilation with toolchains that do not support MSA. Once all
193  * toolchains in use support MSA these can be removed.
194  */
/*
 * Hand-encoded cfcmsa $1, $0 / ctcmsa $0, $1 instruction words; the real
 * control register number is OR'd into the encoding by the macro below.
 * microMIPS uses different opcodes from the standard MIPS encoding.
 */
#ifdef CONFIG_CPU_MICROMIPS
#define CFC_MSA_INSN	0x587e0056
#define CTC_MSA_INSN	0x583e0816
#else
#define CFC_MSA_INSN	0x787e0059
#define CTC_MSA_INSN	0x783e0819
#endif

/*
 * Same accessor generator as the native-toolchain variant, but emitting
 * the instruction via .word. The control register number cs is inserted
 * at bit 11 (cfcmsa) or bit 6 (ctcmsa), and $1 ($at, hence ".set noat")
 * carries the data to/from the instruction.
 */
#define __BUILD_MSA_CTL_REG(name, cs)				\
static inline unsigned int read_msa_##name(void)		\
{								\
	unsigned int reg;					\
	__asm__ __volatile__(					\
	"	.set	push\n"					\
	"	.set	noat\n"					\
	"	.insn\n"					\
	"	.word	%1 | (" #cs " << 11)\n"			\
	"	move	%0, $1\n"				\
	"	.set	pop\n"					\
	: "=r"(reg) : "i"(CFC_MSA_INSN));			\
	return reg;						\
}								\
								\
static inline void write_msa_##name(unsigned int val)		\
{								\
	__asm__ __volatile__(					\
	"	.set	push\n"					\
	"	.set	noat\n"					\
	"	move	$1, %0\n"				\
	"	.insn\n"					\
	"	.word	%1 | (" #cs " << 6)\n"			\
	"	.set	pop\n"					\
	: : "r"(val), "i"(CTC_MSA_INSN));			\
}
229 
230 #endif /* !TOOLCHAIN_SUPPORTS_MSA */
231 
/* Instantiate read_msa_*()/write_msa_*() for MSA control registers 0-7. */
__BUILD_MSA_CTL_REG(ir, 0)
__BUILD_MSA_CTL_REG(csr, 1)
__BUILD_MSA_CTL_REG(access, 2)
__BUILD_MSA_CTL_REG(save, 3)
__BUILD_MSA_CTL_REG(modify, 4)
__BUILD_MSA_CTL_REG(request, 5)
__BUILD_MSA_CTL_REG(map, 6)
__BUILD_MSA_CTL_REG(unmap, 7)
240 
241 #endif /* !__ASSEMBLY__ */
242 
/*
 * MSA control register numbers; these match the cs arguments passed to
 * __BUILD_MSA_CTL_REG above and are usable from assembly too.
 */
#define MSA_IR		0
#define MSA_CSR		1
#define MSA_ACCESS	2
#define MSA_SAVE	3
#define MSA_MODIFY	4
#define MSA_REQUEST	5
#define MSA_MAP		6
#define MSA_UNMAP	7

/* MSA Implementation Register (MSAIR) */
#define MSA_IR_REVB		0
#define MSA_IR_REVF		(_ULCAST_(0xff) << MSA_IR_REVB)
#define MSA_IR_PROCB		8
#define MSA_IR_PROCF		(_ULCAST_(0xff) << MSA_IR_PROCB)
#define MSA_IR_WRPB		16
#define MSA_IR_WRPF		(_ULCAST_(0x1) << MSA_IR_WRPB)

/*
 * MSA Control & Status Register (MSACSR); fields follow the *B = bit
 * position, *F = field mask naming convention used throughout.
 */
#define MSA_CSR_RMB		0
#define MSA_CSR_RMF		(_ULCAST_(0x3) << MSA_CSR_RMB)
#define MSA_CSR_RM_NEAREST	0
#define MSA_CSR_RM_TO_ZERO	1
#define MSA_CSR_RM_TO_POS	2
#define MSA_CSR_RM_TO_NEG	3
#define MSA_CSR_FLAGSB		2
#define MSA_CSR_FLAGSF		(_ULCAST_(0x1f) << MSA_CSR_FLAGSB)
#define MSA_CSR_FLAGS_IB	2
#define MSA_CSR_FLAGS_IF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_IB)
#define MSA_CSR_FLAGS_UB	3
#define MSA_CSR_FLAGS_UF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_UB)
#define MSA_CSR_FLAGS_OB	4
#define MSA_CSR_FLAGS_OF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_OB)
#define MSA_CSR_FLAGS_ZB	5
#define MSA_CSR_FLAGS_ZF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_ZB)
#define MSA_CSR_FLAGS_VB	6
#define MSA_CSR_FLAGS_VF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_VB)
#define MSA_CSR_ENABLESB	7
#define MSA_CSR_ENABLESF	(_ULCAST_(0x1f) << MSA_CSR_ENABLESB)
#define MSA_CSR_ENABLES_IB	7
#define MSA_CSR_ENABLES_IF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_IB)
#define MSA_CSR_ENABLES_UB	8
#define MSA_CSR_ENABLES_UF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_UB)
#define MSA_CSR_ENABLES_OB	9
#define MSA_CSR_ENABLES_OF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_OB)
#define MSA_CSR_ENABLES_ZB	10
#define MSA_CSR_ENABLES_ZF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_ZB)
#define MSA_CSR_ENABLES_VB	11
#define MSA_CSR_ENABLES_VF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_VB)
#define MSA_CSR_CAUSEB		12
#define MSA_CSR_CAUSEF		(_ULCAST_(0x3f) << MSA_CSR_CAUSEB)
#define MSA_CSR_CAUSE_IB	12
#define MSA_CSR_CAUSE_IF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_IB)
#define MSA_CSR_CAUSE_UB	13
#define MSA_CSR_CAUSE_UF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_UB)
#define MSA_CSR_CAUSE_OB	14
#define MSA_CSR_CAUSE_OF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_OB)
#define MSA_CSR_CAUSE_ZB	15
#define MSA_CSR_CAUSE_ZF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_ZB)
#define MSA_CSR_CAUSE_VB	16
#define MSA_CSR_CAUSE_VF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_VB)
#define MSA_CSR_CAUSE_EB	17
#define MSA_CSR_CAUSE_EF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_EB)
#define MSA_CSR_NXB		18
#define MSA_CSR_NXF		(_ULCAST_(0x1) << MSA_CSR_NXB)
#define MSA_CSR_FSB		24
#define MSA_CSR_FSF		(_ULCAST_(0x1) << MSA_CSR_FSB)
309 
310 #endif /* _ASM_MSA_H */
311