/*
 * Copyright (C) 2013 Imagination Technologies
 * Author: Paul Burton <paul.burton@imgtec.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
 * Free Software Foundation; either version 2 of the License, or (at your
 * option) any later version.
 */
#ifndef _ASM_MSA_H
#define _ASM_MSA_H

#include <asm/mipsregs.h>

#ifndef __ASSEMBLY__

extern void _save_msa(struct task_struct *);
extern void _restore_msa(struct task_struct *);
extern void _init_msa_upper(void);
extern void _restore_msa_uppers_from_thread(union fpureg *);

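/* Enable MSA: set the Config5.MSAEn bit, then clear the resulting hazard. */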
static inline void enable_msa(void)
{
	if (cpu_has_msa) {
		set_c0_config5(MIPS_CONF5_MSAEN);
		enable_fpu_hazard();
	}
}

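/* Disable MSA: clear the Config5.MSAEn bit, then clear the resulting hazard. */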
static inline void disable_msa(void)
{
	if (cpu_has_msa) {
		clear_c0_config5(MIPS_CONF5_MSAEN);
		disable_fpu_hazard();
	}
}

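/* Return non-zero if MSA is currently enabled, i.e. Config5.MSAEn is set. */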
static inline int is_msa_enabled(void)
{
	if (!cpu_has_msa)
		return 0;

	return read_c0_config5() & MIPS_CONF5_MSAEN;
}

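/*
 * Return non-zero if the current task has live MSA vector context, i.e.
 * TIF_MSA_CTX_LIVE is set in its thread flags.
 */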
static inline int thread_msa_context_live(void)
{
	/*
	 * Check cpu_has_msa only if it's a constant. This will allow the
	 * compiler to optimise out code for CPUs without MSA without adding
	 * an extra redundant check for CPUs with MSA.
	 */
	if (__builtin_constant_p(cpu_has_msa) && !cpu_has_msa)
		return 0;

	return test_thread_flag(TIF_MSA_CTX_LIVE);
}

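/* Save the MSA vector context of task @t, if the CPU supports MSA. */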
static inline void save_msa(struct task_struct *t)
{
	if (cpu_has_msa)
		_save_msa(t);
}

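/* Restore the MSA vector context of task @t, if the CPU supports MSA. */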
static inline void restore_msa(struct task_struct *t)
{
	if (cpu_has_msa)
		_restore_msa(t);
}

#ifdef TOOLCHAIN_SUPPORTS_MSA

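/*
 * Generate read_msa_<name>()/write_msa_<name>() accessors for MSA control
 * register number <cs>, using the cfcmsa/ctcmsa instructions directly when
 * the toolchain can assemble them.
 */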
#define __BUILD_MSA_CTL_REG(name, cs)				\
static inline unsigned int read_msa_##name(void)		\
{								\
	unsigned int reg;					\
	__asm__ __volatile__(					\
	"	.set	push\n"					\
	"	.set	fp=64\n"				\
	"	.set	msa\n"					\
	"	cfcmsa	%0, $" #cs "\n"				\
	"	.set	pop\n"					\
	: "=r"(reg));						\
	return reg;						\
}								\
								\
static inline void write_msa_##name(unsigned int val)		\
{								\
	__asm__ __volatile__(					\
	"	.set	push\n"					\
	"	.set	fp=64\n"				\
	"	.set	msa\n"					\
	"	ctcmsa	$" #cs ", %0\n"				\
	"	.set	pop\n"					\
	: : "r"(val));						\
}

#else /* !TOOLCHAIN_SUPPORTS_MSA */

/*
 * Define functions using .word for the c[ft]cmsa instructions in order to
 * allow compilation with toolchains that do not support MSA. Once all
 * toolchains in use support MSA these can be removed.
 */
#ifdef CONFIG_CPU_MICROMIPS
#define CFC_MSA_INSN	0x587e0056
#define CTC_MSA_INSN	0x583e0816
#else
#define CFC_MSA_INSN	0x787e0059
#define CTC_MSA_INSN	0x783e0819
#endif

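/*
 * As above, but built on the pre-encoded instruction words: the transfer
 * goes through $1 ($at), hence the .set noat, and the control register
 * number is OR'd into the word at bit 11 (cfcmsa) or bit 6 (ctcmsa).
 */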
#define __BUILD_MSA_CTL_REG(name, cs)				\
static inline unsigned int read_msa_##name(void)		\
{								\
	unsigned int reg;					\
	__asm__ __volatile__(					\
	"	.set	push\n"					\
	"	.set	noat\n"					\
	"	.insn\n"					\
	"	.word	%1 | (" #cs " << 11)\n"			\
	"	move	%0, $1\n"				\
	"	.set	pop\n"					\
	: "=r"(reg) : "i"(CFC_MSA_INSN));			\
	return reg;						\
}								\
								\
static inline void write_msa_##name(unsigned int val)		\
{								\
	__asm__ __volatile__(					\
	"	.set	push\n"					\
	"	.set	noat\n"					\
	"	move	$1, %0\n"				\
	"	.insn\n"					\
	"	.word	%1 | (" #cs " << 6)\n"			\
	"	.set	pop\n"					\
	: : "r"(val), "i"(CTC_MSA_INSN));			\
}

#endif /* !TOOLCHAIN_SUPPORTS_MSA */

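/*
 * Instantiate accessors for the eight MSA control registers, e.g.
 * read_msa_csr()/write_msa_csr() for the control & status register.
 */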
__BUILD_MSA_CTL_REG(ir, 0)
__BUILD_MSA_CTL_REG(csr, 1)
__BUILD_MSA_CTL_REG(access, 2)
__BUILD_MSA_CTL_REG(save, 3)
__BUILD_MSA_CTL_REG(modify, 4)
__BUILD_MSA_CTL_REG(request, 5)
__BUILD_MSA_CTL_REG(map, 6)
__BUILD_MSA_CTL_REG(unmap, 7)

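/*
 * Illustrative sketch: a caller might combine these helpers roughly as
 * follows, assuming it runs in a context where touching MSA state is
 * permitted:
 *
 *	if (cpu_has_msa) {
 *		enable_msa();
 *		write_msa_csr(read_msa_csr() & ~MSA_CSR_CAUSEF);
 *		disable_msa();
 *	}
 */
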
#endif /* !__ASSEMBLY__ */

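/* Indices of the eight MSA control registers, also usable from assembly. */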
#define MSA_IR		0
#define MSA_CSR		1
#define MSA_ACCESS	2
#define MSA_SAVE	3
#define MSA_MODIFY	4
#define MSA_REQUEST	5
#define MSA_MAP		6
#define MSA_UNMAP	7

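/*
 * In the field definitions below, a _B suffix gives a field's least
 * significant bit position and a _F suffix the corresponding mask.
 */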
/* MSA Implementation Register (MSAIR) */
#define MSA_IR_REVB		0
#define MSA_IR_REVF		(_ULCAST_(0xff) << MSA_IR_REVB)
#define MSA_IR_PROCB		8
#define MSA_IR_PROCF		(_ULCAST_(0xff) << MSA_IR_PROCB)
#define MSA_IR_WRPB		16
#define MSA_IR_WRPF		(_ULCAST_(0x1) << MSA_IR_WRPB)

/* MSA Control & Status Register (MSACSR) */
#define MSA_CSR_RMB		0
#define MSA_CSR_RMF		(_ULCAST_(0x3) << MSA_CSR_RMB)
#define MSA_CSR_RM_NEAREST	0
#define MSA_CSR_RM_TO_ZERO	1
#define MSA_CSR_RM_TO_POS	2
#define MSA_CSR_RM_TO_NEG	3
#define MSA_CSR_FLAGSB		2
#define MSA_CSR_FLAGSF		(_ULCAST_(0x1f) << MSA_CSR_FLAGSB)
#define MSA_CSR_FLAGS_IB	2
#define MSA_CSR_FLAGS_IF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_IB)
#define MSA_CSR_FLAGS_UB	3
#define MSA_CSR_FLAGS_UF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_UB)
#define MSA_CSR_FLAGS_OB	4
#define MSA_CSR_FLAGS_OF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_OB)
#define MSA_CSR_FLAGS_ZB	5
#define MSA_CSR_FLAGS_ZF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_ZB)
#define MSA_CSR_FLAGS_VB	6
#define MSA_CSR_FLAGS_VF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_VB)
#define MSA_CSR_ENABLESB	7
#define MSA_CSR_ENABLESF	(_ULCAST_(0x1f) << MSA_CSR_ENABLESB)
#define MSA_CSR_ENABLES_IB	7
#define MSA_CSR_ENABLES_IF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_IB)
#define MSA_CSR_ENABLES_UB	8
#define MSA_CSR_ENABLES_UF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_UB)
#define MSA_CSR_ENABLES_OB	9
#define MSA_CSR_ENABLES_OF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_OB)
#define MSA_CSR_ENABLES_ZB	10
#define MSA_CSR_ENABLES_ZF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_ZB)
#define MSA_CSR_ENABLES_VB	11
#define MSA_CSR_ENABLES_VF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_VB)
#define MSA_CSR_CAUSEB		12
#define MSA_CSR_CAUSEF		(_ULCAST_(0x3f) << MSA_CSR_CAUSEB)
#define MSA_CSR_CAUSE_IB	12
#define MSA_CSR_CAUSE_IF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_IB)
#define MSA_CSR_CAUSE_UB	13
#define MSA_CSR_CAUSE_UF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_UB)
#define MSA_CSR_CAUSE_OB	14
#define MSA_CSR_CAUSE_OF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_OB)
#define MSA_CSR_CAUSE_ZB	15
#define MSA_CSR_CAUSE_ZF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_ZB)
#define MSA_CSR_CAUSE_VB	16
#define MSA_CSR_CAUSE_VF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_VB)
#define MSA_CSR_CAUSE_EB	17
#define MSA_CSR_CAUSE_EF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_EB)
#define MSA_CSR_NXB		18
#define MSA_CSR_NXF		(_ULCAST_(0x1) << MSA_CSR_NXB)
#define MSA_CSR_FSB		24
#define MSA_CSR_FSF		(_ULCAST_(0x1) << MSA_CSR_FSB)

#endif /* _ASM_MSA_H */