/* SPDX-License-Identifier: GPL-2.0-only */
#ifndef _ARM_SMMU_V3_COMMON_H
#define _ARM_SMMU_V3_COMMON_H

#include <linux/bitfield.h>
#include <linux/bits.h>
#include <linux/types.h>

/* MMIO registers */
#define ARM_SMMU_IDR0 0x0
#define IDR0_ST_LVL GENMASK(28, 27)
#define IDR0_ST_LVL_2LVL 1
#define IDR0_STALL_MODEL GENMASK(25, 24)
#define IDR0_STALL_MODEL_STALL 0
#define IDR0_STALL_MODEL_FORCE 2
#define IDR0_TTENDIAN GENMASK(22, 21)
#define IDR0_TTENDIAN_MIXED 0
#define IDR0_TTENDIAN_LE 2
#define IDR0_TTENDIAN_BE 3
#define IDR0_CD2L (1 << 19)
#define IDR0_VMID16 (1 << 18)
#define IDR0_PRI (1 << 16)
#define IDR0_SEV (1 << 14)
#define IDR0_MSI (1 << 13)
#define IDR0_ASID16 (1 << 12)
#define IDR0_ATS (1 << 10)
#define IDR0_HYP (1 << 9)
#define IDR0_HTTU GENMASK(7, 6)
#define IDR0_HTTU_ACCESS 1
#define IDR0_HTTU_ACCESS_DIRTY 2
#define IDR0_COHACC (1 << 4)
#define IDR0_TTF GENMASK(3, 2)
#define IDR0_TTF_AARCH64 2
#define IDR0_TTF_AARCH32_64 3
#define IDR0_S1P (1 << 1)
#define IDR0_S2P (1 << 0)

#define ARM_SMMU_IDR1 0x4
#define IDR1_TABLES_PRESET (1 << 30)
#define IDR1_QUEUES_PRESET (1 << 29)
#define IDR1_REL (1 << 28)
#define IDR1_ATTR_TYPES_OVR (1 << 27)
#define IDR1_CMDQS GENMASK(25, 21)
#define IDR1_EVTQS GENMASK(20, 16)
#define IDR1_PRIQS GENMASK(15, 11)
#define IDR1_SSIDSIZE GENMASK(10, 6)
#define IDR1_SIDSIZE GENMASK(5, 0)

#define ARM_SMMU_IDR3 0xc
#define IDR3_RIL (1 << 10)

#define ARM_SMMU_IDR5 0x14
#define IDR5_STALL_MAX GENMASK(31, 16)
#define IDR5_GRAN64K (1 << 6)
#define IDR5_GRAN16K (1 << 5)
#define IDR5_GRAN4K (1 << 4)
#define IDR5_OAS GENMASK(2, 0)
#define IDR5_OAS_32_BIT 0
#define IDR5_OAS_36_BIT 1
#define IDR5_OAS_40_BIT 2
#define IDR5_OAS_42_BIT 3
#define IDR5_OAS_44_BIT 4
#define IDR5_OAS_48_BIT 5
#define IDR5_OAS_52_BIT 6
#define IDR5_VAX GENMASK(11, 10)
#define IDR5_VAX_52_BIT 1

#define ARM_SMMU_IIDR 0x18
#define IIDR_PRODUCTID GENMASK(31, 20)
#define IIDR_VARIANT GENMASK(19, 16)
#define IIDR_REVISION GENMASK(15, 12)
#define IIDR_IMPLEMENTER GENMASK(11, 0)

#define ARM_SMMU_CR0 0x20
#define CR0_ATSCHK (1 << 4)
#define CR0_CMDQEN (1 << 3)
#define CR0_EVTQEN (1 << 2)
#define CR0_PRIQEN (1 << 1)
#define CR0_SMMUEN (1 << 0)

#define ARM_SMMU_CR0ACK 0x24

#define ARM_SMMU_CR1 0x28
#define CR1_TABLE_SH GENMASK(11, 10)
#define CR1_TABLE_OC GENMASK(9, 8)
#define CR1_TABLE_IC GENMASK(7, 6)
#define CR1_QUEUE_SH GENMASK(5, 4)
#define CR1_QUEUE_OC GENMASK(3, 2)
#define CR1_QUEUE_IC GENMASK(1, 0)
/* CR1 cacheability fields don't quite follow the usual TCR-style encoding */
#define CR1_CACHE_NC 0
#define CR1_CACHE_WB 1
#define CR1_CACHE_WT 2

#define ARM_SMMU_CR2 0x2c
#define CR2_PTM (1 << 2)
#define CR2_RECINVSID (1 << 1)
#define CR2_E2H (1 << 0)

#define ARM_SMMU_GBPA 0x44
#define GBPA_UPDATE (1 << 31)
#define GBPA_ABORT (1 << 20)

#define ARM_SMMU_IRQ_CTRL 0x50
#define IRQ_CTRL_EVTQ_IRQEN (1 << 2)
#define IRQ_CTRL_PRIQ_IRQEN (1 << 1)
#define IRQ_CTRL_GERROR_IRQEN (1 << 0)

#define ARM_SMMU_IRQ_CTRLACK 0x54

#define ARM_SMMU_GERROR 0x60
#define GERROR_SFM_ERR (1 << 8)
#define GERROR_MSI_GERROR_ABT_ERR (1 << 7)
#define GERROR_MSI_PRIQ_ABT_ERR (1 << 6)
#define GERROR_MSI_EVTQ_ABT_ERR (1 << 5)
#define GERROR_MSI_CMDQ_ABT_ERR (1 << 4)
#define GERROR_PRIQ_ABT_ERR (1 << 3)
#define GERROR_EVTQ_ABT_ERR (1 << 2)
#define GERROR_CMDQ_ERR (1 << 0)
#define GERROR_ERR_MASK 0x1fd

#define ARM_SMMU_GERRORN 0x64

#define ARM_SMMU_GERROR_IRQ_CFG0 0x68
#define ARM_SMMU_GERROR_IRQ_CFG1 0x70
#define ARM_SMMU_GERROR_IRQ_CFG2 0x74

#define ARM_SMMU_STRTAB_BASE 0x80
#define STRTAB_BASE_RA (1UL << 62)
#define STRTAB_BASE_ADDR_MASK GENMASK_ULL(51, 6)

#define ARM_SMMU_STRTAB_BASE_CFG 0x88
#define STRTAB_BASE_CFG_FMT GENMASK(17, 16)
#define STRTAB_BASE_CFG_FMT_LINEAR 0
#define STRTAB_BASE_CFG_FMT_2LVL 1
#define STRTAB_BASE_CFG_SPLIT GENMASK(10, 6)
#define STRTAB_BASE_CFG_LOG2SIZE GENMASK(5, 0)

#define ARM_SMMU_CMDQ_BASE 0x90
#define ARM_SMMU_CMDQ_PROD 0x98
#define ARM_SMMU_CMDQ_CONS 0x9c

#define ARM_SMMU_EVTQ_BASE 0xa0
#define ARM_SMMU_EVTQ_PROD 0xa8
#define ARM_SMMU_EVTQ_CONS 0xac
#define ARM_SMMU_EVTQ_IRQ_CFG0 0xb0
#define ARM_SMMU_EVTQ_IRQ_CFG1 0xb8
#define ARM_SMMU_EVTQ_IRQ_CFG2 0xbc

#define ARM_SMMU_PRIQ_BASE 0xc0
#define ARM_SMMU_PRIQ_PROD 0xc8
#define ARM_SMMU_PRIQ_CONS 0xcc
#define ARM_SMMU_PRIQ_IRQ_CFG0 0xd0
#define ARM_SMMU_PRIQ_IRQ_CFG1 0xd8
#define ARM_SMMU_PRIQ_IRQ_CFG2 0xdc

#define ARM_SMMU_REG_SZ 0xe00

/* Common MSI config fields */
#define MSI_CFG0_ADDR_MASK GENMASK_ULL(51, 2)
#define MSI_CFG2_SH GENMASK(5, 4)
#define MSI_CFG2_MEMATTR GENMASK(3, 0)

/* Common memory attribute values */
#define ARM_SMMU_SH_NSH 0
#define ARM_SMMU_SH_OSH 2
#define ARM_SMMU_SH_ISH 3
#define ARM_SMMU_MEMATTR_DEVICE_nGnRE 0x1
#define ARM_SMMU_MEMATTR_OIWB 0xf
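
/*
 * Illustrative sketch, not part of the original header: how CR1 is typically
 * composed from the cacheability and shareability values above before being
 * written to ARM_SMMU_CR1 (write-back cacheable, inner-shareable table and
 * queue accesses). The function name is hypothetical.
 */
static inline u32 arm_smmu_cr1_wb_ish_example(void)
{
	return FIELD_PREP(CR1_TABLE_SH, ARM_SMMU_SH_ISH) |
	       FIELD_PREP(CR1_TABLE_OC, CR1_CACHE_WB) |
	       FIELD_PREP(CR1_TABLE_IC, CR1_CACHE_WB) |
	       FIELD_PREP(CR1_QUEUE_SH, ARM_SMMU_SH_ISH) |
	       FIELD_PREP(CR1_QUEUE_OC, CR1_CACHE_WB) |
	       FIELD_PREP(CR1_QUEUE_IC, CR1_CACHE_WB);
}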

#define Q_BASE_RWA (1UL << 62)
#define Q_BASE_ADDR_MASK GENMASK_ULL(51, 5)
#define Q_BASE_LOG2SIZE GENMASK(4, 0)
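
/*
 * Illustrative sketch, not part of the original header: composing the value
 * written to a queue base register (ARM_SMMU_CMDQ_BASE and friends) from a
 * queue's DMA address and its log2 size. Function and parameter names are
 * hypothetical.
 */
static inline u64 arm_smmu_q_base_example(dma_addr_t q_base_dma, u32 log2size)
{
	u64 q_base = q_base_dma & Q_BASE_ADDR_MASK;

	q_base |= Q_BASE_RWA;	/* read-allocate hint */
	q_base |= FIELD_PREP(Q_BASE_LOG2SIZE, log2size);
	return q_base;
}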

/*
 * Stream table.
 *
 * Linear: Enough to cover 1 << IDR1.SIDSIZE entries
 * 2lvl: 128k L1 entries,
 *       256 lazy entries per table (each table covers a PCI bus)
 */
#define STRTAB_SPLIT 8

#define STRTAB_L1_DESC_SPAN GENMASK_ULL(4, 0)
#define STRTAB_L1_DESC_L2PTR_MASK GENMASK_ULL(51, 6)

#define STRTAB_STE_DWORDS 8

struct arm_smmu_ste {
	__le64 data[STRTAB_STE_DWORDS];
};

#define STRTAB_NUM_L2_STES (1 << STRTAB_SPLIT)
struct arm_smmu_strtab_l2 {
	struct arm_smmu_ste stes[STRTAB_NUM_L2_STES];
};

struct arm_smmu_strtab_l1 {
	__le64 l2ptr;
};
#define STRTAB_MAX_L1_ENTRIES (1 << 17)

struct arm_smmu_strtab_cfg {
	union {
		struct {
			struct arm_smmu_ste *table;
			dma_addr_t ste_dma;
			unsigned int num_ents;
		} linear;
		struct {
			struct arm_smmu_strtab_l1 *l1tab;
			struct arm_smmu_strtab_l2 **l2ptrs;
			dma_addr_t l1_dma;
			unsigned int num_l1_ents;
		} l2;
	};
};

static inline u32 arm_smmu_strtab_l1_idx(u32 sid)
{
	return sid / STRTAB_NUM_L2_STES;
}

static inline u32 arm_smmu_strtab_l2_idx(u32 sid)
{
	return sid % STRTAB_NUM_L2_STES;
}
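
/*
 * Illustrative sketch, not part of the original header: resolving the STE for
 * a StreamID in a two-level stream table, assuming the relevant L2 table has
 * already been allocated and recorded in cfg->l2.l2ptrs[]. The function name
 * is hypothetical.
 */
static inline struct arm_smmu_ste *
arm_smmu_strtab_ste_example(struct arm_smmu_strtab_cfg *cfg, u32 sid)
{
	struct arm_smmu_strtab_l2 *l2;

	l2 = cfg->l2.l2ptrs[arm_smmu_strtab_l1_idx(sid)];
	return &l2->stes[arm_smmu_strtab_l2_idx(sid)];
}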

static inline void arm_smmu_write_strtab_l1_desc(struct arm_smmu_strtab_l1 *dst,
						 dma_addr_t l2ptr_dma)
{
	u64 val = 0;

	val |= FIELD_PREP(STRTAB_L1_DESC_SPAN, STRTAB_SPLIT + 1);
	val |= l2ptr_dma & STRTAB_L1_DESC_L2PTR_MASK;

	/* The HW has 64 bit atomicity with stores to the L2 STE table */
	WRITE_ONCE(dst->l2ptr, cpu_to_le64(val));
}

#define STRTAB_STE_0_V (1UL << 0)
#define STRTAB_STE_0_CFG GENMASK_ULL(3, 1)
#define STRTAB_STE_0_CFG_ABORT 0
#define STRTAB_STE_0_CFG_BYPASS 4
#define STRTAB_STE_0_CFG_S1_TRANS 5
#define STRTAB_STE_0_CFG_S2_TRANS 6

#define STRTAB_STE_0_S1FMT GENMASK_ULL(5, 4)
#define STRTAB_STE_0_S1FMT_LINEAR 0
#define STRTAB_STE_0_S1FMT_64K_L2 2
#define STRTAB_STE_0_S1CTXPTR_MASK GENMASK_ULL(51, 6)
#define STRTAB_STE_0_S1CDMAX GENMASK_ULL(63, 59)

#define STRTAB_STE_1_S1DSS GENMASK_ULL(1, 0)
#define STRTAB_STE_1_S1DSS_TERMINATE 0x0
#define STRTAB_STE_1_S1DSS_BYPASS 0x1
#define STRTAB_STE_1_S1DSS_SSID0 0x2

#define STRTAB_STE_1_S1C_CACHE_NC 0UL
#define STRTAB_STE_1_S1C_CACHE_WBRA 1UL
#define STRTAB_STE_1_S1C_CACHE_WT 2UL
#define STRTAB_STE_1_S1C_CACHE_WB 3UL
#define STRTAB_STE_1_S1CIR GENMASK_ULL(3, 2)
#define STRTAB_STE_1_S1COR GENMASK_ULL(5, 4)
#define STRTAB_STE_1_S1CSH GENMASK_ULL(7, 6)

#define STRTAB_STE_1_S1STALLD (1UL << 27)

#define STRTAB_STE_1_EATS GENMASK_ULL(29, 28)
#define STRTAB_STE_1_EATS_ABT 0UL
#define STRTAB_STE_1_EATS_TRANS 1UL
#define STRTAB_STE_1_EATS_S1CHK 2UL

#define STRTAB_STE_1_STRW GENMASK_ULL(31, 30)
#define STRTAB_STE_1_STRW_NSEL1 0UL
#define STRTAB_STE_1_STRW_EL2 2UL

#define STRTAB_STE_1_SHCFG GENMASK_ULL(45, 44)
#define STRTAB_STE_1_SHCFG_INCOMING 1UL

#define STRTAB_STE_2_S2VMID GENMASK_ULL(15, 0)
#define STRTAB_STE_2_VTCR GENMASK_ULL(50, 32)
#define STRTAB_STE_2_VTCR_S2T0SZ GENMASK_ULL(5, 0)
#define STRTAB_STE_2_VTCR_S2SL0 GENMASK_ULL(7, 6)
#define STRTAB_STE_2_VTCR_S2IR0 GENMASK_ULL(9, 8)
#define STRTAB_STE_2_VTCR_S2OR0 GENMASK_ULL(11, 10)
#define STRTAB_STE_2_VTCR_S2SH0 GENMASK_ULL(13, 12)
#define STRTAB_STE_2_VTCR_S2TG GENMASK_ULL(15, 14)
#define STRTAB_STE_2_VTCR_S2PS GENMASK_ULL(18, 16)
#define STRTAB_STE_2_S2AA64 (1UL << 51)
#define STRTAB_STE_2_S2ENDI (1UL << 52)
#define STRTAB_STE_2_S2PTW (1UL << 54)
#define STRTAB_STE_2_S2S (1UL << 57)
#define STRTAB_STE_2_S2R (1UL << 58)

#define STRTAB_STE_3_S2TTB_MASK GENMASK_ULL(51, 4)
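
/*
 * Illustrative sketch, not part of the original header: the dword 0/1 layout
 * of a bypass STE built from the fields above. Ordering, sync and
 * invalidation requirements for updating a live STE are deliberately left
 * out. The function name is hypothetical.
 */
static inline void arm_smmu_make_bypass_ste_example(struct arm_smmu_ste *ste)
{
	ste->data[0] = cpu_to_le64(STRTAB_STE_0_V |
				   FIELD_PREP(STRTAB_STE_0_CFG,
					      STRTAB_STE_0_CFG_BYPASS));
	ste->data[1] = cpu_to_le64(FIELD_PREP(STRTAB_STE_1_SHCFG,
					      STRTAB_STE_1_SHCFG_INCOMING));
}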

/*
 * Context descriptors.
 *
 * Linear: when less than 1024 SSIDs are supported
 * 2lvl: at most 1024 L1 entries,
 *       1024 lazy entries per table.
 */
#define CTXDESC_L2_ENTRIES 1024

#define CTXDESC_L1_DESC_V (1UL << 0)
#define CTXDESC_L1_DESC_L2PTR_MASK GENMASK_ULL(51, 12)

#define CTXDESC_CD_DWORDS 8

struct arm_smmu_cd {
	__le64 data[CTXDESC_CD_DWORDS];
};

struct arm_smmu_cdtab_l2 {
	struct arm_smmu_cd cds[CTXDESC_L2_ENTRIES];
};

struct arm_smmu_cdtab_l1 {
	__le64 l2ptr;
};

static inline unsigned int arm_smmu_cdtab_l1_idx(unsigned int ssid)
{
	return ssid / CTXDESC_L2_ENTRIES;
}

static inline unsigned int arm_smmu_cdtab_l2_idx(unsigned int ssid)
{
	return ssid % CTXDESC_L2_ENTRIES;
}
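
/*
 * Illustrative sketch, not part of the original header: writing a CD table L1
 * descriptor, mirroring arm_smmu_write_strtab_l1_desc() above. l2ptr_dma is
 * the DMA address of a struct arm_smmu_cdtab_l2. The function name is
 * hypothetical.
 */
static inline void arm_smmu_write_cdtab_l1_desc_example(struct arm_smmu_cdtab_l1 *dst,
							dma_addr_t l2ptr_dma)
{
	u64 val = CTXDESC_L1_DESC_V | (l2ptr_dma & CTXDESC_L1_DESC_L2PTR_MASK);

	/* A single 64-bit store keeps the descriptor update atomic for the HW */
	WRITE_ONCE(dst->l2ptr, cpu_to_le64(val));
}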

#define CTXDESC_CD_0_TCR_T0SZ GENMASK_ULL(5, 0)
#define CTXDESC_CD_0_TCR_TG0 GENMASK_ULL(7, 6)
#define CTXDESC_CD_0_TCR_IRGN0 GENMASK_ULL(9, 8)
#define CTXDESC_CD_0_TCR_ORGN0 GENMASK_ULL(11, 10)
#define CTXDESC_CD_0_TCR_SH0 GENMASK_ULL(13, 12)
#define CTXDESC_CD_0_TCR_EPD0 (1ULL << 14)
#define CTXDESC_CD_0_TCR_EPD1 (1ULL << 30)

#define CTXDESC_CD_0_ENDI (1UL << 15)
#define CTXDESC_CD_0_V (1UL << 31)

#define CTXDESC_CD_0_TCR_IPS GENMASK_ULL(34, 32)
#define CTXDESC_CD_0_TCR_TBI0 (1ULL << 38)

#define CTXDESC_CD_0_TCR_HA (1UL << 43)
#define CTXDESC_CD_0_TCR_HD (1UL << 42)

#define CTXDESC_CD_0_AA64 (1UL << 41)
#define CTXDESC_CD_0_S (1UL << 44)
#define CTXDESC_CD_0_R (1UL << 45)
#define CTXDESC_CD_0_A (1UL << 46)
#define CTXDESC_CD_0_ASET (1UL << 47)
#define CTXDESC_CD_0_ASID GENMASK_ULL(63, 48)

#define CTXDESC_CD_1_TTB0_MASK GENMASK_ULL(51, 4)

/*
 * When the SMMU only supports linear context descriptor tables, pick a
 * reasonable size limit (64kB).
 */
#define CTXDESC_LINEAR_CDMAX ilog2(SZ_64K / sizeof(struct arm_smmu_cd))
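/*
 * For reference: with 64-byte CDs the define above evaluates to
 * ilog2(SZ_64K / 64) = 10, i.e. at most 1024 linear context descriptors.
 */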

/* Command queue */
#define CMDQ_ENT_SZ_SHIFT 4
#define CMDQ_ENT_DWORDS ((1 << CMDQ_ENT_SZ_SHIFT) >> 3)
#define CMDQ_MAX_SZ_SHIFT (Q_MAX_SZ_SHIFT - CMDQ_ENT_SZ_SHIFT)

#define CMDQ_CONS_ERR GENMASK(30, 24)
#define CMDQ_ERR_CERROR_NONE_IDX 0
#define CMDQ_ERR_CERROR_ILL_IDX 1
#define CMDQ_ERR_CERROR_ABT_IDX 2
#define CMDQ_ERR_CERROR_ATC_INV_IDX 3

#define CMDQ_0_OP GENMASK_ULL(7, 0)
#define CMDQ_0_SSV (1UL << 11)

#define CMDQ_PREFETCH_0_SID GENMASK_ULL(63, 32)
#define CMDQ_PREFETCH_1_SIZE GENMASK_ULL(4, 0)
#define CMDQ_PREFETCH_1_ADDR_MASK GENMASK_ULL(63, 12)

#define CMDQ_CFGI_0_SSID GENMASK_ULL(31, 12)
#define CMDQ_CFGI_0_SID GENMASK_ULL(63, 32)
#define CMDQ_CFGI_1_LEAF (1UL << 0)
#define CMDQ_CFGI_1_RANGE GENMASK_ULL(4, 0)

#define CMDQ_TLBI_0_NUM GENMASK_ULL(16, 12)
#define CMDQ_TLBI_RANGE_NUM_MAX 31
#define CMDQ_TLBI_0_SCALE GENMASK_ULL(24, 20)
#define CMDQ_TLBI_0_VMID GENMASK_ULL(47, 32)
#define CMDQ_TLBI_0_ASID GENMASK_ULL(63, 48)
#define CMDQ_TLBI_1_LEAF (1UL << 0)
#define CMDQ_TLBI_1_TTL GENMASK_ULL(9, 8)
#define CMDQ_TLBI_1_TG GENMASK_ULL(11, 10)
#define CMDQ_TLBI_1_VA_MASK GENMASK_ULL(63, 12)
#define CMDQ_TLBI_1_IPA_MASK GENMASK_ULL(51, 12)

#define CMDQ_ATC_0_SSID GENMASK_ULL(31, 12)
#define CMDQ_ATC_0_SID GENMASK_ULL(63, 32)
#define CMDQ_ATC_0_GLOBAL (1UL << 9)
#define CMDQ_ATC_1_SIZE GENMASK_ULL(5, 0)
#define CMDQ_ATC_1_ADDR_MASK GENMASK_ULL(63, 12)

#define CMDQ_PRI_0_SSID GENMASK_ULL(31, 12)
#define CMDQ_PRI_0_SID GENMASK_ULL(63, 32)
#define CMDQ_PRI_1_GRPID GENMASK_ULL(8, 0)
#define CMDQ_PRI_1_RESP GENMASK_ULL(13, 12)

#define CMDQ_RESUME_0_RESP_TERM 0UL
#define CMDQ_RESUME_0_RESP_RETRY 1UL
#define CMDQ_RESUME_0_RESP_ABORT 2UL
#define CMDQ_RESUME_0_RESP GENMASK_ULL(13, 12)
#define CMDQ_RESUME_0_SID GENMASK_ULL(63, 32)
#define CMDQ_RESUME_1_STAG GENMASK_ULL(15, 0)

#define CMDQ_SYNC_0_CS GENMASK_ULL(13, 12)
#define CMDQ_SYNC_0_CS_NONE 0
#define CMDQ_SYNC_0_CS_IRQ 1
#define CMDQ_SYNC_0_CS_SEV 2
#define CMDQ_SYNC_0_MSH GENMASK_ULL(23, 22)
#define CMDQ_SYNC_0_MSIATTR GENMASK_ULL(27, 24)
#define CMDQ_SYNC_0_MSIDATA GENMASK_ULL(63, 32)
#define CMDQ_SYNC_1_MSIADDR_MASK GENMASK_ULL(51, 2)

/* Event queue */
#define EVTQ_ENT_SZ_SHIFT 5
#define EVTQ_ENT_DWORDS ((1 << EVTQ_ENT_SZ_SHIFT) >> 3)
#define EVTQ_MAX_SZ_SHIFT (Q_MAX_SZ_SHIFT - EVTQ_ENT_SZ_SHIFT)

#define EVTQ_0_ID GENMASK_ULL(7, 0)

#define EVT_ID_TRANSLATION_FAULT 0x10
#define EVT_ID_ADDR_SIZE_FAULT 0x11
#define EVT_ID_ACCESS_FAULT 0x12
#define EVT_ID_PERMISSION_FAULT 0x13

#define EVTQ_0_SSV (1UL << 11)
#define EVTQ_0_SSID GENMASK_ULL(31, 12)
#define EVTQ_0_SID GENMASK_ULL(63, 32)
#define EVTQ_1_STAG GENMASK_ULL(15, 0)
#define EVTQ_1_STALL (1UL << 31)
#define EVTQ_1_PnU (1UL << 33)
#define EVTQ_1_InD (1UL << 34)
#define EVTQ_1_RnW (1UL << 35)
#define EVTQ_1_S2 (1UL << 39)
#define EVTQ_1_CLASS GENMASK_ULL(41, 40)
#define EVTQ_1_TT_READ (1UL << 44)
#define EVTQ_2_ADDR GENMASK_ULL(63, 0)
#define EVTQ_3_IPA GENMASK_ULL(51, 12)

/* PRI queue */
#define PRIQ_ENT_SZ_SHIFT 4
#define PRIQ_ENT_DWORDS ((1 << PRIQ_ENT_SZ_SHIFT) >> 3)
#define PRIQ_MAX_SZ_SHIFT (Q_MAX_SZ_SHIFT - PRIQ_ENT_SZ_SHIFT)

#define PRIQ_0_SID GENMASK_ULL(31, 0)
#define PRIQ_0_SSID GENMASK_ULL(51, 32)
#define PRIQ_0_PERM_PRIV (1UL << 58)
#define PRIQ_0_PERM_EXEC (1UL << 59)
#define PRIQ_0_PERM_READ (1UL << 60)
#define PRIQ_0_PERM_WRITE (1UL << 61)
#define PRIQ_0_PRG_LAST (1UL << 62)
#define PRIQ_0_SSID_V (1UL << 63)

#define PRIQ_1_PRG_IDX GENMASK_ULL(8, 0)
#define PRIQ_1_ADDR_MASK GENMASK_ULL(63, 12)

/* Synthesized features */
#define ARM_SMMU_FEAT_2_LVL_STRTAB (1 << 0)
#define ARM_SMMU_FEAT_2_LVL_CDTAB (1 << 1)
#define ARM_SMMU_FEAT_TT_LE (1 << 2)
#define ARM_SMMU_FEAT_TT_BE (1 << 3)
#define ARM_SMMU_FEAT_PRI (1 << 4)
#define ARM_SMMU_FEAT_ATS (1 << 5)
#define ARM_SMMU_FEAT_SEV (1 << 6)
#define ARM_SMMU_FEAT_MSI (1 << 7)
#define ARM_SMMU_FEAT_COHERENCY (1 << 8)
#define ARM_SMMU_FEAT_TRANS_S1 (1 << 9)
#define ARM_SMMU_FEAT_TRANS_S2 (1 << 10)
#define ARM_SMMU_FEAT_STALLS (1 << 11)
#define ARM_SMMU_FEAT_HYP (1 << 12)
#define ARM_SMMU_FEAT_STALL_FORCE (1 << 13)
#define ARM_SMMU_FEAT_VAX (1 << 14)
#define ARM_SMMU_FEAT_RANGE_INV (1 << 15)
#define ARM_SMMU_FEAT_BTM (1 << 16)
#define ARM_SMMU_FEAT_SVA (1 << 17)
#define ARM_SMMU_FEAT_E2H (1 << 18)
#define ARM_SMMU_FEAT_NESTING (1 << 19)
#define ARM_SMMU_FEAT_ATTR_TYPES_OVR (1 << 20)
#define ARM_SMMU_FEAT_HA (1 << 21)
#define ARM_SMMU_FEAT_HD (1 << 22)

enum pri_resp {
	PRI_RESP_DENY = 0,
	PRI_RESP_FAIL = 1,
	PRI_RESP_SUCC = 2,
};

struct arm_smmu_cmdq_ent {
	/* Common fields */
	u8 opcode;
	bool substream_valid;

	/* Command-specific fields */
	union {
		#define CMDQ_OP_PREFETCH_CFG 0x1
		struct {
			u32 sid;
		} prefetch;

		#define CMDQ_OP_CFGI_STE 0x3
		#define CMDQ_OP_CFGI_ALL 0x4
		#define CMDQ_OP_CFGI_CD 0x5
		#define CMDQ_OP_CFGI_CD_ALL 0x6
		struct {
			u32 sid;
			u32 ssid;
			union {
				bool leaf;
				u8 span;
			};
		} cfgi;

		#define CMDQ_OP_TLBI_NH_ASID 0x11
		#define CMDQ_OP_TLBI_NH_VA 0x12
		#define CMDQ_OP_TLBI_EL2_ALL 0x20
		#define CMDQ_OP_TLBI_EL2_ASID 0x21
		#define CMDQ_OP_TLBI_EL2_VA 0x22
		#define CMDQ_OP_TLBI_S12_VMALL 0x28
		#define CMDQ_OP_TLBI_S2_IPA 0x2a
		#define CMDQ_OP_TLBI_NSNH_ALL 0x30
		struct {
			u8 num;
			u8 scale;
			u16 asid;
			u16 vmid;
			bool leaf;
			u8 ttl;
			u8 tg;
			u64 addr;
		} tlbi;

		#define CMDQ_OP_ATC_INV 0x40
		#define ATC_INV_SIZE_ALL 52
		struct {
			u32 sid;
			u32 ssid;
			u64 addr;
			u8 size;
			bool global;
		} atc;

		#define CMDQ_OP_PRI_RESP 0x41
		struct {
			u32 sid;
			u32 ssid;
			u16 grpid;
			enum pri_resp resp;
		} pri;

		#define CMDQ_OP_RESUME 0x44
		struct {
			u32 sid;
			u16 stag;
			u8 resp;
		} resume;

		#define CMDQ_OP_CMD_SYNC 0x46
		struct {
			u64 msiaddr;
		} sync;
	};
};
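
/*
 * Illustrative sketch, not part of the original header: encoding a
 * CMDQ_OP_CFGI_STE entry into its raw CMDQ_ENT_DWORDS representation using
 * the field definitions above. The driver's real encoder handles every
 * opcode; the function name here is hypothetical.
 */
static inline void arm_smmu_build_cfgi_ste_cmd_example(u64 cmd[CMDQ_ENT_DWORDS],
							const struct arm_smmu_cmdq_ent *ent)
{
	cmd[0] = FIELD_PREP(CMDQ_0_OP, ent->opcode) |
		 FIELD_PREP(CMDQ_CFGI_0_SID, ent->cfgi.sid);
	cmd[1] = FIELD_PREP(CMDQ_CFGI_1_LEAF, ent->cfgi.leaf);
}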

#define Q_OVERFLOW_FLAG (1U << 31)
#define Q_OVF(p) ((p) & Q_OVERFLOW_FLAG)

/*
 * This is used to size the command queue and therefore must be at least
 * BITS_PER_LONG so that the valid_map works correctly (it relies on the
 * total number of queue entries being a multiple of BITS_PER_LONG).
 */
#define CMDQ_BATCH_ENTRIES BITS_PER_LONG

struct arm_smmu_cmdq_batch {
	u64 cmds[CMDQ_BATCH_ENTRIES * CMDQ_ENT_DWORDS];
	struct arm_smmu_cmdq *cmdq;
	int num;
};
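
/*
 * Illustrative sketch, not part of the original header: appending one encoded
 * command to a batch. Flushing the batch once ->num reaches
 * CMDQ_BATCH_ENTRIES is left to the driver's command-queue issue path. The
 * function name is hypothetical.
 */
static inline void arm_smmu_cmdq_batch_add_example(struct arm_smmu_cmdq_batch *cmds,
						   const u64 cmd[CMDQ_ENT_DWORDS])
{
	int i;

	for (i = 0; i < CMDQ_ENT_DWORDS; i++)
		cmds->cmds[cmds->num * CMDQ_ENT_DWORDS + i] = cmd[i];
	cmds->num++;
}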

#endif /* _ARM_SMMU_V3_COMMON_H */