• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1#------------------------------------------------------------------------------
2#
3# Copyright (c) 2008 - 2010, Apple Inc. All rights reserved.<BR>
4# Copyright (c) 2011 - 2014, ARM Limited. All rights reserved.
5# Copyright (c) 2016, Linaro Limited. All rights reserved.
6#
7# This program and the accompanying materials
8# are licensed and made available under the terms and conditions of the BSD License
9# which accompanies this distribution.  The full text of the license may be found at
10# http://opensource.org/licenses/bsd-license.php
11#
12# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
13# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
14#
15#------------------------------------------------------------------------------
16
17#include <Chipset/AArch64.h>
18#include <AsmMacroIoLibV8.h>
19
// SCTLR_ELx bit masks used by the enable/disable helpers below, plus the
// CPACR_EL1 FP-enable field used by ArmEnableVFP.
.set CTRL_M_BIT,      (1 << 0)      // M: MMU enable
.set CTRL_A_BIT,      (1 << 1)      // A: alignment-fault checking enable
.set CTRL_C_BIT,      (1 << 2)      // C: data/unified cache enable
.set CTRL_I_BIT,      (1 << 12)     // I: instruction cache enable
.set CTRL_V_BIT,      (1 << 12)     // NOTE(review): same value as CTRL_I_BIT. AArch64 SCTLR has no
                                    // V (vectors) bit (that was AArch32 bit 13); this looks like a
                                    // stale carry-over and appears unused here -- confirm before use.
.set CPACR_VFP_BITS,  (3 << 20)     // CPACR_EL1.FPEN = 0b11: no FP/SIMD trapping at EL0/EL1
26
// VOID ArmInvalidateDataCacheEntryByMVA (UINTN Address /* x0 */);
// Invalidate one data cache line by virtual address, to the Point of Coherency.
ASM_FUNC(ArmInvalidateDataCacheEntryByMVA)
  dc      ivac, x0    // Invalidate single data cache line
  ret
30
31
// VOID ArmCleanDataCacheEntryByMVA (UINTN Address /* x0 */);
// Clean (write back) one data cache line by virtual address, to the Point of Coherency.
ASM_FUNC(ArmCleanDataCacheEntryByMVA)
  dc      cvac, x0    // Clean single data cache line
  ret
35
36
// VOID ArmCleanDataCacheEntryToPoUByMVA (UINTN Address /* x0 */);
// Clean one data cache line by virtual address, but only to the Point of
// Unification (used e.g. before instruction cache invalidation).
ASM_FUNC(ArmCleanDataCacheEntryToPoUByMVA)
  dc      cvau, x0    // Clean single data cache line to PoU
  ret
40
// VOID ArmInvalidateInstructionCacheEntryToPoUByMVA (UINTN Address /* x0 */);
// Invalidate one instruction cache line by virtual address, to the Point of
// Unification. Caller is responsible for any required dsb/isb sequencing.
ASM_FUNC(ArmInvalidateInstructionCacheEntryToPoUByMVA)
  ic      ivau, x0    // Invalidate single instruction cache line to PoU
  ret
44
45
// VOID ArmCleanInvalidateDataCacheEntryByMVA (UINTN Address /* x0 */);
// Clean and invalidate one data cache line by virtual address, to the Point of Coherency.
ASM_FUNC(ArmCleanInvalidateDataCacheEntryByMVA)
  dc      civac, x0   // Clean and invalidate single data cache line
  ret
49
50
// VOID ArmInvalidateDataCacheEntryBySetWay (UINTN SetWayFormat /* x0 */);
// x0 is the encoded Set/Way/Level value (as built by AArch64AllDataCachesOperation).
ASM_FUNC(ArmInvalidateDataCacheEntryBySetWay)
  dc      isw, x0     // Invalidate this line
  ret
54
55
// VOID ArmCleanInvalidateDataCacheEntryBySetWay (UINTN SetWayFormat /* x0 */);
// x0 is the encoded Set/Way/Level value (as built by AArch64AllDataCachesOperation).
ASM_FUNC(ArmCleanInvalidateDataCacheEntryBySetWay)
  dc      cisw, x0    // Clean and Invalidate this line
  ret
59
60
// VOID ArmCleanDataCacheEntryBySetWay (UINTN SetWayFormat /* x0 */);
// x0 is the encoded Set/Way/Level value (as built by AArch64AllDataCachesOperation).
ASM_FUNC(ArmCleanDataCacheEntryBySetWay)
  dc      csw, x0     // Clean this line
  ret
64
65
// VOID ArmInvalidateInstructionCache (VOID);
// Invalidate the entire instruction cache (to PoU, local PE) and synchronize.
ASM_FUNC(ArmInvalidateInstructionCache)
  ic      iallu       // Invalidate entire instruction cache
  dsb     sy          // Ensure completion of the invalidation
  isb                 // Flush the pipeline so stale instructions are discarded
  ret
71
72
// VOID ArmEnableMmu (VOID);
// Set SCTLR_ELx.M for the current exception level. The local TLBs are
// invalidated first so no stale translations are used once the MMU is on.
// The EL1_OR_EL2_OR_EL3 macro (AsmMacroIoLibV8.h) dispatches on CurrentEL
// to the numeric labels 1:/2:/3:; x1 is its scratch register.
ASM_FUNC(ArmEnableMmu)
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1       // Read System control register EL1
   b       4f
2: mrs     x0, sctlr_el2       // Read System control register EL2
   b       4f
3: mrs     x0, sctlr_el3       // Read System control register EL3
4: orr     x0, x0, #CTRL_M_BIT // Set MMU enable bit
   EL1_OR_EL2_OR_EL3(x1)
1: tlbi    vmalle1             // Invalidate all EL1 TLB entries before enabling
   dsb     nsh                 // Wait for invalidation to complete (local PE only)
   isb
   msr     sctlr_el1, x0       // Write back
   b       4f
2: tlbi    alle2               // Invalidate all EL2 TLB entries
   dsb     nsh
   isb
   msr     sctlr_el2, x0       // Write back
   b       4f
3: tlbi    alle3               // Invalidate all EL3 TLB entries
   dsb     nsh
   isb
   msr     sctlr_el3, x0       // Write back
4: isb                         // Make the new SCTLR value visible to subsequent instructions
   ret
98
99
// VOID ArmDisableMmu (VOID);
// Clear SCTLR_ELx.M for the current exception level, then invalidate the
// TLBs for that level and synchronize.
ASM_FUNC(ArmDisableMmu)
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Read System Control Register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Read System Control Register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Read System Control Register EL3
4: and     x0, x0, #~CTRL_M_BIT  // Clear MMU enable bit
   EL1_OR_EL2_OR_EL3(x1)
1: msr     sctlr_el1, x0        // Write back
   tlbi    vmalle1              // Drop now-stale EL1 translations
   b       4f
2: msr     sctlr_el2, x0        // Write back
   tlbi    alle2
   b       4f
3: msr     sctlr_el3, x0        // Write back
   tlbi    alle3
4: dsb     sy                   // Complete the write-back and TLB maintenance
   isb
   ret
120
121
// VOID ArmDisableCachesAndMmu (VOID);
// Clear the M, C and I bits in SCTLR_ELx in one register write: MMU off,
// data/unified caches disabled, instruction cache disabled.
// Note: the combined mask is not a valid single logical immediate, hence
// the mov-into-x1 before the and.
ASM_FUNC(ArmDisableCachesAndMmu)
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Get control register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Get control register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Get control register EL3
4: mov     x1, #~(CTRL_M_BIT | CTRL_C_BIT | CTRL_I_BIT)  // Disable MMU, D & I caches
   and     x0, x0, x1
   EL1_OR_EL2_OR_EL3(x1)
1: msr     sctlr_el1, x0        // Write back control register
   b       4f
2: msr     sctlr_el2, x0        // Write back control register
   b       4f
3: msr     sctlr_el3, x0        // Write back control register
4: dsb     sy
   isb
   ret
140
141
// BOOLEAN ArmMmuEnabled (VOID);
// Returns SCTLR_ELx.M of the current exception level in x0:
// non-zero (CTRL_M_BIT) when the MMU is enabled, 0 otherwise.
ASM_FUNC(ArmMmuEnabled)
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Get control register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Get control register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Get control register EL3
4: and     x0, x0, #CTRL_M_BIT  // Isolate the MMU-enable bit as the return value
   ret
151
152
// VOID ArmEnableDataCache (VOID);
// Set SCTLR_ELx.C (data/unified cache enable) at the current exception level.
ASM_FUNC(ArmEnableDataCache)
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Get control register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Get control register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Get control register EL3
4: orr     x0, x0, #CTRL_C_BIT  // Set C bit
   EL1_OR_EL2_OR_EL3(x1)
1: msr     sctlr_el1, x0        // Write back control register
   b       4f
2: msr     sctlr_el2, x0        // Write back control register
   b       4f
3: msr     sctlr_el3, x0        // Write back control register
4: dsb     sy
   isb
   ret
170
171
// VOID ArmDisableDataCache (VOID);
// Clear SCTLR_ELx.C at the current exception level. The caller is expected
// to perform any required clean/invalidate of the data caches.
ASM_FUNC(ArmDisableDataCache)
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Get control register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Get control register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Get control register EL3
4: and     x0, x0, #~CTRL_C_BIT  // Clear C bit
   EL1_OR_EL2_OR_EL3(x1)
1: msr     sctlr_el1, x0        // Write back control register
   b       4f
2: msr     sctlr_el2, x0        // Write back control register
   b       4f
3: msr     sctlr_el3, x0        // Write back control register
4: dsb     sy
   isb
   ret
189
190
// VOID ArmEnableInstructionCache (VOID);
// Set SCTLR_ELx.I (instruction cache enable) at the current exception level.
ASM_FUNC(ArmEnableInstructionCache)
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Get control register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Get control register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Get control register EL3
4: orr     x0, x0, #CTRL_I_BIT  // Set I bit
   EL1_OR_EL2_OR_EL3(x1)
1: msr     sctlr_el1, x0        // Write back control register
   b       4f
2: msr     sctlr_el2, x0        // Write back control register
   b       4f
3: msr     sctlr_el3, x0        // Write back control register
4: dsb     sy
   isb
   ret
208
209
// VOID ArmDisableInstructionCache (VOID);
// Clear SCTLR_ELx.I at the current exception level.
ASM_FUNC(ArmDisableInstructionCache)
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Get control register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Get control register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Get control register EL3
4: and     x0, x0, #~CTRL_I_BIT  // Clear I bit
   EL1_OR_EL2_OR_EL3(x1)
1: msr     sctlr_el1, x0        // Write back control register
   b       4f
2: msr     sctlr_el2, x0        // Write back control register
   b       4f
3: msr     sctlr_el3, x0        // Write back control register
4: dsb     sy
   isb
   ret
227
228
// VOID ArmEnableAlignmentCheck (VOID);
// Set SCTLR_ELx.A (alignment fault checking) at the current exception level.
// NOTE(review): unlike the other helpers this only handles EL1/EL2
// (EL1_OR_EL2 macro) -- presumably never called from EL3; confirm.
ASM_FUNC(ArmEnableAlignmentCheck)
   EL1_OR_EL2(x1)
1: mrs     x0, sctlr_el1        // Get control register EL1
   b       3f
2: mrs     x0, sctlr_el2        // Get control register EL2
3: orr     x0, x0, #CTRL_A_BIT  // Set A (alignment check) bit
   EL1_OR_EL2(x1)
1: msr     sctlr_el1, x0        // Write back control register
   b       3f
2: msr     sctlr_el2, x0        // Write back control register
3: dsb     sy
   isb
   ret
242
243
// VOID ArmDisableAlignmentCheck (VOID);
// Clear SCTLR_ELx.A at the current exception level (handles EL1/EL2/EL3,
// whereas ArmEnableAlignmentCheck above handles only EL1/EL2).
ASM_FUNC(ArmDisableAlignmentCheck)
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Get control register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Get control register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Get control register EL3
4: and     x0, x0, #~CTRL_A_BIT  // Clear A (alignment check) bit
   EL1_OR_EL2_OR_EL3(x1)
1: msr     sctlr_el1, x0        // Write back control register
   b       4f
2: msr     sctlr_el2, x0        // Write back control register
   b       4f
3: msr     sctlr_el3, x0        // Write back control register
4: dsb     sy
   isb
   ret
261
262
// VOID ArmEnableBranchPrediction (VOID);
// Always turned on in AArch64. Else implementation specific. Leave in for C compatibility for now
ASM_FUNC(ArmEnableBranchPrediction)
  ret                           // Intentional no-op
266
267
// VOID ArmDisableBranchPrediction (VOID);
// Always turned on in AArch64. Else implementation specific. Leave in for C compatibility for now.
ASM_FUNC(ArmDisableBranchPrediction)
  ret                           // Intentional no-op
271
272
// VOID AArch64AllDataCachesOperation (AARCH64_CACHE_OPERATION DataCacheOperation /* x0 */);
// Walk every data/unified cache level below the Level of Coherency reported
// by CLIDR_EL1 and invoke the supplied set/way operation (one of the
// Arm*DataCacheEntryBySetWay helpers) for every set/way at each level.
//
// Register roles:
//   x1  = operation function pointer (receives the Set/Way value in x0)
//   x3  = LoC * 2 (level kept pre-shifted by 1, CSSELR format)
//   x10 = current level * 2 (CSSELR format; stepped by 2 in L_Skip)
//   x2  = 3 * level (CLIDR field shift), later log2(line length)
//   x4  = max way number, x5 = way shift (from clz), x7 = set counter
ASM_FUNC(AArch64AllDataCachesOperation)
// We can use regs 0-7 and 9-15 without having to save/restore.
// Save our link register on the stack. - The stack must always be quad-word aligned
  str   x30, [sp, #-16]!
  mov   x1, x0                  // Save Function call in x1
  mrs   x6, clidr_el1           // Read EL1 CLIDR
  and   x3, x6, #0x7000000      // Mask out all but Level of Coherency (LoC), bits [26:24]
  lsr   x3, x3, #23             // x3 = LoC * 2 - the level is shifted by 1 to the
                                // right to ease the access to CSSELR and the Set/Way operation.
  cbz   x3, L_Finished          // No need to clean if LoC is 0
  mov   x10, #0                 // Start clean at cache level 0

Loop1:
  add   x2, x10, x10, lsr #1    // Work out 3x cachelevel for cache info (x10 is level*2)
  lsr   x12, x6, x2             // bottom 3 bits are the Cache type for this level
  and   x12, x12, #7            // get those 3 bits alone
  cmp   x12, #2                 // what cache at this level? (>= 2 means data/unified present)
  b.lt  L_Skip                  // no cache or only instruction cache at this level
  msr   csselr_el1, x10         // write the Cache Size selection register with current level (CSSELR)
  isb                           // isb to sync the change to the CacheSizeID reg
  mrs   x12, ccsidr_el1         // reads current Cache Size ID register (CCSIDR)
  and   x2, x12, #0x7           // extract the line length field (LineSize, bits [2:0])
  add   x2, x2, #4              // add 4 for the line length offset (log2 16 bytes)
  mov   x4, #0x400              // build 10-bit mask 0x3FF for the Associativity field
  sub   x4, x4, #1
  and   x4, x4, x12, lsr #3     // x4 is the max number on the way size (right aligned)
  clz   w5, w4                  // w5 is the bit position of the way size increment
  mov   x7, #0x00008000         // build 15-bit mask 0x7FFF for the NumSets field
  sub   x7, x7, #1
  and   x7, x7, x12, lsr #13    // x7 is the max number of the index size (right aligned)

Loop2:                          // per set (x7 counts down)
  mov   x9, x4                  // x9 working copy of the max way size (right aligned)

Loop3:                          // per way (x9 counts down)
  lsl   x11, x9, x5             // position the way number
  orr   x0, x10, x11            // factor in the way number and cache number
  lsl   x11, x7, x2             // position the set number
  orr   x0, x0, x11             // factor in the index number

  blr   x1                      // Goto requested cache operation

  subs  x9, x9, #1              // decrement the way number
  b.ge  Loop3
  subs  x7, x7, #1              // decrement the index
  b.ge  Loop2
L_Skip:
  add   x10, x10, #2            // increment the cache number (CSSELR level field is bit 1 up)
  cmp   x3, x10
  b.gt  Loop1

L_Finished:
  dsb   sy                      // ensure completion of all the set/way operations
  isb
  ldr   x30, [sp], #0x10        // restore LR, pop the 16-byte frame
  ret
329
330
// VOID ArmDataMemoryBarrier (VOID);
// Full-system data memory barrier (ordering only, no completion wait).
ASM_FUNC(ArmDataMemoryBarrier)
  dmb   sy
  ret
334
335
// VOID ArmDataSynchronizationBarrier (VOID);
// Full-system data synchronization barrier (waits for completion).
ASM_FUNC(ArmDataSynchronizationBarrier)
  dsb   sy
  ret
339
340
// VOID ArmInstructionSynchronizationBarrier (VOID);
// Flush the pipeline; subsequent instructions are refetched.
ASM_FUNC(ArmInstructionSynchronizationBarrier)
  isb
  ret
344
345
// VOID ArmWriteVBar (UINTN VectorBase /* x0 */);
// Write the vector table base address into VBAR_ELx for the current EL.
ASM_FUNC(ArmWriteVBar)
   EL1_OR_EL2_OR_EL3(x1)
1: msr   vbar_el1, x0            // Set the Address of the EL1 Vector Table in the VBAR register
   b     4f
2: msr   vbar_el2, x0            // Set the Address of the EL2 Vector Table in the VBAR register
   b     4f
3: msr   vbar_el3, x0            // Set the Address of the EL3 Vector Table in the VBAR register
4: isb                           // Synchronize so the new table is in effect on return
   ret
355
// UINTN ArmReadVBar (VOID);
// Return the vector table base address from VBAR_ELx for the current EL.
ASM_FUNC(ArmReadVBar)
   EL1_OR_EL2_OR_EL3(x1)
1: mrs   x0, vbar_el1            // Get the Address of the EL1 Vector Table from the VBAR register
   ret
2: mrs   x0, vbar_el2            // Get the Address of the EL2 Vector Table from the VBAR register
   ret
3: mrs   x0, vbar_el3            // Get the Address of the EL3 Vector Table from the VBAR register
   ret
364
365
// VOID ArmEnableVFP (VOID);
// If FP/SIMD is implemented, stop trapping it: set CPACR_EL1.FPEN, and when
// running at EL2/EL3 also clear the CPTR_ELx.TFP trap bit for that level.
ASM_FUNC(ArmEnableVFP)
  // Check whether floating-point is implemented in the processor.
  mov   x1, x30                 // Save LR in a register (bl below clobbers x30)
  bl    ArmReadIdPfr0           // Read EL1 Processor Feature Register (PFR0)
  mov   x30, x1                 // Restore LR
  ands  x0, x0, #AARCH64_PFR0_FP// Extract bits indicating VFP implementation
  cmp   x0, #0                  // VFP is implemented if '0'.
  b.ne  4f                      // Exit if VFP not implemented.
  // NOTE(review): ID_AA64PFR0_EL1.FP == 0b0001 (FP incl. half precision,
  // ARMv8.2-FP16) would also take this exit and leave FP trapped -- confirm
  // this is acceptable for the cores this library targets.
  // VFP is implemented.
  // Make sure VFP exceptions are not trapped (to any exception level).
  mrs   x0, cpacr_el1           // Read EL1 Coprocessor Access Control Register (CPACR)
  orr   x0, x0, #CPACR_VFP_BITS // Disable VFP traps to EL1 (FPEN = 0b11)
  msr   cpacr_el1, x0           // Write back EL1 Coprocessor Access Control Register (CPACR)
  mov   x1, #AARCH64_CPTR_TFP   // TFP Bit for trapping VFP Exceptions
  EL1_OR_EL2_OR_EL3(x2)
1:ret                           // Not configurable in EL1
2:mrs   x0, cptr_el2            // Disable VFP traps to EL2
  bic   x0, x0, x1
  msr   cptr_el2, x0
  ret
3:mrs   x0, cptr_el3            // Disable VFP traps to EL3
  bic   x0, x0, x1
  msr   cptr_el3, x0
4:ret                           // Also the "FP not implemented" exit
390
391
// VOID ArmCallWFI (VOID);
// Wait For Interrupt: suspend the PE until a wakeup event arrives.
ASM_FUNC(ArmCallWFI)
  wfi
  ret
395
396
// UINTN ArmReadMpidr (VOID);
// Return MPIDR_EL1 (multiprocessor affinity / core identification).
ASM_FUNC(ArmReadMpidr)
  mrs   x0, mpidr_el1           // read EL1 MPIDR
  ret
400
401
// UINTN ArmReadTpidrurw (VOID);
// Keep old function names for C compatibilty for now. Change later?
ASM_FUNC(ArmReadTpidrurw)
  mrs   x0, tpidr_el0           // read tpidr_el0 (v7 TPIDRURW) -> (v8 TPIDR_EL0)
  ret
406
407
// VOID ArmWriteTpidrurw (UINTN Value /* x0 */);
// Keep old function names for C compatibilty for now. Change later?
ASM_FUNC(ArmWriteTpidrurw)
  msr   tpidr_el0, x0           // write tpidr_el0 (v7 TPIDRURW) -> (v8 TPIDR_EL0)
  ret
412
413
// BOOLEAN ArmIsArchTimerImplemented (VOID);
// Arch timers are mandatory on AArch64, so always return TRUE.
ASM_FUNC(ArmIsArchTimerImplemented)
  mov   x0, #1
  ret
418
419
// UINTN ArmReadIdPfr0 (VOID);
// Return ID_AA64PFR0_EL1 (keeps the legacy v7-style name for C callers).
ASM_FUNC(ArmReadIdPfr0)
  mrs   x0, id_aa64pfr0_el1   // Read ID_AA64PFR0 Register
  ret
423
424
// UINTN ArmReadIdPfr1 (VOID);
// Q: id_aa64pfr1_el1 not defined yet. What does this funtion want to access?
// A: used to setup arch timer. Check if we have security extensions, permissions to set stuff.
//    See: ArmPkg/Library/ArmArchTimerLib/AArch64/ArmArchTimerLib.c
//    Not defined yet, but stick in here for now, should read all zeros.
ASM_FUNC(ArmReadIdPfr1)
  mrs   x0, id_aa64pfr1_el1   // Read ID_AA64PFR1 Register (reserved-zero on early v8 cores)
  ret
432
// VOID ArmWriteHcr(UINTN Hcr)
// Write HCR_EL2 (hypervisor configuration). Only meaningful at EL2/EL3;
// traps at EL1 -- callers are responsible for checking the current EL.
ASM_FUNC(ArmWriteHcr)
  msr   hcr_el2, x0        // Write the passed HCR value
  ret
437
// UINTN ArmReadHcr(VOID)
// Read HCR_EL2. Only accessible from EL2/EL3 (traps at EL1).
ASM_FUNC(ArmReadHcr)
  mrs   x0, hcr_el2
  ret
442
// UINTN ArmReadCurrentEL(VOID)
// Return the raw CurrentEL value (exception level in bits [3:2]).
ASM_FUNC(ArmReadCurrentEL)
  mrs   x0, CurrentEL
  ret
447
// Allow the linker to drop any of the above functions when unreferenced
// (macro from AsmMacroIoLibV8.h -- presumably sets per-function sections; confirm).
ASM_FUNCTION_REMOVE_IF_UNREFERENCED
449