/*
 * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <cortex_a32.h>
#include <cpu_macros.S>


	/* ---------------------------------------------
	 * Disable intra-cluster coherency
	 * Clobbers: r0-r1
	 * ---------------------------------------------
	 */
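	/*
	 * Clearing CPUECTLR.SMPEN takes this core out of coherency with
	 * the rest of the cluster. This is only safe once the data cache
	 * has been disabled and flushed, which the power-down sequences
	 * below guarantee before branching here; the barriers ensure the
	 * change has taken effect before returning to the caller.
	 */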
func cortex_a32_disable_smp
	ldcopr16	r0, r1, CORTEX_A32_CPUECTLR_EL1
	bic	r0, r0, #CORTEX_A32_CPUECTLR_SMPEN_BIT
	stcopr16	r0, r1, CORTEX_A32_CPUECTLR_EL1
	isb
	dsb	sy
	bx	lr
endfunc cortex_a32_disable_smp

	/* -------------------------------------------------
	 * The CPU Ops reset function for Cortex-A32.
	 * Clobbers: r0-r1
	 * -------------------------------------------------
	 */
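	/*
	 * Setting CPUECTLR.SMPEN makes the core take part in cluster
	 * coherency. It is expected to be set before the data cache and
	 * MMU are enabled, hence it is done from the reset handler.
	 */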
func cortex_a32_reset_func
	/* ---------------------------------------------
	 * Enable the SMP bit.
	 * ---------------------------------------------
	 */
	ldcopr16	r0, r1, CORTEX_A32_CPUECTLR_EL1
	orr	r0, r0, #CORTEX_A32_CPUECTLR_SMPEN_BIT
	stcopr16	r0, r1, CORTEX_A32_CPUECTLR_EL1
	isb
	bx	lr
endfunc cortex_a32_reset_func

	/* ----------------------------------------------------
	 * The CPU Ops core power down function for Cortex-A32.
	 * Clobbers: r0-r3
	 * ----------------------------------------------------
	 */
func cortex_a32_core_pwr_dwn
	/* r12 is pushed to meet the 8 byte stack alignment requirement */
	push	{r12, lr}

	/* Assert if cache is enabled */
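	/*
	 * The set/way maintenance below is only safe with the data cache
	 * already disabled, so the assertion fires if SCTLR.C is still
	 * set.
	 */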
#if ENABLE_ASSERTIONS
	ldcopr	r0, SCTLR
	tst	r0, #SCTLR_C_BIT
	ASM_ASSERT(eq)
#endif

	/* ---------------------------------------------
	 * Flush L1 caches.
	 * ---------------------------------------------
	 */
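	/*
	 * DC_OP_CISW selects clean-and-invalidate by set/way;
	 * dcsw_op_level1 applies it to the whole level 1 data cache.
	 */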
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	/* ---------------------------------------------
	 * Come out of intra-cluster coherency
	 * ---------------------------------------------
	 */
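	/*
	 * lr is restored before the tail call so that
	 * cortex_a32_disable_smp returns directly to our caller.
	 */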
	pop	{r12, lr}
	b	cortex_a32_disable_smp
endfunc cortex_a32_core_pwr_dwn

	/* -------------------------------------------------------
	 * The CPU Ops cluster power down function for Cortex-A32.
	 * Clobbers: r0-r3
	 * -------------------------------------------------------
	 */
func cortex_a32_cluster_pwr_dwn
	/* r12 is pushed to meet the 8 byte stack alignment requirement */
	push	{r12, lr}

	/* Assert if cache is enabled */
#if ENABLE_ASSERTIONS
	ldcopr	r0, SCTLR
	tst	r0, #SCTLR_C_BIT
	ASM_ASSERT(eq)
#endif

	/* ---------------------------------------------
	 * Flush L1 cache.
	 * ---------------------------------------------
	 */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	/* ---------------------------------------------
	 * Disable the optional ACP.
	 * ---------------------------------------------
	 */
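	/*
	 * plat_disable_acp is a platform hook; on platforms without an
	 * Accelerator Coherency Port it is expected to be an empty stub.
	 */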
	bl	plat_disable_acp

	/* ---------------------------------------------
	 * Flush L2 cache.
	 * ---------------------------------------------
	 */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level2

	/* ---------------------------------------------
	 * Come out of intra-cluster coherency
	 * ---------------------------------------------
	 */
	pop	{r12, lr}
	b	cortex_a32_disable_smp
endfunc cortex_a32_cluster_pwr_dwn

#if REPORT_ERRATA
/*
 * Errata printing function for Cortex-A32. Must follow AAPCS.
 */
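/*
 * No errata workarounds are implemented for the Cortex-A32 in this file, so
 * there is nothing to report.
 */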
func cortex_a32_errata_report
	bx	lr
endfunc cortex_a32_errata_report
#endif

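/*
 * Register the CPU ops above with the common CPU framework; at runtime the
 * entry is selected by matching the running core's MIDR against
 * CORTEX_A32_MIDR.
 */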
declare_cpu_ops cortex_a32, CORTEX_A32_MIDR, \
	cortex_a32_reset_func, \
	cortex_a32_core_pwr_dwn, \
	cortex_a32_cluster_pwr_dwn