/*
 * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
#ifndef ASM_MACROS_S
#define ASM_MACROS_S

#include <arch.h>
#include <common/asm_macros_common.S>
#include <lib/spinlock.h>

/*
 * TLBI instruction with type specifier that implements the workaround for
 * erratum 813419 of Cortex-A57.
 */
#if ERRATA_A57_813419
#define TLB_INVALIDATE(_reg, _coproc) \
	stcopr	_reg, _coproc; \
	dsb	ish; \
	stcopr	_reg, _coproc
#else
#define TLB_INVALIDATE(_reg, _coproc) \
	stcopr	_reg, _coproc
#endif
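
	/*
	 * Illustrative use (a sketch, assuming TLBIALL is the coprocessor
	 * operand list defined in arch.h):
	 *
	 *	mov	r0, #0
	 *	TLB_INVALIDATE(r0, TLBIALL)	@ invalidate all TLB entries
	 */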

#define WORD_SIZE	4

	/*
	 * Coprocessor register accessors
	 */
	.macro ldcopr reg, coproc, opc1, CRn, CRm, opc2
	mrc	\coproc, \opc1, \reg, \CRn, \CRm, \opc2
	.endm

	.macro ldcopr16 reg1, reg2, coproc, opc1, CRm
	mrrc	\coproc, \opc1, \reg1, \reg2, \CRm
	.endm

	.macro stcopr reg, coproc, opc1, CRn, CRm, opc2
	mcr	\coproc, \opc1, \reg, \CRn, \CRm, \opc2
	.endm

	.macro stcopr16 reg1, reg2, coproc, opc1, CRm
	mcrr	\coproc, \opc1, \reg1, \reg2, \CRm
	.endm
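
	/*
	 * Illustrative use, assuming SCTLR (32-bit) and CNTPCT_64 (64-bit)
	 * are coprocessor operand lists defined in arch.h:
	 *
	 *	ldcopr	r0, SCTLR		@ r0 := SCTLR
	 *	stcopr	r0, SCTLR		@ SCTLR := r0
	 *	ldcopr16 r0, r1, CNTPCT_64	@ r0:r1 := CNTPCT
	 */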

	/* Cache line size helpers */
	.macro	dcache_line_size  reg, tmp
	ldcopr	\tmp, CTR
	ubfx	\tmp, \tmp, #CTR_DMINLINE_SHIFT, #CTR_DMINLINE_WIDTH
	mov	\reg, #WORD_SIZE
	lsl	\reg, \reg, \tmp
	.endm

	.macro	icache_line_size  reg, tmp
	ldcopr	\tmp, CTR
	and	\tmp, \tmp, #CTR_IMINLINE_MASK
	mov	\reg, #WORD_SIZE
	lsl	\reg, \reg, \tmp
	.endm
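
	/*
	 * Illustrative use:
	 *
	 *	dcache_line_size r2, r3	@ r2 := D-cache line size in bytes,
	 *				@ r3 is clobbered
	 */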

	/*
	 * Declare the exception vector table, enforcing it is aligned on a
	 * 32-byte boundary.
	 */
	.macro vector_base  label
	.section .vectors, "ax"
	.align 5
	\label:
	.endm
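
	/*
	 * Illustrative use, with placeholder names:
	 *
	 *	vector_base my_vector_table
	 *	b	my_entry0_handler
	 *	b	my_entry1_handler
	 *	@ one branch per exception vector follows
	 */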

	/*
	 * This macro calculates the base address of the current CPU's
	 * multiprocessor (MP) stack using the plat_my_core_pos() index, the
	 * name of the stack storage and the size of each stack.
	 * Out: r0 = physical address of stack base
	 * Clobber: r14, r1, r2
	 */
	.macro get_my_mp_stack _name, _size
	bl	plat_my_core_pos
	ldr r2, =(\_name + \_size)
	mov r1, #\_size
	mla r0, r0, r1, r2
	.endm
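
	/*
	 * Illustrative use, assuming `platform_normal_stacks` provides
	 * PLATFORM_STACK_SIZE bytes of stack per CPU:
	 *
	 *	get_my_mp_stack platform_normal_stacks, PLATFORM_STACK_SIZE
	 *	mov	sp, r0
	 */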

	/*
	 * This macro calculates the base address of a uniprocessor (UP) stack
	 * using the name of the stack storage and the size of the stack.
	 * Out: r0 = physical address of stack base
	 */
	.macro get_up_stack _name, _size
	ldr r0, =(\_name + \_size)
	.endm
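
	/*
	 * Illustrative use, with the same assumed stack storage as above:
	 *
	 *	get_up_stack platform_normal_stacks, PLATFORM_STACK_SIZE
	 *	mov	sp, r0
	 */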

#if ARM_ARCH_MAJOR == 7 && !defined(ARMV7_SUPPORTS_VIRTUALIZATION)
	/*
	 * Macro for mitigating against speculative execution beyond the
	 * exception return. ARMv7 cores without the Virtualization extension
	 * do not support the eret instruction, so 'movs pc, lr' is used to
	 * return instead.
	 */
	.macro exception_return
	movs	pc, lr
	dsb	nsh
	isb
	.endm

#else
	/*
	 * Macro for mitigating against speculative execution beyond ERET.
	 * If possible, use the Speculation Barrier instruction defined in
	 * ARMv8.5.
	 */
	.macro exception_return
	eret
#if ARM_ARCH_AT_LEAST(8, 5)
	sb
#else
	dsb	nsh
	isb
#endif
	.endm
#endif
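
	/*
	 * Illustrative use at the end of an exception handler, once the
	 * return address and SPSR have been set up:
	 *
	 *	exception_return	@ does not fall through
	 */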

#if (ARM_ARCH_MAJOR == 7)
	/* ARMv7 does not support the stl instruction */
	.macro stl _reg, _write_lock
	dmb
	str	\_reg, \_write_lock
	dsb
	.endm
#endif
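
	/*
	 * Illustrative use (matching the stl syntax of later architectures),
	 * assuming r0 holds the address of a lock variable:
	 *
	 *	mov	r1, #0
	 *	stl	r1, [r0]	@ release the lock at [r0]
	 */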

	/*
	 * Helper macro to generate the best mov/movw/movt combinations
	 * according to the value to be moved.
	 */
	.macro mov_imm _reg, _val
		.if ((\_val) & 0xffff0000) == 0
			mov	\_reg, #(\_val)
		.else
			movw	\_reg, #((\_val) & 0xffff)
			movt	\_reg, #((\_val) >> 16)
		.endif
	.endm
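
	/*
	 * Illustrative use:
	 *
	 *	mov_imm	r0, 0xff	@ expands to a single mov
	 *	mov_imm	r1, 0x12345678	@ expands to movw + movt
	 */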

	/*
	 * Macro to mark instances where we're jumping to a function and don't
	 * expect a return. To provide the function being jumped to with
	 * additional information, we use the 'bl' instruction to jump rather
	 * than 'b'.
	 *
	 * Debuggers infer the location of a call from where LR points to,
	 * which is usually the instruction after 'bl'. If this macro expansion
	 * happens to be the last location in a function, that'll cause the LR
	 * to point to a location beyond the function, thereby misleading the
	 * debugger back trace. We therefore insert a 'nop' after the function
	 * call for debug builds, unless the 'skip_nop' parameter is non-zero.
	 */
	.macro no_ret _func:req, skip_nop=0
	bl	\_func
#if DEBUG
	.ifeq \skip_nop
	nop
	.endif
#endif
	.endm
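
	/*
	 * Illustrative use, jumping to a function that does not return:
	 *
	 *	no_ret	plat_panic_handler
	 */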

	/*
	 * Reserve space for a spin lock in an assembly file.
	 */
	.macro define_asm_spinlock _name:req
	.align	SPINLOCK_ASM_ALIGN
	\_name:
	.space	SPINLOCK_ASM_SIZE
	.endm
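
	/*
	 * Illustrative use, with a placeholder lock name, acquired via the
	 * spin_lock helper declared in lib/spinlock.h:
	 *
	 *	define_asm_spinlock my_lock
	 *
	 *	ldr	r0, =my_lock	@ elsewhere, take the lock
	 *	bl	spin_lock
	 */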

	/*
	 * Helper macro to OR the bottom 32 bits of `_val` into `_reg_l`
	 * and the top 32 bits of `_val` into `_reg_h`.  If either the bottom
	 * or top word of `_val` is zero, the corresponding OR operation
	 * is skipped.
	 */
	.macro orr64_imm _reg_l, _reg_h, _val
		.if (\_val >> 32)
			orr \_reg_h, \_reg_h, #(\_val >> 32)
		.endif
		.if (\_val & 0xffffffff)
			orr \_reg_l, \_reg_l, #(\_val & 0xffffffff)
		.endif
	.endm
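
	/*
	 * Illustrative use: set bit 2 of the low word and bit 1 of the high
	 * word of the 64-bit value held in r0 (low) and r1 (high):
	 *
	 *	orr64_imm r0, r1, 0x0000000200000004
	 */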

	/*
	 * Helper macro to bitwise-clear bits in `_reg_l` and
	 * `_reg_h` given a 64-bit immediate `_val`.  The set bits
	 * in the bottom word of `_val` dictate which bits from
	 * `_reg_l` should be cleared.  Similarly, the set bits in
	 * the top word of `_val` dictate which bits from `_reg_h`
	 * should be cleared.  If either the bottom or top word of
	 * `_val` is zero, the corresponding BIC operation is skipped.
	 */
	.macro bic64_imm _reg_l, _reg_h, _val
		.if (\_val >> 32)
			bic \_reg_h, \_reg_h, #(\_val >> 32)
		.endif
		.if (\_val & 0xffffffff)
			bic \_reg_l, \_reg_l, #(\_val & 0xffffffff)
		.endif
	.endm
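
	/*
	 * Illustrative use: clear the same bits that are set in the
	 * orr64_imm example above:
	 *
	 *	bic64_imm r0, r1, 0x0000000200000004
	 */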

	/*
	 * Helper macro for carrying out division in software when
	 * hardware division is not supported. \top holds the dividend on
	 * entry and the remainder on exit. \bot holds the divisor. \div holds
	 * the quotient and \temp is a temporary register used in the
	 * calculation. The division algorithm has been obtained from:
	 * http://www.keil.com/support/man/docs/armasm/armasm_dom1359731155623.htm
	 */
	.macro	softudiv	div:req,top:req,bot:req,temp:req

	mov     \temp, \bot
	cmp     \temp, \top, lsr #1
div1:
	movls   \temp, \temp, lsl #1
	cmp     \temp, \top, lsr #1
	bls     div1
	mov     \div, #0

div2:
	cmp     \top, \temp
	subcs   \top, \top, \temp
	adc     \div, \div, \div
	mov     \temp, \temp, lsr #1
	cmp     \temp, \bot
	bhs     div2
	.endm
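
	/*
	 * Illustrative use: compute 100 / 7. Afterwards r2 holds the
	 * quotient (14), r0 holds the remainder (2) and r3 is clobbered:
	 *
	 *	mov	r0, #100
	 *	mov	r1, #7
	 *	softudiv r2, r0, r1, r3
	 */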
#endif /* ASM_MACROS_S */