/*
 * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
#ifndef __ASM_MACROS_S__
#define __ASM_MACROS_S__

#include <arch.h>
#include <asm_macros_common.S>
#include <spinlock.h>

/*
 * TLBI instruction with type specifier that implements the workaround for
 * erratum 813419 of Cortex-A57. The workaround repeats the TLBI with an
 * intervening DSB.
 */
#if ERRATA_A57_813419
#define TLB_INVALIDATE(_reg, _coproc) \
	stcopr	_reg, _coproc; \
	dsb	ish; \
	stcopr	_reg, _coproc
#else
#define TLB_INVALIDATE(_reg, _coproc) \
	stcopr	_reg, _coproc
#endif
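
/*
 * Usage sketch: invalidate the entire TLB through TLBIALL, assuming
 * TLBIALL is defined in arch.h with its p15 encoding (the written
 * value is ignored for this operation):
 *	mov	r0, #0
 *	TLB_INVALIDATE(r0, TLBIALL)
 */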

#define WORD_SIZE	4

	/*
	 * Coprocessor register accessors
	 */
	.macro ldcopr reg, coproc, opc1, CRn, CRm, opc2
	mrc	\coproc, \opc1, \reg, \CRn, \CRm, \opc2
	.endm

	/* 64-bit read: reg1 = low word, reg2 = high word */
	.macro ldcopr16 reg1, reg2, coproc, opc1, CRm
	mrrc	\coproc, \opc1, \reg1, \reg2, \CRm
	.endm

	.macro stcopr reg, coproc, opc1, CRn, CRm, opc2
	mcr	\coproc, \opc1, \reg, \CRn, \CRm, \opc2
	.endm

	/* 64-bit write: reg1 = low word, reg2 = high word */
	.macro stcopr16 reg1, reg2, coproc, opc1, CRm
	mcrr	\coproc, \opc1, \reg1, \reg2, \CRm
	.endm
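
	/*
	 * For instance, to read the Main ID register and then the 64-bit
	 * physical count, assuming MIDR and CNTPCT_64 are defined in
	 * arch.h with their p15 encodings:
	 *	ldcopr	r0, MIDR
	 *	ldcopr16 r0, r1, CNTPCT_64
	 */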

	/* Cache line size helpers */
	.macro dcache_line_size reg, tmp
	ldcopr	\tmp, CTR
	ubfx	\tmp, \tmp, #CTR_DMINLINE_SHIFT, #CTR_DMINLINE_WIDTH
	mov	\reg, #WORD_SIZE
	lsl	\reg, \reg, \tmp
	.endm

	.macro icache_line_size reg, tmp
	ldcopr	\tmp, CTR
	and	\tmp, \tmp, #CTR_IMINLINE_MASK
	mov	\reg, #WORD_SIZE
	lsl	\reg, \reg, \tmp
	.endm
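
	/*
	 * Typical use: fetch the data cache line size before a
	 * clean/invalidate loop:
	 *	dcache_line_size r2, r3	@ r2 = line size in bytes, r3 clobbered
	 */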

	/*
	 * Declare the exception vector table, enforcing that it is aligned
	 * to a 32-byte boundary.
	 */
	.macro vector_base label
	.section .vectors, "ax"
	.align 5
	\label:
	.endm
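
	/*
	 * Usage sketch, with hypothetical handler names; one branch per
	 * exception vector follows the label:
	 *	vector_base runtime_vectors
	 *	b	reset_handler		@ Reset
	 *	b	undef_handler		@ Undefined instruction
	 *	b	smc_handler		@ SVC/SMC
	 *	...
	 */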

	/*
	 * This macro calculates the base address of the current CPU's
	 * multi-processor (MP) stack using the plat_my_core_pos() index, the
	 * name of the stack storage and the size of each stack.
	 * Out: r0 = physical address of stack base
	 * Clobber: r14, r1, r2
	 */
	.macro get_my_mp_stack _name, _size
	bl	plat_my_core_pos
	ldr	r2, =(\_name + \_size)
	mov	r1, #\_size
	mla	r0, r0, r1, r2	@ r0 = \_name + (core_pos + 1) * \_size
	.endm

	/*
	 * This macro calculates the base address of a uniprocessor (UP)
	 * stack using the name of the stack storage and the size of the
	 * stack.
	 * Out: r0 = physical address of stack base
	 */
	.macro get_up_stack _name, _size
	ldr	r0, =(\_name + \_size)
	.endm
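
	/*
	 * E.g. to point SP at this core's stack, assuming a per-CPU stack
	 * region named platform_normal_stacks of PLATFORM_STACK_SIZE bytes
	 * per core:
	 *	get_my_mp_stack platform_normal_stacks, PLATFORM_STACK_SIZE
	 *	mov	sp, r0
	 */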
95
Etienne Carriere64cc6e92017-11-08 14:38:33 +010096#if ARM_ARCH_MAJOR == 7 && !defined(ARMV7_SUPPORTS_VIRTUALIZATION)
97 /*
98 * ARMv7 cores without Virtualization extension do not support the
99 * eret instruction.
100 */
101 .macro eret
102 movs pc, lr
103 .endm
104#endif

#if (ARM_ARCH_MAJOR == 7)
	/* ARMv7 does not support the stl instruction */
	.macro stl _reg, _write_lock
	dmb	@ complete prior accesses before the store
	str	\_reg, \_write_lock
	dsb	@ make the store visible before continuing
	.endm
#endif
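
	/*
	 * Usage sketch: release a lock with store-release semantics, with
	 * the lock address in r0:
	 *	mov	r1, #0
	 *	stl	r1, [r0]
	 */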

	/*
	 * Helper macro to generate the best mov/movw/movt combinations
	 * according to the value to be moved.
	 */
	.macro mov_imm _reg, _val
		.if ((\_val) & 0xffff0000) == 0
			mov	\_reg, #(\_val)
		.else
			movw	\_reg, #((\_val) & 0xffff)
			movt	\_reg, #((\_val) >> 16)
		.endif
	.endm
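
	/*
	 * For instance, the first form below expands to a single mov, the
	 * second to a movw/movt pair:
	 *	mov_imm	r0, 0xff		@ mov r0, #0xff
	 *	mov_imm	r1, 0x80000004		@ movw r1, #4; movt r1, #0x8000
	 */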

	/*
	 * Macro to mark instances where we're jumping to a function and
	 * don't expect a return. To provide the function being jumped to
	 * with additional information, we use the 'bl' instruction to jump
	 * rather than 'b'.
	 *
	 * Debuggers infer the location of a call from where LR points to,
	 * which is usually the instruction after 'bl'. If this macro
	 * expansion happens to be the last location in a function, that will
	 * cause the LR to point to a location beyond the function, thereby
	 * misleading the debugger back trace. We therefore insert a 'nop'
	 * after the function call for debug builds, unless the 'skip_nop'
	 * parameter is non-zero.
	 */
	.macro no_ret _func:req, skip_nop=0
	bl	\_func
#if DEBUG
	.ifeq \skip_nop
	nop
	.endif
#endif
	.endm
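
	/*
	 * E.g. to jump to the platform panic handler with no expectation of
	 * returning:
	 *	no_ret	plat_panic_handler
	 */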
149
Jeenu Viswambharanb38bc682017-01-19 14:23:36 +0000150 /*
151 * Reserve space for a spin lock in assembly file.
152 */
153 .macro define_asm_spinlock _name:req
154 .align SPINLOCK_ASM_ALIGN
155 \_name:
156 .space SPINLOCK_ASM_SIZE
157 .endm
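
	/*
	 * Usage sketch (lock name hypothetical), assuming the spin_lock
	 * helper from the spinlock library:
	 *	define_asm_spinlock my_lock
	 *	...
	 *	ldr	r0, =my_lock
	 *	bl	spin_lock
	 */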

	/*
	 * Helper macro to OR the bottom 32 bits of `_val` into `_reg_l`
	 * and the top 32 bits of `_val` into `_reg_h`. If either the bottom
	 * or top word of `_val` is zero, the corresponding OR operation
	 * is skipped.
	 */
	.macro orr64_imm _reg_l, _reg_h, _val
		.if (\_val >> 32)
			orr	\_reg_h, \_reg_h, #(\_val >> 32)
		.endif
		.if (\_val & 0xffffffff)
			orr	\_reg_l, \_reg_l, #(\_val & 0xffffffff)
		.endif
	.endm

	/*
	 * Helper macro to bitwise-clear bits in `_reg_l` and `_reg_h` given
	 * a 64-bit immediate `_val`. The set bits in the bottom word of
	 * `_val` dictate which bits from `_reg_l` should be cleared.
	 * Similarly, the set bits in the top word of `_val` dictate which
	 * bits from `_reg_h` should be cleared. If either the bottom or top
	 * word of `_val` is zero, the corresponding BIC operation is
	 * skipped.
	 */
	.macro bic64_imm _reg_l, _reg_h, _val
		.if (\_val >> 32)
			bic	\_reg_h, \_reg_h, #(\_val >> 32)
		.endif
		.if (\_val & 0xffffffff)
			bic	\_reg_l, \_reg_l, #(\_val & 0xffffffff)
		.endif
	.endm
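
	/*
	 * For instance, with a 64-bit quantity held in r0 (low word) and
	 * r1 (high word), set bit 32 and clear bit 0:
	 *	orr64_imm r0, r1, (1 << 32)	@ expands to: orr r1, r1, #1
	 *	bic64_imm r0, r1, 0x1		@ expands to: bic r0, r0, #1
	 */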

#endif /* __ASM_MACROS_S__ */