/*
 * Copyright (c) 2013-2015, ARM Limited and Contributors. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of ARM nor the names of its contributors may be used
 * to endorse or promote products derived from this software without specific
 * prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
#ifndef __ASM_MACROS_S__
#define __ASM_MACROS_S__

#include <arch.h>

35
	/*
	 * Standard function prologue: push the frame pointer (x29) and link
	 * register (x30) as a pair (the 16-byte pre-decrement keeps SP
	 * 16-byte aligned, as AArch64 requires) and make x29 the frame
	 * pointer for this function.
	 */
	.macro	func_prologue
	stp	x29, x30, [sp, #-0x10]!
	mov	x29,sp
	.endm
40
	/*
	 * Standard function epilogue: pop the frame pointer (x29) and link
	 * register (x30) saved by func_prologue, releasing the 16-byte
	 * stack slot. The caller is expected to 'ret' afterwards.
	 */
	.macro	func_epilogue
	ldp	x29, x30, [sp], #0x10
	.endm
44
45
	/*
	 * Return the smallest data cache line size, in bytes, in \reg.
	 * CTR_EL0.DminLine (bits [19:16]) encodes log2 of the number of
	 * 4-byte words in the smallest D-cache line, so the line size in
	 * bytes is (4 << DminLine). Clobbers \tmp.
	 */
	.macro	dcache_line_size reg, tmp
	mrs	\tmp, ctr_el0
	ubfx	\tmp, \tmp, #16, #4
	mov	\reg, #4
	lsl	\reg, \reg, \tmp
	.endm
52
53
	/*
	 * Return the smallest instruction cache line size, in bytes, in
	 * \reg. CTR_EL0.IminLine (bits [3:0]) encodes log2 of the number of
	 * 4-byte words in the smallest I-cache line, so the line size in
	 * bytes is (4 << IminLine). Clobbers \tmp.
	 */
	.macro	icache_line_size reg, tmp
	mrs	\tmp, ctr_el0
	and	\tmp, \tmp, #0xf
	mov	\reg, #4
	lsl	\reg, \reg, \tmp
	.endm
60
61
Achin Gupta4f6ad662013-10-25 09:08:21 +010062 .macro smc_check label
Andrew Thoelke7935d0a2014-04-28 12:32:02 +010063 mrs x0, esr_el3
Achin Gupta4f6ad662013-10-25 09:08:21 +010064 ubfx x0, x0, #ESR_EC_SHIFT, #ESR_EC_LENGTH
65 cmp x0, #EC_AARCH64_SMC
66 b.ne $label
67 .endm
68
	/*
	 * Declare the exception vector table, enforcing it is aligned on a
	 * 2KB boundary, as required by the ARMv8 architecture.
	 * (.align 11 is a power-of-two alignment: 2^11 = 2048 bytes.)
	 */
	.macro vector_base  label
	.section .vectors, "ax"
	.align 11
	\label:
	.endm
Achin Gupta4f6ad662013-10-25 09:08:21 +010078
	/*
	 * Create an entry in the exception vector table, enforcing it is
	 * aligned on a 128-byte boundary, as required by the ARMv8 architecture.
	 * (.align 7 is a power-of-two alignment: 2^7 = 128 bytes.)
	 */
	.macro vector_entry  label
	.section .vectors, "ax"
	.align 7
	\label:
	.endm
88
	/*
	 * This macro verifies that the given vector doesn't exceed the
	 * architectural limit of 32 instructions. This is meant to be placed
	 * immediately after the last instruction in the vector. It takes the
	 * vector entry as the parameter.
	 * (32 instructions * 4 bytes each = the 128-byte vector slot size.)
	 */
	.macro check_vector_size since
	.if (. - \since) > (32 * 4)
	.error "Vector exceeds 32 instructions"
	.endif
	.endm
Andrew Thoelke0a30cf52014-03-18 13:46:55 +0000100
	/*
	 * This macro is used to create a function label and place the
	 * code into a separate text section based on the function name
	 * to enable elimination of unused code during linking (requires
	 * the linker to be invoked with garbage collection of sections).
	 * The .func directive pairs with .endfunc in the endfunc macro.
	 */
	.macro func _name
	.section .text.\_name, "ax"
	.type \_name, %function
	.func \_name
	\_name:
	.endm
Andrew Thoelke2bf28e62014-03-20 10:48:23 +0000112
	/*
	 * This macro is used to mark the end of a function. It closes the
	 * .func opened by the 'func' macro and records the function size
	 * in the symbol table (useful to debuggers and binary analysis).
	 */
	.macro endfunc _name
	.endfunc
	.size \_name, . - \_name
	.endm
120
	/*
	 * These macros are used to create function labels for deprecated
	 * APIs. If ERROR_DEPRECATED is non zero, the label is prefixed with
	 * "deprecated", so callers of these APIs will fail to link and
	 * cause a build failure.
	 */
#if ERROR_DEPRECATED
	.macro func_deprecated _name
	func deprecated\_name
	.endm

	.macro endfunc_deprecated _name
	endfunc deprecated\_name
	.endm
#else
	.macro func_deprecated _name
	func \_name
	.endm

	.macro endfunc_deprecated _name
	endfunc \_name
	.endm
#endif
143
	/*
	 * This macro declares an array of 1 or more stacks, properly
	 * aligned and in the requested section.
	 * _size must be a multiple of (1 << STACK_ALIGN) = 64 bytes;
	 * assembly fails otherwise. Note "<>" is the GAS not-equal
	 * operator.
	 */
#define STACK_ALIGN	6

	.macro declare_stack _name, _section, _size, _count
	.if ((\_size & ((1 << STACK_ALIGN) - 1)) <> 0)
	.error "Stack size not correctly aligned"
	.endif
	.section \_section, "aw", %nobits
	.align STACK_ALIGN
	\_name:
	.space ((\_count) * (\_size)), 0
	.endm
159
#if ENABLE_PLAT_COMPAT
	/*
	 * This macro calculates the base address of an MP stack using the
	 * platform_get_core_pos() index, the name of the stack storage and
	 * the size of each stack.
	 * In: X0 = MPIDR of CPU whose stack is wanted
	 * Out: X0 = physical address of stack base
	 * Clobber: X30, X1, X2
	 * (base = _name + (core_pos + 1) * _size, i.e. the stack grows
	 * downwards from the top of this core's slot.)
	 */
	.macro get_mp_stack _name, _size
	bl  platform_get_core_pos
	ldr x2, =(\_name + \_size)
	mov x1, #\_size
	madd x0, x0, x1, x2
	.endm
#endif
Andrew Thoelke2bf28e62014-03-20 10:48:23 +0000176
	/*
	 * This macro calculates the base address of the current CPU's MP
	 * stack using the plat_my_core_pos() index, the name of the stack
	 * storage and the size of each stack.
	 * Out: X0 = physical address of stack base
	 * Clobber: X30, X1, X2
	 * (base = _name + (core_pos + 1) * _size, i.e. the stack grows
	 * downwards from the top of this core's slot.)
	 */
	.macro get_my_mp_stack _name, _size
	bl  plat_my_core_pos
	ldr x2, =(\_name + \_size)
	mov x1, #\_size
	madd x0, x0, x1, x2
	.endm
190
	/*
	 * This macro calculates the base address of a UP stack using the
	 * name of the stack storage and the size of the stack.
	 * Out: X0 = physical address of stack base (the stack grows
	 *      downwards from _name + _size)
	 */
	.macro get_up_stack _name, _size
	ldr x0, =(\_name + \_size)
	.endm
Soby Mathewc67b09b2014-07-14 16:57:23 +0100199
200 /*
201 * Helper macro to generate the best mov/movk combinations according
202 * the value to be moved. The 16 bits from '_shift' are tested and
203 * if not zero, they are moved into '_reg' without affecting
204 * other bits.
205 */
206 .macro _mov_imm16 _reg, _val, _shift
207 .if (\_val >> \_shift) & 0xffff
208 .if (\_val & (1 << \_shift - 1))
209 movk \_reg, (\_val >> \_shift) & 0xffff, LSL \_shift
210 .else
211 mov \_reg, \_val & (0xffff << \_shift)
212 .endif
213 .endif
214 .endm
215
	/*
	 * Helper macro to load arbitrary values into 32 or 64-bit registers
	 * which generates the best mov/movk combinations. Many base addresses
	 * are 64KB aligned; the macro will eliminate updating bits 15:0 in
	 * that case. Emits one instruction per non-zero 16-bit chunk of
	 * _val (which must be an assembly-time constant).
	 */
	.macro mov_imm _reg, _val
	.if (\_val) == 0
	mov	\_reg, #0
	.else
	_mov_imm16	\_reg, (\_val), 0
	_mov_imm16	\_reg, (\_val), 16
	_mov_imm16	\_reg, (\_val), 32
	_mov_imm16	\_reg, (\_val), 48
	.endif
	.endm

#endif /* __ASM_MACROS_S__ */