/* SPDX-License-Identifier: GPL-2.0+ */
/*
 * include/asm-arm/macro.h
 *
 * Copyright (C) 2009 Jean-Christophe PLAGNIOL-VILLARD <plagnioj@jcrosoft.com>
 */

#ifndef __ASM_ARM_MACRO_H__
#define __ASM_ARM_MACRO_H__

#include <asm/system.h>

#ifdef __ASSEMBLY__

/*
 * These macros provide a convenient way to write 8, 16 and 32 bit data
 * to any address.
 * Registers r4 and r5 are used; any data in these registers is
 * overwritten by the macros.
 * The macros are valid for any ARM architecture, but they do not implement
 * any memory barriers, so caution is recommended when using them while the
 * caches are enabled or on a multi-core system.
 */

.macro	write32, addr, data
	ldr	r4, =\addr
	ldr	r5, =\data
	str	r5, [r4]
.endm

.macro	write16, addr, data
	ldr	r4, =\addr
	ldr	r5, =\data
	strh	r5, [r4]
.endm

.macro	write8, addr, data
	ldr	r4, =\addr
	ldr	r5, =\data
	strb	r5, [r4]
.endm

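/*
 * Usage sketch for the write macros (the address and value below are
 * arbitrary illustrations, not taken from any board); r4 and r5 are
 * clobbered by the expansion:
 *
 *	write32	0x90000000, 0x000000ff		- 32-bit store of 0xff
 *	write16	0x90000004, 0x1234		- 16-bit store
 *	write8	0x90000006, 0xab		- 8-bit store
 */
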
/*
 * This macro generates a loop that can be used for delays in the code.
 * Register r4 is used; any data in this register is overwritten by the
 * macro.
 * The macro is valid for any ARM architecture, but the actual time spent
 * in the loop will vary from CPU to CPU.
 */

.macro	wait_timer, time
	ldr	r4, =\time
1:
	nop
	subs	r4, r4, #1
	bcs	1b
.endm

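/*
 * Usage sketch (the count is an arbitrary example; the resulting delay is
 * uncalibrated and varies with CPU and clock speed):
 *
 *	wait_timer	0x10000		- busy-wait for roughly 0x10000 iterations
 */
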
#ifdef CONFIG_ARM64

/*
 * Branch according to exception level
 */
.macro	switch_el, xreg, el3_label, el2_label, el1_label
	mrs	\xreg, CurrentEL
	cmp	\xreg, #0x8
	b.gt	\el3_label
	b.eq	\el2_label
	b.lt	\el1_label
.endm

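/*
 * Usage sketch (label names and the scratch register are illustrative):
 *
 *	switch_el x1, in_el3, in_el2, in_el1
 *
 * Control branches to in_el3, in_el2 or in_el1 depending on the exception
 * level the core is currently running in.
 */
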
/*
 * Branch if we are not in the highest exception level
 */
.macro	branch_if_not_highest_el, xreg, label
	switch_el \xreg, 3f, 2f, 1f

2:	mrs	\xreg, ID_AA64PFR0_EL1
	and	\xreg, \xreg, #(ID_AA64PFR0_EL1_EL3)
	cbnz	\xreg, \label
	b	3f

1:	mrs	\xreg, ID_AA64PFR0_EL1
	and	\xreg, \xreg, #(ID_AA64PFR0_EL1_EL3 | ID_AA64PFR0_EL1_EL2)
	cbnz	\xreg, \label

3:
.endm

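/*
 * Usage sketch (the label name is hypothetical):
 *
 *	branch_if_not_highest_el x0, lower_el_entry
 *
 * Execution falls through when the core is already running in the highest
 * implemented exception level.
 */
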
/*
 * Branch if the current processor is a Cortex-A57 core.
 */
.macro	branch_if_a57_core, xreg, a57_label
	mrs	\xreg, midr_el1
	lsr	\xreg, \xreg, #4
	and	\xreg, \xreg, #0x00000FFF
	cmp	\xreg, #0xD07		/* Cortex-A57 MPCore processor. */
	b.eq	\a57_label
.endm

/*
 * Branch if the current processor is a Cortex-A53 core.
 */
.macro	branch_if_a53_core, xreg, a53_label
	mrs	\xreg, midr_el1
	lsr	\xreg, \xreg, #4
	and	\xreg, \xreg, #0x00000FFF
	cmp	\xreg, #0xD03		/* Cortex-A53 MPCore processor. */
	b.eq	\a53_label
.endm

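/*
 * Usage sketch (labels are hypothetical); the scratch register is clobbered
 * whether or not the branch is taken:
 *
 *	branch_if_a57_core x0, a57_errata_setup
 *	branch_if_a53_core x0, a53_errata_setup
 */
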
/*
 * Branch if the current processor is a slave; the processor with an
 * all-zero affinity value is chosen as the master.
 */
.macro	branch_if_slave, xreg, slave_label
#ifdef CONFIG_ARMV8_MULTIENTRY
	mrs	\xreg, mpidr_el1
	and	\xreg, \xreg, 0xffffffffff	/* clear bits [63:40] */
	and	\xreg, \xreg, ~0x00ff000000	/* also clear bits [31:24] */
	cbnz	\xreg, \slave_label
#endif
.endm

/*
 * Branch if the current processor is the master; the processor with an
 * all-zero affinity value is chosen as the master.
 */
.macro	branch_if_master, xreg, master_label
#ifdef CONFIG_ARMV8_MULTIENTRY
	mrs	\xreg, mpidr_el1
	and	\xreg, \xreg, 0xffffffffff	/* clear bits [63:40] */
	and	\xreg, \xreg, ~0x00ff000000	/* also clear bits [31:24] */
	cbz	\xreg, \master_label
#else
	b	\master_label
#endif
.endm

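/*
 * Usage sketch (labels are hypothetical); this mirrors the usual boot-time
 * pattern of parking secondary cores while the core with an all-zero
 * affinity value continues:
 *
 *	branch_if_master x0, master_cpu
 *	branch_if_slave  x0, slave_cpu
 *
 * With CONFIG_ARMV8_MULTIENTRY disabled, branch_if_slave expands to nothing
 * and branch_if_master branches unconditionally.
 */
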
/*
 * Switch from EL3 to EL2 for ARMv8
 * @ep:   kernel entry point
 * @flag: the execution state flag for the lower exception
 *        level, ES_TO_AARCH64 or ES_TO_AARCH32
 * @tmp:  temporary register
 *
 * For loading a 32-bit OS, x1 is the machine nr and x2 is the FDT address.
 * For loading a 64-bit OS, x0 is the physical address of the FDT blob.
 * They will be passed to the guest.
 */
.macro armv8_switch_to_el2_m, ep, flag, tmp
	msr	cptr_el3, xzr		/* Disable coprocessor traps to EL3 */
	mov	\tmp, #CPTR_EL2_RES1
	msr	cptr_el2, \tmp		/* Disable coprocessor traps to EL2 */

	/* Initialize Generic Timers */
	msr	cntvoff_el2, xzr

	/* Initialize SCTLR_EL2
	 *
	 * setting RES1 bits (29,28,23,22,18,16,11,5,4) to 1
	 * and RES0 bits (31,30,27,26,24,21,20,17,15-13,10-6) +
	 * EE,WXN,I,SA,C,A,M to 0
	 */
	ldr	\tmp, =(SCTLR_EL2_RES1 | SCTLR_EL2_EE_LE |\
			SCTLR_EL2_WXN_DIS | SCTLR_EL2_ICACHE_DIS |\
			SCTLR_EL2_SA_DIS | SCTLR_EL2_DCACHE_DIS |\
			SCTLR_EL2_ALIGN_DIS | SCTLR_EL2_MMU_DIS)
	msr	sctlr_el2, \tmp

	mov	\tmp, sp
	msr	sp_el2, \tmp		/* Migrate SP */
	mrs	\tmp, vbar_el3
	msr	vbar_el2, \tmp		/* Migrate VBAR */

	/* Check switch to AArch64 EL2 or AArch32 Hypervisor mode */
	cmp	\flag, #ES_TO_AARCH32
	b.eq	1f

	/*
	 * The next lower exception level is AArch64: 64-bit EL2 | HCE |
	 * RES1 (bits [5:4]) | Non-secure EL0/EL1,
	 * and the SMD setting depends on requirements.
	 */
#ifdef CONFIG_ARMV8_PSCI
	ldr	\tmp, =(SCR_EL3_RW_AARCH64 | SCR_EL3_HCE_EN |\
			SCR_EL3_RES1 | SCR_EL3_NS_EN)
#else
	ldr	\tmp, =(SCR_EL3_RW_AARCH64 | SCR_EL3_HCE_EN |\
			SCR_EL3_SMD_DIS | SCR_EL3_RES1 |\
			SCR_EL3_NS_EN)
#endif

#ifdef CONFIG_ARMV8_EA_EL3_FIRST
	orr	\tmp, \tmp, #SCR_EL3_EA_EN
#endif
	msr	scr_el3, \tmp

	/* Return to the EL2_SP2 mode from EL3 */
	ldr	\tmp, =(SPSR_EL_DEBUG_MASK | SPSR_EL_SERR_MASK |\
			SPSR_EL_IRQ_MASK | SPSR_EL_FIQ_MASK |\
			SPSR_EL_M_AARCH64 | SPSR_EL_M_EL2H)
	msr	spsr_el3, \tmp
	msr	elr_el3, \ep
	eret

1:
	/*
	 * The next lower exception level is AArch32: 32-bit EL2 | HCE |
	 * SMD | RES1 (bits [5:4]) | Non-secure EL0/EL1.
	 */
	ldr	\tmp, =(SCR_EL3_RW_AARCH32 | SCR_EL3_HCE_EN |\
			SCR_EL3_SMD_DIS | SCR_EL3_RES1 |\
			SCR_EL3_NS_EN)
	msr	scr_el3, \tmp

	/* Return to AArch32 Hypervisor mode */
	ldr	\tmp, =(SPSR_EL_END_LE | SPSR_EL_ASYN_MASK |\
			SPSR_EL_IRQ_MASK | SPSR_EL_FIQ_MASK |\
			SPSR_EL_T_A32 | SPSR_EL_M_AARCH32 |\
			SPSR_EL_M_HYP)
	msr	spsr_el3, \tmp
	msr	elr_el3, \ep
	eret
.endm

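/*
 * Usage sketch (register choices are illustrative and the entry point symbol
 * is hypothetical); typically a small wrapper loads the entry point and the
 * execution-state flag before invoking the macro:
 *
 *	ldr	x4, =kernel_entry
 *	mov	x5, #ES_TO_AARCH64
 *	armv8_switch_to_el2_m x4, x5, x6
 *
 * The final eret in the macro drops to EL2 and starts executing at \ep.
 */
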
/*
 * Switch from EL2 to EL1 for ARMv8
 * @ep:    kernel entry point
 * @flag:  the execution state flag for the lower exception
 *         level, ES_TO_AARCH64 or ES_TO_AARCH32
 * @tmp:   temporary register
 * @tmp2:  second temporary register
 *
 * For loading a 32-bit OS, x1 is the machine nr and x2 is the FDT address.
 * For loading a 64-bit OS, x0 is the physical address of the FDT blob.
 * They will be passed to the guest.
 */
.macro armv8_switch_to_el1_m, ep, flag, tmp, tmp2
	/* Initialize Generic Timers */
	mrs	\tmp, cnthctl_el2
	/* Enable EL1 access to timers */
	orr	\tmp, \tmp, #(CNTHCTL_EL2_EL1PCEN_EN |\
		CNTHCTL_EL2_EL1PCTEN_EN)
	msr	cnthctl_el2, \tmp
	msr	cntvoff_el2, xzr

	/* Initialize MPID/MPIDR registers */
	mrs	\tmp, midr_el1
	msr	vpidr_el2, \tmp
	mrs	\tmp, mpidr_el1
	msr	vmpidr_el2, \tmp

	/* Disable coprocessor traps */
	mov	\tmp, #CPTR_EL2_RES1
	msr	cptr_el2, \tmp		/* Disable coprocessor traps to EL2 */
	msr	hstr_el2, xzr		/* Disable coprocessor traps to EL2 */
	mov	\tmp, #CPACR_EL1_FPEN_EN
	msr	cpacr_el1, \tmp		/* Enable FP/SIMD at EL1 */

	/* SCTLR_EL1 initialization
	 *
	 * setting RES1 bits (29,28,23,22,20,11) to 1
	 * and RES0 bits (31,30,27,21,17,13,10,6) +
	 * UCI,EE,EOE,WXN,nTWE,nTWI,UCT,DZE,I,UMA,SED,ITD,
	 * CP15BEN,SA0,SA,C,A,M to 0
	 */
	ldr	\tmp, =(SCTLR_EL1_RES1 | SCTLR_EL1_UCI_DIS |\
			SCTLR_EL1_EE_LE | SCTLR_EL1_WXN_DIS |\
			SCTLR_EL1_NTWE_DIS | SCTLR_EL1_NTWI_DIS |\
			SCTLR_EL1_UCT_DIS | SCTLR_EL1_DZE_DIS |\
			SCTLR_EL1_ICACHE_DIS | SCTLR_EL1_UMA_DIS |\
			SCTLR_EL1_SED_EN | SCTLR_EL1_ITD_EN |\
			SCTLR_EL1_CP15BEN_DIS | SCTLR_EL1_SA0_DIS |\
			SCTLR_EL1_SA_DIS | SCTLR_EL1_DCACHE_DIS |\
			SCTLR_EL1_ALIGN_DIS | SCTLR_EL1_MMU_DIS)
	msr	sctlr_el1, \tmp

	mov	\tmp, sp
	msr	sp_el1, \tmp		/* Migrate SP */
	mrs	\tmp, vbar_el2
	msr	vbar_el1, \tmp		/* Migrate VBAR */

	/* Check switch to AArch64 EL1 or AArch32 Supervisor mode */
	cmp	\flag, #ES_TO_AARCH32
	b.eq	1f

	/* Initialize HCR_EL2 */
	/* Only disable PAuth traps if PAuth is supported */
	mrs	\tmp, id_aa64isar1_el1
	ldr	\tmp2, =(ID_AA64ISAR1_EL1_GPI | ID_AA64ISAR1_EL1_GPA | \
		      ID_AA64ISAR1_EL1_API | ID_AA64ISAR1_EL1_APA)
	tst	\tmp, \tmp2
	mov	\tmp2, #(HCR_EL2_RW_AARCH64 | HCR_EL2_HCD_DIS)
	orr	\tmp, \tmp2, #(HCR_EL2_APK | HCR_EL2_API)
	csel	\tmp, \tmp2, \tmp, eq
	msr	hcr_el2, \tmp

	/* Return to the EL1_SP1 mode from EL2 */
	ldr	\tmp, =(SPSR_EL_DEBUG_MASK | SPSR_EL_SERR_MASK |\
			SPSR_EL_IRQ_MASK | SPSR_EL_FIQ_MASK |\
			SPSR_EL_M_AARCH64 | SPSR_EL_M_EL1H)
	msr	spsr_el2, \tmp
	msr	elr_el2, \ep
	eret

1:
	/* Initialize HCR_EL2 */
	ldr	\tmp, =(HCR_EL2_RW_AARCH32 | HCR_EL2_HCD_DIS)
	msr	hcr_el2, \tmp

	/* Return to AArch32 Supervisor mode from EL2 */
	ldr	\tmp, =(SPSR_EL_END_LE | SPSR_EL_ASYN_MASK |\
			SPSR_EL_IRQ_MASK | SPSR_EL_FIQ_MASK |\
			SPSR_EL_T_A32 | SPSR_EL_M_AARCH32 |\
			SPSR_EL_M_SVC)
	msr	spsr_el2, \tmp
	msr	elr_el2, \ep
	eret
.endm

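/*
 * Usage sketch (register choices and the entry point symbol are
 * illustrative); the second temporary is only needed for the PAuth check on
 * the AArch64 path:
 *
 *	ldr	x4, =kernel_entry
 *	mov	x5, #ES_TO_AARCH64
 *	armv8_switch_to_el1_m x4, x5, x6, x7
 */
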
#if defined(CONFIG_GICV3)
.macro gic_wait_for_interrupt_m xreg1
0:	wfi
	mrs	\xreg1, ICC_IAR1_EL1
	msr	ICC_EOIR1_EL1, \xreg1
	cbnz	\xreg1, 0b
.endm
#elif defined(CONFIG_GICV2)
.macro gic_wait_for_interrupt_m xreg1, wreg2
0:	wfi
	ldr	\wreg2, [\xreg1, GICC_AIAR]
	str	\wreg2, [\xreg1, GICC_AEOIR]
	and	\wreg2, \wreg2, #0x3ff
	cbnz	\wreg2, 0b
.endm
#endif

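/*
 * Usage sketch (the base-address register and the GICC_BASE symbol are
 * illustrative; on GICv2 the register must hold the GIC CPU interface base):
 *
 *	ldr	x0, =GICC_BASE
 *	gic_wait_for_interrupt_m x0, w1
 *
 * On GICv3 only one scratch register is needed:
 *
 *	gic_wait_for_interrupt_m x0
 */
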
#endif /* CONFIG_ARM64 */

#endif /* __ASSEMBLY__ */
#endif /* __ASM_ARM_MACRO_H__ */