/* SPDX-License-Identifier: GPL-2.0+ */
/*
 * David Feng <fenghua@phytium.com.cn>
 */

#include <asm-offsets.h>
#include <config.h>
#include <linux/linkage.h>
#include <asm/macro.h>
#include <asm/armv8/mmu.h>

/*************************************************************************
 *
 * Startup Code (reset vector)
 *
 *************************************************************************/

.globl	_start
_start:
#if defined(CONFIG_LINUX_KERNEL_IMAGE_HEADER)
#include <asm/boot0-linux-kernel-header.h>
#elif defined(CONFIG_ENABLE_ARM_SOC_BOOT0_HOOK)
/*
 * Various SoCs need something special and SoC-specific up front in
 * order to boot; allow them to set that in their boot0.h file and
 * then use it here.
 */
#include <asm/arch/boot0.h>
#else
	b	reset
#endif

	.align 3

.globl	_TEXT_BASE
_TEXT_BASE:
	.quad	CONFIG_SYS_TEXT_BASE

/*
 * These are defined in the linker script.
 */
.globl	_bss_start_ofs
_bss_start_ofs:
	.quad	__bss_start - _start

.globl	_bss_end_ofs
_bss_end_ofs:
	.quad	__bss_end - _start

reset:
	/* Allow the board to save important registers */
	b	save_boot_params
.globl	save_boot_params_ret
save_boot_params_ret:
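	/*
	 * save_boot_params is a weak stub (defined at the end of this file)
	 * that simply branches back here; boards that need to preserve
	 * registers handed over by the boot ROM override it and return
	 * through save_boot_params_ret.
	 */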

#if CONFIG_POSITION_INDEPENDENT
	/*
	 * Fix .rela.dyn relocations. This allows U-Boot to be loaded to and
	 * executed at a different address than it was linked at.
	 */
	adr	x0, _start		/* x0 <- Runtime value of _start */
	ldr	x1, _TEXT_BASE		/* x1 <- Linked value of _start */
	sub	x9, x0, x1		/* x9 <- Run-vs-link offset */
	adr	x2, __rel_dyn_start	/* x2 <- Runtime &__rel_dyn_start */
	adr	x3, __rel_dyn_end	/* x3 <- Runtime &__rel_dyn_end */
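	/*
	 * Each .rela.dyn entry is an Elf64_Rela: an 8-byte offset, an 8-byte
	 * info word and an 8-byte addend. Only R_AARCH64_RELATIVE entries
	 * (type 1027) are patched here; anything else is skipped.
	 */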
fix_loop:
	ldp	x0, x1, [x2], #16	/* (x0, x1) <- (Link location, fixup) */
	ldr	x4, [x2], #8		/* x4 <- addend */
	cmp	w1, #1027		/* relative fixup? */
	bne	fix_skip
	/* relative fix: store addend plus offset at dest location */
	add	x0, x0, x9
	add	x4, x4, x9
	str	x4, [x0]
fix_skip:
	cmp	x2, x3
	b.lo	fix_loop
#endif

#ifdef CONFIG_SYS_RESET_SCTRL
	bl	reset_sctrl
#endif

#if defined(CONFIG_ARMV8_SPL_EXCEPTION_VECTORS) || !defined(CONFIG_SPL_BUILD)
.macro	set_vbar, regname, reg
	msr	\regname, \reg
.endm
	adr	x0, vectors
#else
.macro	set_vbar, regname, reg
.endm
#endif
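	/*
	 * When exception vectors are built in, set_vbar programs VBAR_ELx
	 * with the address of "vectors" loaded into x0 above; otherwise the
	 * macro expands to nothing and the set_vbar call sites below are
	 * no-ops.
	 */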

	/*
	 * Could be EL3/EL2/EL1, Initial State:
	 * Little Endian, MMU Disabled, i/dCache Disabled
	 */
	switch_el x1, 3f, 2f, 1f
3:	set_vbar vbar_el3, x0
	mrs	x0, scr_el3
	orr	x0, x0, #0xf			/* SCR_EL3.NS|IRQ|FIQ|EA */
	msr	scr_el3, x0
	msr	cptr_el3, xzr			/* Enable FP/SIMD */
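	/*
	 * SCR_EL3 bits [3:0] are NS, IRQ, FIQ and EA: lower ELs run
	 * non-secure and external aborts, FIQs and IRQs are routed to EL3.
	 * Writing zero to CPTR_EL3 clears TFP, so FP/SIMD accesses are not
	 * trapped to EL3.
	 */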
#ifdef COUNTER_FREQUENCY
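	/*
	 * CNTFRQ_EL0 is only writable from the highest implemented exception
	 * level, which is why the generic timer frequency is programmed on
	 * the EL3 path only.
	 */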
	ldr	x0, =COUNTER_FREQUENCY
	msr	cntfrq_el0, x0			/* Initialize CNTFRQ */
#endif
	b	0f
2:	set_vbar	vbar_el2, x0
	mov	x0, #0x33ff
	msr	cptr_el2, x0			/* Enable FP/SIMD */
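	/* 0x33ff keeps the CPTR_EL2 RES1 bits set while leaving TFP clear. */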
	b	0f
1:	set_vbar	vbar_el1, x0
	mov	x0, #3 << 20
	msr	cpacr_el1, x0			/* Enable FP/SIMD */
0:

	/*
	 * Enable SMPEN bit for coherency.
	 * This register is not architectural but at the moment
	 * this bit should be set for A53/A57/A72.
	 */
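	/*
	 * CPUECTLR_EL1 (S3_1_c15_c2_1) is the IMPLEMENTATION DEFINED CPU
	 * Extended Control Register; bit 6 (0x40) is SMPEN and has to be set
	 * before the caches and MMU are enabled, otherwise this core does not
	 * take part in hardware cache coherency.
	 */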
#ifdef CONFIG_ARMV8_SET_SMPEN
	switch_el x1, 3f, 1f, 1f
3:
	mrs	x0, S3_1_c15_c2_1		/* cpuectlr_el1 */
	orr	x0, x0, #0x40
	msr	S3_1_c15_c2_1, x0
1:
#endif

	/* Apply ARM core specific errata */
	bl	apply_core_errata

	/*
	 * Cache/BPB/TLB Invalidate
	 * i-cache is invalidated before being enabled in icache_enable()
	 * TLB is invalidated before the MMU is enabled in dcache_enable()
	 * d-cache is invalidated before being enabled in dcache_enable()
	 */

	/* Processor specific initialization */
	bl	lowlevel_init

#if defined(CONFIG_ARMV8_SPIN_TABLE) && !defined(CONFIG_SPL_BUILD)
	branch_if_master x0, x1, master_cpu
	b	spin_table_secondary_jump
	/* never return */
#elif defined(CONFIG_ARMV8_MULTIENTRY)
	branch_if_master x0, x1, master_cpu

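	/*
	 * Secondary (slave) CPUs park here: each one sleeps in wfe until the
	 * release address at CPU_RELEASE_ADDR becomes non-zero and then
	 * branches to it, mirroring the Linux spin-table enable-method. The
	 * master wakes them later, e.g. via smp_kick_all_cpus below.
	 */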
slave_cpu:
	wfe
	ldr	x1, =CPU_RELEASE_ADDR
	ldr	x0, [x1]
	cbz	x0, slave_cpu
	br	x0			/* branch to the given address */
#endif /* CONFIG_ARMV8_MULTIENTRY */
master_cpu:
	bl	_main
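
/*
 * reset_sctrl clears the MMU, d-cache and data-endianness control bits in
 * SCTLR for the current exception level and then invalidates the TLBs via
 * __asm_invalidate_tlb_all.
 */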

#ifdef CONFIG_SYS_RESET_SCTRL
reset_sctrl:
	switch_el x1, 3f, 2f, 1f
3:	mrs	x0, sctlr_el3
	b	0f
2:	mrs	x0, sctlr_el2
	b	0f
1:	mrs	x0, sctlr_el1
0:	ldr	x1, =0xfdfffffa
	and	x0, x0, x1

	switch_el x1, 6f, 5f, 4f
6:	msr	sctlr_el3, x0
	b	7f
5:	msr	sctlr_el2, x0
	b	7f
4:	msr	sctlr_el1, x0
7:	dsb	sy
	isb
	b	__asm_invalidate_tlb_all
#endif

/*-----------------------------------------------------------------------*/

WEAK(apply_core_errata)

	mov	x29, lr			/* Save LR */
	/* For now, we support Cortex-A53, Cortex-A57 specific errata */

	/* Check if we are running on a Cortex-A53 core */
	branch_if_a53_core x0, apply_a53_core_errata

	/* Check if we are running on a Cortex-A57 core */
	branch_if_a57_core x0, apply_a57_core_errata
0:
	mov	lr, x29			/* Restore LR */
	ret
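	/*
	 * The per-core errata handlers below do not return with ret; they
	 * branch back to the numeric label 0 above once their workarounds
	 * have been applied.
	 */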

apply_a53_core_errata:

#ifdef CONFIG_ARM_ERRATA_855873
	mrs	x0, S3_1_c15_c2_0	/* cpuactlr_el1 */
	/* Enable data cache clean as data cache clean/invalidate */
	orr	x0, x0, #1 << 44
	msr	S3_1_c15_c2_0, x0	/* cpuactlr_el1 */
#endif
	b	0b

apply_a57_core_errata:

#ifdef CONFIG_ARM_ERRATA_828024
	mrs	x0, S3_1_c15_c2_0	/* cpuactlr_el1 */
	/* Disable non-allocate hint of w-b-n-a memory type */
	orr	x0, x0, #1 << 49
	/* Disable write streaming no L1-allocate threshold */
	orr	x0, x0, #3 << 25
	/* Disable write streaming no-allocate threshold */
	orr	x0, x0, #3 << 27
	msr	S3_1_c15_c2_0, x0	/* cpuactlr_el1 */
#endif
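	/*
	 * All of these workarounds read-modify-write the IMPLEMENTATION
	 * DEFINED CPUACTLR_EL1 (S3_1_c15_c2_0), which is only writable from
	 * lower ELs if ACTLR_EL3/ACTLR_EL2 grant access, so they are applied
	 * here before any drop to a lower exception level.
	 */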

#ifdef CONFIG_ARM_ERRATA_826974
	mrs	x0, S3_1_c15_c2_0	/* cpuactlr_el1 */
	/* Disable speculative load execution ahead of a DMB */
	orr	x0, x0, #1 << 59
	msr	S3_1_c15_c2_0, x0	/* cpuactlr_el1 */
#endif

#ifdef CONFIG_ARM_ERRATA_833471
	mrs	x0, S3_1_c15_c2_0	/* cpuactlr_el1 */
	/*
	 * FPSCR write flush.
	 * Note that in some cases where a flush is unnecessary this
	 * could impact performance.
	 */
	orr	x0, x0, #1 << 38
	msr	S3_1_c15_c2_0, x0	/* cpuactlr_el1 */
#endif

#ifdef CONFIG_ARM_ERRATA_829520
	mrs	x0, S3_1_c15_c2_0	/* cpuactlr_el1 */
	/*
	 * Setting the Disable Indirect Predictor bit prevents this erratum
	 * from occurring.
	 * Note that in some cases where a flush is unnecessary this
	 * could impact performance.
	 */
	orr	x0, x0, #1 << 4
	msr	S3_1_c15_c2_0, x0	/* cpuactlr_el1 */
#endif

#ifdef CONFIG_ARM_ERRATA_833069
	mrs	x0, S3_1_c15_c2_0	/* cpuactlr_el1 */
	/* Clear the "Enable Invalidates of BTB" bit */
	and	x0, x0, #0xE
	msr	S3_1_c15_c2_0, x0	/* cpuactlr_el1 */
#endif
	b	0b
ENDPROC(apply_core_errata)

/*-----------------------------------------------------------------------*/

WEAK(lowlevel_init)
	mov	x29, lr			/* Save LR */

#if defined(CONFIG_GICV2) || defined(CONFIG_GICV3)
	branch_if_slave x0, 1f
	ldr	x0, =GICD_BASE
	bl	gic_init_secure
1:
#if defined(CONFIG_GICV3)
	ldr	x0, =GICR_BASE
	bl	gic_init_secure_percpu
#elif defined(CONFIG_GICV2)
	ldr	x0, =GICD_BASE
	ldr	x1, =GICC_BASE
	bl	gic_init_secure_percpu
#endif
#endif
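	/*
	 * gic_init_secure programs the shared GIC distributor and therefore
	 * runs on the master CPU only; gic_init_secure_percpu runs on every
	 * CPU to set up its banked CPU interface (GICv2) or redistributor
	 * (GICv3) registers.
	 */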

#ifdef CONFIG_ARMV8_MULTIENTRY
	branch_if_master x0, x1, 2f

	/*
	 * Slaves should wait for the master to finish clearing the spin
	 * table. This sync prevents slaves from observing a stale spin-table
	 * value and jumping to the wrong place.
	 */
#if defined(CONFIG_GICV2) || defined(CONFIG_GICV3)
#ifdef CONFIG_GICV2
	ldr	x0, =GICC_BASE
#endif
	bl	gic_wait_for_interrupt
#endif

	/*
	 * All slaves will enter EL2 and optionally EL1.
	 */
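	/*
	 * armv8_switch_to_el2 takes the address to continue at in x4 and the
	 * target execution state in x5 (ES_TO_AARCH64 keeps the CPU in
	 * AArch64); it drops the exception level and then branches to x4.
	 */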
	adr	x4, lowlevel_in_el2
	ldr	x5, =ES_TO_AARCH64
	bl	armv8_switch_to_el2

lowlevel_in_el2:
#ifdef CONFIG_ARMV8_SWITCH_TO_EL1
	adr	x4, lowlevel_in_el1
	ldr	x5, =ES_TO_AARCH64
	bl	armv8_switch_to_el1

lowlevel_in_el1:
#endif

#endif /* CONFIG_ARMV8_MULTIENTRY */

2:
	mov	lr, x29			/* Restore LR */
	ret
ENDPROC(lowlevel_init)

WEAK(smp_kick_all_cpus)
	/* Kick secondary cpus up by SGI 0 interrupt */
#if defined(CONFIG_GICV2) || defined(CONFIG_GICV3)
	ldr	x0, =GICD_BASE
	b	gic_kick_secondary_cpus
#endif
	ret
ENDPROC(smp_kick_all_cpus)

/*-----------------------------------------------------------------------*/

ENTRY(c_runtime_cpu_setup)
#if defined(CONFIG_ARMV8_SPL_EXCEPTION_VECTORS) || !defined(CONFIG_SPL_BUILD)
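	/*
	 * By this point U-Boot has been relocated to the top of RAM, so the
	 * exception vectors must be re-pointed at their relocated copy.
	 */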
	/* Relocate vBAR */
	adr	x0, vectors
	switch_el x1, 3f, 2f, 1f
3:	msr	vbar_el3, x0
	b	0f
2:	msr	vbar_el2, x0
	b	0f
1:	msr	vbar_el1, x0
0:
#endif

	ret
ENDPROC(c_runtime_cpu_setup)

WEAK(save_boot_params)
	b	save_boot_params_ret	/* back to my caller */
ENDPROC(save_boot_params)