1 /* SPDX-License-Identifier: GPL-2.0+ */
/*
 * relocate - common relocation function for ARM U-Boot
 *
 * Copyright (c) 2013 Albert ARIBAUD <albert.u.boot@aribaud.net>
 */
#include <asm-offsets.h>
#include <asm/assembler.h>
#include <config.h>
#include <elf.h>
#include <linux/linkage.h>
#ifdef CONFIG_CPU_V7M
#include <asm/armv7m.h>
#endif
/*
 * Default/weak exception vectors relocation routine
 *
 * This routine covers the standard ARM cases: normal (0x00000000),
 * high (0xffff0000) and VBAR. SoCs which do not comply with any of
 * the standard cases must provide their own, strong, version.
 */
25 .section .text.relocate_vectors,"ax",%progbits
26 .weak relocate_vectors
28 ENTRY(relocate_vectors)
32 * On ARMv7-M we only have to write the new vector address
35 ldr r0, [r9, #GD_RELOCADDR] /* r0 = gd->relocaddr */
37 str r0, [r1, V7M_SCB_VTOR]
39 #ifdef CONFIG_HAS_VBAR
41 * If the ARM processor has the security extensions,
42 * use VBAR to relocate the exception vectors.
44 ldr r0, [r9, #GD_RELOCADDR] /* r0 = gd->relocaddr */
45 mcr p15, 0, r0, c12, c0, 0 /* Set VBAR */
48 * Copy the relocated exception vectors to the
50 * CP15 c1 V bit gives us the location of the vectors:
51 * 0x00000000 or 0xFFFF0000.
53 ldr r0, [r9, #GD_RELOCADDR] /* r0 = gd->relocaddr */
54 mrc p15, 0, r2, c1, c0, 0 /* V bit (bit[13]) in CP15 c1 */
55 ands r2, r2, #(1 << 13)
56 ldreq r1, =0x00000000 /* If V=0 */
57 ldrne r1, =0xFFFF0000 /* If V=1 */
58 ldmia r0!, {r2-r8,r10}
59 stmia r1!, {r2-r8,r10}
60 ldmia r0!, {r2-r8,r10}
61 stmia r1!, {r2-r8,r10}
66 ENDPROC(relocate_vectors)
/*
 * void relocate_code(addr_moni)
 *
 * This function relocates the monitor code.
 *
 * In:    r0 = destination (relocation) address
 *        r9 = gd, used only indirectly via the caller's setup
 * Does:  copies [__image_copy_start, __image_copy_end) to r0, then
 *        patches every R_ARM_RELATIVE entry of .rel.dyn by the
 *        run-to-copy offset, then returns to the caller (which is
 *        expected to jump to the relocated copy itself).
 *
 * NOTE:
 * To prevent the code below from containing references with an R_ARM_ABS32
 * relocation record type, we never refer to linker-defined symbols directly.
 * Instead, we declare literals which contain their relative location with
 * respect to relocate_code, and at run time, add relocate_code back to them.
 */

ENTRY(relocate_code)
	adr	r3, relocate_code	/* r3 <- Run &relocate_code */
	ldr	r1, _image_copy_start_ofs
	add	r1, r3			/* r1 <- Run &__image_copy_start */
	subs	r4, r0, r1		/* r4 <- Run to copy offset */
	beq	relocate_done		/* skip relocation */
	ldr	r1, _image_copy_start_ofs
	add	r1, r3			/* r1 <- Run &__image_copy_start */
	ldr	r2, _image_copy_end_ofs
	add	r2, r3			/* r2 <- Run &__image_copy_end */

copy_loop:
	ldmia	r1!, {r10-r11}		/* copy from source address [r1] */
	stmia	r0!, {r10-r11}		/* copy to target address [r0] */
	cmp	r1, r2			/* until source end address [r2] */
	blo	copy_loop

	/*
	 * fix .rel.dyn relocations
	 */
	ldr	r1, _rel_dyn_start_ofs
	add	r2, r1, r3		/* r2 <- Run &__rel_dyn_start */
	ldr	r1, _rel_dyn_end_ofs
	add	r3, r1, r3		/* r3 <- Run &__rel_dyn_end */
fixloop:
	ldmia	r2!, {r0-r1}		/* (r0,r1) <- (SRC location,fixup) */
	and	r1, r1, #0xff		/* r1 <- relocation type */
	cmp	r1, #R_ARM_RELATIVE
	bne	fixnext			/* only R_ARM_RELATIVE is handled */

	/* relative fix: increase location by offset */
	add	r0, r0, r4		/* r0 <- location in relocated image */
	ldr	r1, [r0]
	add	r1, r1, r4		/* patch stored address by offset */
	str	r1, [r0]
fixnext:
	cmp	r2, r3			/* until fixup table end [r3] */
	blo	fixloop

relocate_done:
#ifdef __XSCALE__
	/*
	 * On xscale, icache must be invalidated and write buffers drained,
	 * even with cache disabled - 4.2.7 of xscale core developer's manual
	 */
	mcr	p15, 0, r0, c7, c7, 0	/* invalidate icache */
	mcr	p15, 0, r0, c7, c10, 4	/* drain write buffer */
#endif

	/* ARMv4- don't know bx lr but the assembler fails to see that */
#ifdef __ARM_ARCH_4__
	mov	pc, lr
#else
	bx	lr
#endif
ENDPROC(relocate_code)
/*
 * Literal pool: offsets of the linker-defined section boundaries relative
 * to relocate_code.  Stored as differences (resolved at link time, no
 * R_ARM_ABS32 records) and turned back into run-time addresses by adding
 * `adr`-captured &relocate_code — see the NOTE above relocate_code.
 */
_image_copy_start_ofs:
	.word	__image_copy_start - relocate_code
_image_copy_end_ofs:
	.word	__image_copy_end - relocate_code
_rel_dyn_start_ofs:
	.word	__rel_dyn_start - relocate_code
_rel_dyn_end_ofs:
	.word	__rel_dyn_end - relocate_code