// | | imm12 | |
// +-------------------+-----------------------+-------------------+
-inline uint64_t encodePageOff12(uint64_t base, uint64_t va) {
- int scale = ((base & 0x3b000000) == 0x39000000) ? base >> 30 : 0;
+inline uint64_t encodePageOff12(uint32_t base, uint64_t va) {
+ int scale = 0;
+ if ((base & 0x3b00'0000) == 0x3900'0000) { // load/store
+ scale = base >> 30;
+ if (scale == 0 && (base & 0x0480'0000) == 0x0480'0000) // vector op?
+ scale = 4;
+ }
+
// TODO(gkm): extract embedded addend and warn if != 0
// uint64_t addend = ((base & 0x003FFC00) >> 10);
return (base | bitField(va, scale, 12 - scale, 10));
## PAGE21 relocations are aligned to 4096 bytes
# CHECK-NEXT: adrp x2, [[#]] ; 0x[[#BAZ+4096-128]]
# CHECK-NEXT: ldr x2, [x2, #128]
+# CHECK-NEXT: adrp x3, 8 ; 0x8000
+# CHECK-NEXT: ldr q0, [x3, #144]
# CHECK-NEXT: ret
# CHECK-LABEL: Contents of (__DATA_CONST,__const) section
# CHECK: [[#PTR_2]] {{0*}}[[#BAZ+123]] 00000000 00000000 00000000
.text
-.globl _foo, _bar, _baz
+.globl _foo, _bar, _baz, _quux
.p2align 2
_foo:
## Generates ARM64_RELOC_PAGE21 and ARM64_RELOC_ADDEND
adrp x2, _baz@PAGE + 4097
## Generates ARM64_RELOC_PAGEOFF12
ldr x2, [x2, _baz@PAGEOFF]
+
+ ## Generates ARM64_RELOC_PAGE21
+ adrp x3, _quux@PAGE
+ ## Generates ARM64_RELOC_PAGEOFF12 with internal slide 4
+ ldr q0, [x3, _quux@PAGEOFF]
ret
.p2align 2
_baz:
.space 1
+.p2align 4
+_quux:
+.quad 0
+.quad 80
+
.section __DATA_CONST,__const
## These generate ARM64_RELOC_UNSIGNED symbol relocations. llvm-mc seems to
## generate UNSIGNED section relocations only for compact unwind sections, so