From: Jez Ng Date: Sat, 27 Feb 2021 17:30:19 +0000 (-0500) Subject: [lld-macho] Extract embedded addends for arm64 UNSIGNED relocations X-Git-Tag: llvmorg-14-init~13856 X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=82b3da6f6f0e91e47074f454907e82b284b8beda;p=platform%2Fupstream%2Fllvm.git [lld-macho] Extract embedded addends for arm64 UNSIGNED relocations On arm64, UNSIGNED relocs are the only ones that use embedded addends instead of the ADDEND relocation. Also ensure that the addend works when UNSIGNED is part of a SUBTRACTOR pair. Reviewed By: #lld-macho, alexshap Differential Revision: https://reviews.llvm.org/D97105 --- diff --git a/lld/MachO/Arch/ARM64.cpp b/lld/MachO/Arch/ARM64.cpp index 8428ef4..1813950 100644 --- a/lld/MachO/Arch/ARM64.cpp +++ b/lld/MachO/Arch/ARM64.cpp @@ -79,8 +79,23 @@ const TargetInfo::RelocAttrs &ARM64::getRelocAttrs(uint8_t type) const { uint64_t ARM64::getEmbeddedAddend(MemoryBufferRef mb, const section_64 &sec, const relocation_info rel) const { - // TODO(gkm): extract embedded addend just so we can assert that it is 0 - return 0; + if (rel.r_type != ARM64_RELOC_UNSIGNED) { + // All other reloc types should use the ADDEND relocation to store their + // addends. 
+ // TODO(gkm): extract embedded addend just so we can assert that it is 0
+ return 0;
+ }
+
+ auto *buf = reinterpret_cast<const uint8_t *>(mb.getBufferStart());
+ const uint8_t *loc = buf + sec.offset + rel.r_address;
+ switch (rel.r_length) {
+ case 2:
+ return read32le(loc);
+ case 3:
+ return read64le(loc);
+ default:
+ llvm_unreachable("invalid r_length");
+ }
}

inline uint64_t bitField(uint64_t value, int right, int width, int left) {
diff --git a/lld/MachO/InputFiles.cpp b/lld/MachO/InputFiles.cpp
index 9203022..5712611 100644
--- a/lld/MachO/InputFiles.cpp
+++ b/lld/MachO/InputFiles.cpp
@@ -278,9 +278,6 @@ void ObjFile::parseRelocations(const section_64 &sec,
       continue;
     if (relInfo.r_address & R_SCATTERED)
       fatal("TODO: Scattered relocations not supported");
-    uint64_t embeddedAddend = target->getEmbeddedAddend(mb, sec, relInfo);
-    assert(!(embeddedAddend && pairedAddend));
-    uint64_t totalAddend = pairedAddend + embeddedAddend;

     Reloc p;
     if (target->hasAttr(relInfo.r_type, RelocAttrBits::SUBTRAHEND)) {
@@ -292,6 +289,9 @@ void ObjFile::parseRelocations(const section_64 &sec,
       assert(target->hasAttr(relInfo.r_type, RelocAttrBits::UNSIGNED) &&
              relInfo.r_extern);
     }
+    uint64_t embeddedAddend = target->getEmbeddedAddend(mb, sec, relInfo);
+    assert(!(embeddedAddend && pairedAddend));
+    uint64_t totalAddend = pairedAddend + embeddedAddend;
     Reloc r;
     r.type = relInfo.r_type;
     r.pcrel = relInfo.r_pcrel;
diff --git a/lld/test/MachO/arm64-relocs.s b/lld/test/MachO/arm64-relocs.s
index 0f3dd87..1ce5e10 100644
--- a/lld/test/MachO/arm64-relocs.s
+++ b/lld/test/MachO/arm64-relocs.s
@@ -18,8 +18,8 @@
 # CHECK-NEXT: ret

 # CHECK-LABEL: Contents of (__DATA_CONST,__const) section
-# CHECK: [[#PTR_1]] {{0*}}[[#BAZ]] 00000000 00000000 00000000
-# CHECK: [[#PTR_2]] {{0*}}[[#BAZ]] 00000000 00000000 00000000
+# CHECK: [[#PTR_1]] {{0*}}[[#BAZ]] 00000000 00000000 00000000
+# CHECK: [[#PTR_2]] {{0*}}[[#BAZ+123]] 00000000 00000000 00000000

 .text
 .globl _foo, _bar, _baz
@@ -50,7 +50,7 @@
 _ptr_1:
   .quad _baz
.space 8 _ptr_2: - .quad _baz + .quad _baz + 123 .space 8 .subsections_via_symbols diff --git a/lld/test/MachO/reloc-subtractor.s b/lld/test/MachO/reloc-subtractor.s index e445184..dead12f 100644 --- a/lld/test/MachO/reloc-subtractor.s +++ b/lld/test/MachO/reloc-subtractor.s @@ -14,9 +14,9 @@ # CHECK: {{0*}}[[#%x, SUB4ADDR:]] l {{.*}} __DATA,__data _sub4 # CHECK-LABEL: Contents of section __DATA,__data: # CHECK: [[#SUB1ADDR]] 10000000 -# CHECK-NEXT: [[#SUB2ADDR]] f0ffffff -# CHECK-NEXT: [[#SUB3ADDR]] 10000000 00000000 -# CHECK-NEXT: [[#SUB4ADDR]] f0ffffff ffffffff +# CHECK-NEXT: [[#SUB2ADDR]] f2ffffff +# CHECK-NEXT: [[#SUB3ADDR]] 14000000 00000000 +# CHECK-NEXT: [[#SUB4ADDR]] f6ffffff ffffffff # CHECK: Rebase table: # CHECK-NEXT: segment section address type # CHECK-EMPTY: @@ -36,13 +36,13 @@ _sub1: .long _minuend_1 - _subtrahend_1 .space 12 _sub2: - .long _minuend_2 - _subtrahend_2 + .long _minuend_2 - _subtrahend_2 + 2 .space 12 _sub3: - .quad _minuend_1 - _subtrahend_1 + .quad _minuend_1 - _subtrahend_1 + 4 .space 8 _sub4: - .quad _minuend_2 - _subtrahend_2 + .quad _minuend_2 - _subtrahend_2 + 6 .text .p2align 2