// anyway for most targets.
if (!isPowerOf2_32(NewSizeBits))
return false;
+
+ const MachineMemOperand &MMO = LoadDef->getMMO();
+ LegalityQuery::MemDesc MMDesc;
+ MMDesc.MemoryTy = LLT::scalar(NewSizeBits);
+ MMDesc.AlignInBits = MMO.getAlign().value() * 8;
+ MMDesc.Ordering = MMO.getSuccessOrdering();
+ if (!isLegalOrBeforeLegalizer({TargetOpcode::G_SEXTLOAD,
+ {MRI.getType(LoadDef->getDstReg()),
+ MRI.getType(LoadDef->getPointerReg())},
+ {MMDesc}}))
+ return false;
+
MatchInfo = std::make_tuple(LoadDef->getDstReg(), NewSizeBits);
return true;
}
--- /dev/null
+# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
+# RUN: llc -global-isel -mtriple=amdgcn-amd-amdhsa -mcpu=gfx900 -run-pass=amdgpu-postlegalizer-combiner -verify-machineinstrs -o - %s | FileCheck %s
+
+# The post-legalizer combiner must not fold G_LOAD + G_SEXT_INREG into a
+# G_SEXTLOAD whose memory type is illegal for the target.
+---
+name: sextload_from_inreg
+tracksRegLiveness: true
+legalized: true
+body: |
+  bb.0:
+    liveins: $vgpr0_vgpr1
+    ; CHECK-LABEL: name: sextload_from_inreg
+    ; CHECK: liveins: $vgpr0_vgpr1
+    ; CHECK: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+    ; CHECK: [[LOAD:%[0-9]+]]:_(s64) = G_LOAD [[COPY]](p1) :: (load (s64), addrspace 1)
+    ; CHECK: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[LOAD]], 8
+    ; CHECK: $vgpr0_vgpr1 = COPY [[SEXT_INREG]](s64)
+    ; The CHECK lines above assert the combine did NOT fire: the s64 load
+    ; and the 8-bit sign-extension stay as separate instructions. Folding
+    ; them would form a G_SEXTLOAD with an s8 memory type producing s64,
+    ; which is presumably rejected by the new legality query for this
+    ; target/CPU (see the RUN line) -- confirm against the legalizer rules.
+    %0:_(p1) = COPY $vgpr0_vgpr1
+    %1:_(s64) = G_LOAD %0 :: (load (s64), align 8, addrspace 1)
+    %2:_(s64) = G_SEXT_INREG %1, 8
+    $vgpr0_vgpr1 = COPY %2
+...