}
static unsigned getPreIndexedOpcode(unsigned Opc) {
+ // FIXME: We don't currently support creating pre-indexed loads/stores when
+ // the load or store is the unscaled version. If we decide to perform such an
+ // optimization in the future, the cases for the unscaled loads/stores will
+ // need to be added here.
switch (Opc) {
default:
llvm_unreachable("Opcode has no pre-indexed equivalent!");
// ... (existing scaled cases elided)
}
}

static unsigned getPostIndexedOpcode(unsigned Opc) {
switch (Opc) {
default:
llvm_unreachable("Opcode has no post-indexed wise equivalent!");
case AArch64::STRSui:
+ case AArch64::STURSi:
return AArch64::STRSpost;
case AArch64::STRDui:
+ case AArch64::STURDi:
return AArch64::STRDpost;
case AArch64::STRQui:
+ case AArch64::STURQi:
return AArch64::STRQpost;
case AArch64::STRBBui:
return AArch64::STRBBpost;
case AArch64::STRHHui:
return AArch64::STRHHpost;
case AArch64::STRWui:
+ case AArch64::STURWi:
return AArch64::STRWpost;
case AArch64::STRXui:
+ case AArch64::STURXi:
return AArch64::STRXpost;
case AArch64::LDRSui:
+ case AArch64::LDURSi:
return AArch64::LDRSpost;
case AArch64::LDRDui:
+ case AArch64::LDURDi:
return AArch64::LDRDpost;
case AArch64::LDRQui:
+ case AArch64::LDURQi:
return AArch64::LDRQpost;
case AArch64::LDRBBui:
return AArch64::LDRBBpost;
case AArch64::LDRHHui:
return AArch64::LDRHHpost;
case AArch64::LDRWui:
+ case AArch64::LDURWi:
return AArch64::LDRWpost;
case AArch64::LDRXui:
+ case AArch64::LDURXi:
return AArch64::LDRXpost;
case AArch64::LDRSWui:
return AArch64::LDRSWpost;
// ... (remaining cases elided; the next hunk is in the pass's per-instruction loop)

++NumPostFolded;
break;
}
- // Don't know how to handle pre/post-index versions, so move to the next
- // instruction.
+
+ // Don't know how to handle unscaled pre/post-index versions below, so
+ // move to the next instruction.
if (TII->isUnscaledLdSt(Opc)) {
++MBBI;
break;
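
With the unscaled opcodes added to getPostIndexedOpcode, the load/store optimizer can fold a following base-register update into an unscaled access and emit the post-indexed form. Schematically, at the assembly level (a hand-written sketch of the pattern the MIR tests below exercise, not compiler output):

    stur    w1, [x0]        // unscaled store to [x0 + 0]
    sub     x0, x0, #4      // base-register update
    // ...is merged by aarch64-ldst-opt into:
    str     w1, [x0], #-4   // post-indexed store: store at [x0], then x0 -= 4
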
--- /dev/null
+# RUN: llc -mtriple=aarch64-none-linux-gnu -run-pass aarch64-ldst-opt -verify-machineinstrs -o - %s | FileCheck %s
+---
+# CHECK-LABEL: name: test_LDURSi_post
+# CHECK: LDRSpost %x0, -4
+name: test_LDURSi_post
+body: |
+ bb.0.entry:
+ liveins: %x0
+
+ %s0 = LDURSi %x0, 0
+ %x0 = SUBXri %x0, 4, 0
+ RET_ReallyLR implicit %x0
+...
+# CHECK-LABEL: name: test_LDURDi_post
+# CHECK: LDRDpost %x0, -4
+name: test_LDURDi_post
+body: |
+ bb.0.entry:
+ liveins: %x0
+
+ %d0 = LDURDi %x0, 0
+ %x0 = SUBXri %x0, 4, 0
+ RET_ReallyLR implicit %x0
+...
+# CHECK-LABEL: name: test_LDURQi_post
+# CHECK: LDRQpost %x0, -4
+name: test_LDURQi_post
+body: |
+ bb.0.entry:
+ liveins: %x0
+
+ %q0 = LDURQi %x0, 0
+ %x0 = SUBXri %x0, 4, 0
+ RET_ReallyLR implicit %x0
+...
+# CHECK-LABEL: name: test_LDURWi_post
+# CHECK: LDRWpost %x0, -4
+name: test_LDURWi_post
+body: |
+ bb.0.entry:
+ liveins: %x0
+
+ %w1 = LDURWi %x0, 0
+ %x0 = SUBXri %x0, 4, 0
+ RET_ReallyLR implicit %x0
+...
+# CHECK-LABEL: name: test_LDURXi_post
+# CHECK: %x1 = LDRXpost %x0, -4
+name: test_LDURXi_post
+body: |
+ bb.0.entry:
+ liveins: %x0
+
+ %x1 = LDURXi %x0, 0
+ %x0 = SUBXri %x0, 4, 0
+ RET_ReallyLR implicit %x0
+...
+# CHECK-LABEL: name: test_STURSi_post
+# CHECK: STRSpost %s0, %x0, -4
+name: test_STURSi_post
+body: |
+ bb.0.entry:
+ liveins: %x0
+
+ %s0 = FMOVS0
+ STURSi %s0, %x0, 0
+ %x0 = SUBXri %x0, 4, 0
+ RET_ReallyLR implicit %x0
+...
+# CHECK-LABEL: name: test_STURDi_post
+# CHECK: STRDpost %d0, %x0, -4
+name: test_STURDi_post
+body: |
+ bb.0.entry:
+ liveins: %x0
+
+ %d0 = FMOVD0
+ STURDi %d0, %x0, 0
+ %x0 = SUBXri %x0, 4, 0
+ RET_ReallyLR implicit %x0
+...
+# CHECK-LABEL: name: test_STURQi_post
+# CHECK: STRQpost %q0, %x0, -4
+name: test_STURQi_post
+body: |
+ bb.0.entry:
+ liveins: %x0
+
+ %q0 = MOVIv4i32 0, 0
+ STURQi %q0, %x0, 0
+ %x0 = SUBXri %x0, 4, 0
+ RET_ReallyLR implicit %x0
+...
+# CHECK-LABEL: name: test_STURWi_post
+# CHECK: STRWpost %wzr, %x0, -4
+name: test_STURWi_post
+body: |
+ bb.0.entry:
+ liveins: %x0
+
+ STURWi %wzr, %x0, 0
+ %x0 = SUBXri %x0, 4, 0
+ RET_ReallyLR implicit %x0
+...
+# CHECK-LABEL: name: test_STURXi_post
+# CHECK: STRXpost %xzr, %x0, -4
+name: test_STURXi_post
+body: |
+ bb.0.entry:
+ liveins: %x0
+
+ STURXi %xzr, %x0, 0
+ %x0 = SUBXri %x0, 4, 0
+ RET_ReallyLR implicit %x0
+...
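
The load cases follow the same shape; mirroring test_LDURWi_post above (again a sketch of the intended rewrite, not captured output):

    ldur    w1, [x0]        // unscaled load from [x0 + 0]
    sub     x0, x0, #4      // base-register update
    // ...is merged by aarch64-ldst-opt into:
    ldr     w1, [x0], #-4   // post-indexed load: load from [x0], then x0 -= 4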