return 1;
}
+/// Return the Constant that \p Ld reads from the constant pool, or nullptr
+/// if the load cannot be proven to read a supported constant-pool entry.
+///
+/// Only normal loads (per ISD::isNormalLoad) of a target constant pool with
+/// zero offset and no machine-specific entry are accepted. Two address
+/// shapes are recognized:
+///   1. (LLA cp)               - plain local address of the pool entry
+///   2. (ADD_LO (HI cp), cp)   - a %hi/%lo pair naming the same entry
+const Constant *
+RISCVTargetLowering::getTargetConstantFromLoad(LoadSDNode *Ld) const {
+ assert(Ld && "Unexpected null LoadSDNode");
+ if (!ISD::isNormalLoad(Ld))
+ return nullptr;
+
+ SDValue Ptr = Ld->getBasePtr();
+
+ // Only constant pools with no offset are supported.
+ auto GetSupportedConstantPool = [](SDValue Ptr) -> ConstantPoolSDNode * {
+ auto *CNode = dyn_cast<ConstantPoolSDNode>(Ptr);
+ if (!CNode || CNode->isMachineConstantPoolEntry() ||
+ CNode->getOffset() != 0)
+ return nullptr;
+
+ return CNode;
+ };
+
+ // Simple case, LLA.
+ if (Ptr.getOpcode() == RISCVISD::LLA) {
+ auto *CNode = GetSupportedConstantPool(Ptr);
+ // The LLA form must carry no relocation target flags.
+ if (!CNode || CNode->getTargetFlags() != 0)
+ return nullptr;
+
+ return CNode->getConstVal();
+ }
+
+ // Look for a HI and ADD_LO pair.
+ if (Ptr.getOpcode() != RISCVISD::ADD_LO ||
+ Ptr.getOperand(0).getOpcode() != RISCVISD::HI)
+ return nullptr;
+
+ auto *CNodeLo = GetSupportedConstantPool(Ptr.getOperand(1));
+ auto *CNodeHi = GetSupportedConstantPool(Ptr.getOperand(0).getOperand(0));
+
+ // The low half must carry the %lo flag and the high half the %hi flag.
+ if (!CNodeLo || CNodeLo->getTargetFlags() != RISCVII::MO_LO ||
+ !CNodeHi || CNodeHi->getTargetFlags() != RISCVII::MO_HI)
+ return nullptr;
+
+ // Both halves must name the same underlying constant.
+ if (CNodeLo->getConstVal() != CNodeHi->getConstVal())
+ return nullptr;
+
+ return CNodeLo->getConstVal();
+}
+
static MachineBasicBlock *emitReadCycleWidePseudo(MachineInstr &MI,
MachineBasicBlock *BB) {
assert(MI.getOpcode() == RISCV::ReadCycleWide && "Unexpected instruction");
; RV64ZBP: # %bb.0:
; RV64ZBP-NEXT: lui a1, %hi(.LCPI54_0)
; RV64ZBP-NEXT: ld a1, %lo(.LCPI54_0)(a1)
-; RV64ZBP-NEXT: lui a2, %hi(.LCPI54_1)
-; RV64ZBP-NEXT: ld a2, %lo(.LCPI54_1)(a2)
; RV64ZBP-NEXT: or a0, a0, a1
; RV64ZBP-NEXT: orc32 a0, a0
-; RV64ZBP-NEXT: or a0, a0, a2
; RV64ZBP-NEXT: ret
%tmp = or i64 %a, 72624976668147840 ; 0x102040810204080
%tmp2 = call i64 @llvm.riscv.gorc.i64(i64 %tmp, i64 32)
;
; RV64ZBP-LABEL: gorc2b_i64:
; RV64ZBP: # %bb.0:
-; RV64ZBP-NEXT: lui a1, %hi(.LCPI26_0)
-; RV64ZBP-NEXT: ld a1, %lo(.LCPI26_0)(a1)
-; RV64ZBP-NEXT: srli a2, a0, 2
-; RV64ZBP-NEXT: and a2, a2, a1
+; RV64ZBP-NEXT: srli a1, a0, 2
+; RV64ZBP-NEXT: or a1, a1, a0
+; RV64ZBP-NEXT: orc2.n a0, a0
+; RV64ZBP-NEXT: lui a2, %hi(.LCPI26_0)
+; RV64ZBP-NEXT: ld a2, %lo(.LCPI26_0)(a2)
; RV64ZBP-NEXT: lui a3, %hi(.LCPI26_1)
; RV64ZBP-NEXT: ld a3, %lo(.LCPI26_1)(a3)
-; RV64ZBP-NEXT: or a2, a2, a0
-; RV64ZBP-NEXT: orc2.n a0, a0
-; RV64ZBP-NEXT: slli a2, a2, 2
+; RV64ZBP-NEXT: slli a1, a1, 2
+; RV64ZBP-NEXT: and a1, a1, a2
+; RV64ZBP-NEXT: srli a2, a0, 2
; RV64ZBP-NEXT: and a2, a2, a3
-; RV64ZBP-NEXT: srli a3, a0, 2
-; RV64ZBP-NEXT: and a1, a3, a1
-; RV64ZBP-NEXT: or a0, a1, a0
-; RV64ZBP-NEXT: or a0, a0, a2
+; RV64ZBP-NEXT: or a0, a2, a0
+; RV64ZBP-NEXT: or a0, a0, a1
; RV64ZBP-NEXT: ret
%and1 = shl i64 %a, 2
%shl1 = and i64 %and1, -3689348814741910324