From: Duncan P. N. Exon Smith
Date: Mon, 22 Feb 2016 03:12:42 +0000 (+0000)
Subject: Revert "CodeGen: Use references in MachineTraceMetrics::Trace, NFC"
X-Git-Tag: llvmorg-3.9.0-rc1~13578
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=0cc90a9147ccaef59cdb9caf4d68de990e5fdd05;p=platform%2Fupstream%2Fllvm.git

Revert "CodeGen: Use references in MachineTraceMetrics::Trace, NFC"

This reverts commit r261509.  I'm not sure how this compiled locally, but
something was out of whack.

llvm-svn: 261510
---

diff --git a/llvm/include/llvm/CodeGen/MachineTraceMetrics.h b/llvm/include/llvm/CodeGen/MachineTraceMetrics.h
index 06db17a..4cd3810 100644
--- a/llvm/include/llvm/CodeGen/MachineTraceMetrics.h
+++ b/llvm/include/llvm/CodeGen/MachineTraceMetrics.h
@@ -276,24 +276,24 @@ public:
     /// Return the depth and height of MI. The depth is only valid for
     /// instructions in or above the trace center block. The height is only
     /// valid for instructions in or below the trace center block.
-    InstrCycles getInstrCycles(const MachineInstr &MI) const {
-      return TE.Cycles.lookup(&MI);
+    InstrCycles getInstrCycles(const MachineInstr *MI) const {
+      return TE.Cycles.lookup(MI);
     }
 
     /// Return the slack of MI. This is the number of cycles MI can be delayed
     /// before the critical path becomes longer.
     /// MI must be an instruction in the trace center block.
-    unsigned getInstrSlack(const MachineInstr &MI) const;
+    unsigned getInstrSlack(const MachineInstr *MI) const;
 
     /// Return the Depth of a PHI instruction in a trace center block successor.
     /// The PHI does not have to be part of the trace.
-    unsigned getPHIDepth(const MachineInstr &PHI) const;
+    unsigned getPHIDepth(const MachineInstr *PHI) const;
 
     /// A dependence is useful if the basic block of the defining instruction
     /// is part of the trace of the user instruction. It is assumed that DefMI
     /// dominates UseMI (see also isUsefulDominator).
-    bool isDepInTrace(const MachineInstr &DefMI,
-                      const MachineInstr &UseMI) const;
+    bool isDepInTrace(const MachineInstr *DefMI,
+                      const MachineInstr *UseMI) const;
   };
 
   /// A trace ensemble is a collection of traces selected using the same
diff --git a/llvm/lib/CodeGen/EarlyIfConversion.cpp b/llvm/lib/CodeGen/EarlyIfConversion.cpp
index f253dc8..3e92a7a 100644
--- a/llvm/lib/CodeGen/EarlyIfConversion.cpp
+++ b/llvm/lib/CodeGen/EarlyIfConversion.cpp
@@ -718,7 +718,7 @@ bool EarlyIfConverter::shouldConvertIf() {
   // TBB / FBB data dependencies may delay the select even more.
   MachineTraceMetrics::Trace HeadTrace = MinInstr->getTrace(IfConv.Head);
   unsigned BranchDepth =
-      HeadTrace.getInstrCycles(*IfConv.Head->getFirstTerminator()).Depth;
+      HeadTrace.getInstrCycles(IfConv.Head->getFirstTerminator()).Depth;
   DEBUG(dbgs() << "Branch depth: " << BranchDepth << '\n');
 
   // Look at all the tail phis, and compute the critical path extension caused
@@ -726,8 +726,8 @@ bool EarlyIfConverter::shouldConvertIf() {
   MachineTraceMetrics::Trace TailTrace = MinInstr->getTrace(IfConv.Tail);
   for (unsigned i = 0, e = IfConv.PHIs.size(); i != e; ++i) {
     SSAIfConv::PHIInfo &PI = IfConv.PHIs[i];
-    unsigned Slack = TailTrace.getInstrSlack(*PI.PHI);
-    unsigned MaxDepth = Slack + TailTrace.getInstrCycles(*PI.PHI).Depth;
+    unsigned Slack = TailTrace.getInstrSlack(PI.PHI);
+    unsigned MaxDepth = Slack + TailTrace.getInstrCycles(PI.PHI).Depth;
     DEBUG(dbgs() << "Slack " << Slack << ":\t" << *PI.PHI);
 
     // The condition is pulled into the critical path.
@@ -742,7 +742,7 @@ bool EarlyIfConverter::shouldConvertIf() {
     }
 
     // The TBB value is pulled into the critical path.
-    unsigned TDepth = adjCycles(TBBTrace.getPHIDepth(*PI.PHI), PI.TCycles);
+    unsigned TDepth = adjCycles(TBBTrace.getPHIDepth(PI.PHI), PI.TCycles);
     if (TDepth > MaxDepth) {
       unsigned Extra = TDepth - MaxDepth;
       DEBUG(dbgs() << "TBB data adds " << Extra << " cycles.\n");
@@ -753,7 +753,7 @@ bool EarlyIfConverter::shouldConvertIf() {
     }
 
     // The FBB value is pulled into the critical path.
-    unsigned FDepth = adjCycles(FBBTrace.getPHIDepth(*PI.PHI), PI.FCycles);
+    unsigned FDepth = adjCycles(FBBTrace.getPHIDepth(PI.PHI), PI.FCycles);
     if (FDepth > MaxDepth) {
       unsigned Extra = FDepth - MaxDepth;
       DEBUG(dbgs() << "FBB data adds " << Extra << " cycles.\n");
diff --git a/llvm/lib/CodeGen/MachineCombiner.cpp b/llvm/lib/CodeGen/MachineCombiner.cpp
index b5416f9..fa43c4d 100644
--- a/llvm/lib/CodeGen/MachineCombiner.cpp
+++ b/llvm/lib/CodeGen/MachineCombiner.cpp
@@ -156,7 +156,7 @@ MachineCombiner::getDepth(SmallVectorImpl<MachineInstr *> &InsInstrs,
       } else {
         MachineInstr *DefInstr = getOperandDef(MO);
         if (DefInstr) {
-          DepthOp = BlockTrace.getInstrCycles(*DefInstr).Depth;
+          DepthOp = BlockTrace.getInstrCycles(DefInstr).Depth;
           LatencyOp = TSchedModel.computeOperandLatency(
               DefInstr, DefInstr->findRegisterDefOperandIdx(MO.getReg()),
               InstrPtr, InstrPtr->findRegisterUseOperandIdx(MO.getReg()));
@@ -198,7 +198,7 @@ unsigned MachineCombiner::getLatency(MachineInstr *Root, MachineInstr *NewRoot,
     RI++;
     MachineInstr *UseMO = RI->getParent();
     unsigned LatencyOp = 0;
-    if (UseMO && BlockTrace.isDepInTrace(*Root, *UseMO)) {
+    if (UseMO && BlockTrace.isDepInTrace(Root, UseMO)) {
       LatencyOp = TSchedModel.computeOperandLatency(
           NewRoot, NewRoot->findRegisterDefOperandIdx(MO.getReg()), UseMO,
           UseMO->findRegisterUseOperandIdx(MO.getReg()));
@@ -250,7 +250,7 @@ bool MachineCombiner::improvesCriticalPathLen(
 
   // Get depth and latency of NewRoot and Root.
   unsigned NewRootDepth = getDepth(InsInstrs, InstrIdxForVirtReg, BlockTrace);
-  unsigned RootDepth = BlockTrace.getInstrCycles(*Root).Depth;
+  unsigned RootDepth = BlockTrace.getInstrCycles(Root).Depth;
 
   DEBUG(dbgs() << "DEPENDENCE DATA FOR " << Root << "\n";
         dbgs() << " NewRootDepth: " << NewRootDepth << "\n";
@@ -269,7 +269,7 @@ bool MachineCombiner::improvesCriticalPathLen(
   // even if the instruction depths (data dependency cycles) become worse.
   unsigned NewRootLatency = getLatency(Root, NewRoot, BlockTrace);
   unsigned RootLatency = TSchedModel.computeInstrLatency(Root);
-  unsigned RootSlack = BlockTrace.getInstrSlack(*Root);
+  unsigned RootSlack = BlockTrace.getInstrSlack(Root);
 
   DEBUG(dbgs() << " NewRootLatency: " << NewRootLatency << "\n";
         dbgs() << " RootLatency: " << RootLatency << "\n";
diff --git a/llvm/lib/CodeGen/MachineTraceMetrics.cpp b/llvm/lib/CodeGen/MachineTraceMetrics.cpp
index e591c26..3814459 100644
--- a/llvm/lib/CodeGen/MachineTraceMetrics.cpp
+++ b/llvm/lib/CodeGen/MachineTraceMetrics.cpp
@@ -655,17 +655,17 @@ static bool getDataDeps(const MachineInstr *UseMI,
 // Get the input data dependencies of a PHI instruction, using Pred as the
 // preferred predecessor.
 // This will add at most one dependency to Deps.
-static void getPHIDeps(const MachineInstr &UseMI,
+static void getPHIDeps(const MachineInstr *UseMI,
                        SmallVectorImpl<DataDep> &Deps,
                        const MachineBasicBlock *Pred,
                        const MachineRegisterInfo *MRI) {
   // No predecessor at the beginning of a trace. Ignore dependencies.
   if (!Pred)
     return;
-  assert(UseMI.isPHI() && UseMI.getNumOperands() % 2 && "Bad PHI");
-  for (unsigned i = 1; i != UseMI.getNumOperands(); i += 2) {
-    if (UseMI.getOperand(i + 1).getMBB() == Pred) {
-      unsigned Reg = UseMI.getOperand(i).getReg();
+  assert(UseMI->isPHI() && UseMI->getNumOperands() % 2 && "Bad PHI");
+  for (unsigned i = 1; i != UseMI->getNumOperands(); i += 2) {
+    if (UseMI->getOperand(i + 1).getMBB() == Pred) {
+      unsigned Reg = UseMI->getOperand(i).getReg();
       Deps.push_back(DataDep(MRI, Reg, i));
       return;
     }
@@ -827,7 +827,7 @@ computeInstrDepths(const MachineBasicBlock *MBB) {
     // Collect all data dependencies.
     Deps.clear();
     if (UseMI.isPHI())
-      getPHIDeps(UseMI, Deps, TBI.Pred, MTM.MRI);
+      getPHIDeps(&UseMI, Deps, TBI.Pred, MTM.MRI);
    else if (getDataDeps(&UseMI, Deps, MTM.MRI))
      updatePhysDepsDownwards(&UseMI, Deps, RegUnits, MTM.TRI);
 
@@ -1052,7 +1052,7 @@ computeInstrHeights(const MachineBasicBlock *MBB) {
       if (!PHI.isPHI())
         break;
       Deps.clear();
-      getPHIDeps(PHI, Deps, MBB, MTM.MRI);
+      getPHIDeps(&PHI, Deps, MBB, MTM.MRI);
       if (!Deps.empty()) {
         // Loop header PHI heights are all 0.
         unsigned Height = TBI.Succ ? Cycles.lookup(&PHI).Height : 0;
@@ -1147,25 +1147,26 @@ MachineTraceMetrics::Ensemble::getTrace(const MachineBasicBlock *MBB) {
 }
 
 unsigned
-MachineTraceMetrics::Trace::getInstrSlack(const MachineInstr &MI) const {
-  assert(getBlockNum() == unsigned(MI.getParent()->getNumber()) &&
+MachineTraceMetrics::Trace::getInstrSlack(const MachineInstr *MI) const {
+  assert(MI && "Not an instruction.");
+  assert(getBlockNum() == unsigned(MI->getParent()->getNumber()) &&
          "MI must be in the trace center block");
   InstrCycles Cyc = getInstrCycles(MI);
   return getCriticalPath() - (Cyc.Depth + Cyc.Height);
 }
 
 unsigned
-MachineTraceMetrics::Trace::getPHIDepth(const MachineInstr &PHI) const {
+MachineTraceMetrics::Trace::getPHIDepth(const MachineInstr *PHI) const {
   const MachineBasicBlock *MBB = TE.MTM.MF->getBlockNumbered(getBlockNum());
   SmallVector<DataDep, 1> Deps;
   getPHIDeps(PHI, Deps, MBB, TE.MTM.MRI);
   assert(Deps.size() == 1 && "PHI doesn't have MBB as a predecessor");
   DataDep &Dep = Deps.front();
-  unsigned DepCycle = getInstrCycles(*Dep.DefMI).Depth;
+  unsigned DepCycle = getInstrCycles(Dep.DefMI).Depth;
   // Add latency if DefMI is a real instruction. Transients get latency 0.
   if (!Dep.DefMI->isTransient())
-    DepCycle += TE.MTM.SchedModel.computeOperandLatency(Dep.DefMI, Dep.DefOp,
-                                                        &PHI, Dep.UseOp);
+    DepCycle += TE.MTM.SchedModel
+      .computeOperandLatency(Dep.DefMI, Dep.DefOp, PHI, Dep.UseOp);
   return DepCycle;
 }
 
@@ -1251,13 +1252,13 @@ unsigned MachineTraceMetrics::Trace::getResourceLength(
   return std::max(Instrs, PRMax);
 }
 
-bool MachineTraceMetrics::Trace::isDepInTrace(const MachineInstr &DefMI,
-                                              const MachineInstr &UseMI) const {
-  if (DefMI.getParent() == UseMI.getParent())
+bool MachineTraceMetrics::Trace::isDepInTrace(const MachineInstr *DefMI,
+                                              const MachineInstr *UseMI) const {
+  if (DefMI->getParent() == UseMI->getParent())
     return true;
-  const TraceBlockInfo &DepTBI = TE.BlockInfo[DefMI.getParent()->getNumber()];
-  const TraceBlockInfo &TBI = TE.BlockInfo[UseMI.getParent()->getNumber()];
+  const TraceBlockInfo &DepTBI = TE.BlockInfo[DefMI->getParent()->getNumber()];
+  const TraceBlockInfo &TBI = TE.BlockInfo[UseMI->getParent()->getNumber()];
   return DepTBI.isUsefulDominator(TBI);
 }