/// optimize away.
unsigned getFlatAddressSpace() const;
+ /// Return any intrinsic address operand indexes that may be rewritten if
+ /// they use a flat address space pointer.
+ ///
+ /// \returns true if the intrinsic was handled.
+ bool collectFlatAddressOperands(SmallVectorImpl<int> &OpIndexes,
+ Intrinsic::ID IID) const;
+
+ /// Rewrite intrinsic call \p II such that \p OldV will be replaced with \p
+ /// NewV, which has a different address space. This should happen for every
+ /// operand index that collectFlatAddressOperands returned for the intrinsic.
+ /// \returns true if the intrinsic was handled.
+ bool rewriteIntrinsicWithAddressSpace(IntrinsicInst *II,
+ Value *OldV, Value *NewV) const;
+
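A rough sketch of the intended calling pattern (hypothetical pass code; TTI,
II, OldV and NewV are assumed to be in scope, as they are in the
InferAddressSpaces changes below):

    // Sketch only: drive the two hooks together.
    SmallVector<int, 2> OpIndexes;
    if (TTI.collectFlatAddressOperands(OpIndexes, II->getIntrinsicID())) {
      // OpIndexes names the operands that feed flat-pointer inference; once a
      // replacement NewV has been inferred for OldV, ask the target to commit.
      if (!TTI.rewriteIntrinsicWithAddressSpace(II, OldV, NewV)) {
        // Target declined (e.g. a volatile access): keep the flat pointer.
      }
    }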
/// Test whether calls to a function lower to actual program function
/// calls.
///
virtual bool isSourceOfDivergence(const Value *V) = 0;
virtual bool isAlwaysUniform(const Value *V) = 0;
virtual unsigned getFlatAddressSpace() = 0;
+ virtual bool collectFlatAddressOperands(SmallVectorImpl<int> &OpIndexes,
+ Intrinsic::ID IID) const = 0;
+ virtual bool rewriteIntrinsicWithAddressSpace(
+ IntrinsicInst *II, Value *OldV, Value *NewV) const = 0;
virtual bool isLoweredToCall(const Function *F) = 0;
virtual void getUnrollingPreferences(Loop *L, ScalarEvolution &,
UnrollingPreferences &UP) = 0;
return Impl.getFlatAddressSpace();
}
+ bool collectFlatAddressOperands(SmallVectorImpl<int> &OpIndexes,
+ Intrinsic::ID IID) const override {
+ return Impl.collectFlatAddressOperands(OpIndexes, IID);
+ }
+
+ bool rewriteIntrinsicWithAddressSpace(
+ IntrinsicInst *II, Value *OldV, Value *NewV) const override {
+ return Impl.rewriteIntrinsicWithAddressSpace(II, OldV, NewV);
+ }
+
bool isLoweredToCall(const Function *F) override {
return Impl.isLoweredToCall(F);
}
return -1;
}
+ bool collectFlatAddressOperands(SmallVectorImpl<int> &OpIndexes,
+ Intrinsic::ID IID) const {
+ return false;
+ }
+
+ bool rewriteIntrinsicWithAddressSpace(IntrinsicInst *II,
+ Value *OldV, Value *NewV) const {
+ return false;
+ }
+
bool isLoweredToCall(const Function *F) {
assert(F && "A concrete function must be provided to this routine.");
return -1;
}
+ bool collectFlatAddressOperands(SmallVectorImpl<int> &OpIndexes,
+ Intrinsic::ID IID) const {
+ return false;
+ }
+
+ bool rewriteIntrinsicWithAddressSpace(IntrinsicInst *II,
+ Value *OldV, Value *NewV) const {
+ return false;
+ }
+
bool isLegalAddImmediate(int64_t imm) {
return getTLI()->isLegalAddImmediate(imm);
}
return TTIImpl->getFlatAddressSpace();
}
+bool TargetTransformInfo::collectFlatAddressOperands(
+ SmallVectorImpl<int> &OpIndexes, Intrinsic::ID IID) const {
+ return TTIImpl->collectFlatAddressOperands(OpIndexes, IID);
+}
+
+bool TargetTransformInfo::rewriteIntrinsicWithAddressSpace(
+ IntrinsicInst *II, Value *OldV, Value *NewV) const {
+ return TTIImpl->rewriteIntrinsicWithAddressSpace(II, OldV, NewV);
+}
+
bool TargetTransformInfo::isLoweredToCall(const Function *F) const {
return TTIImpl->isLoweredToCall(F);
}
return false;
}
+bool GCNTTIImpl::collectFlatAddressOperands(SmallVectorImpl<int> &OpIndexes,
+ Intrinsic::ID IID) const {
+ switch (IID) {
+ case Intrinsic::amdgcn_atomic_inc:
+ case Intrinsic::amdgcn_atomic_dec:
+ case Intrinsic::amdgcn_ds_fadd:
+ case Intrinsic::amdgcn_ds_fmin:
+ case Intrinsic::amdgcn_ds_fmax:
+ OpIndexes.push_back(0);
+ return true;
+ default:
+ return false;
+ }
+}
+
+bool GCNTTIImpl::rewriteIntrinsicWithAddressSpace(
+ IntrinsicInst *II, Value *OldV, Value *NewV) const {
+ switch (II->getIntrinsicID()) {
+ case Intrinsic::amdgcn_atomic_inc:
+ case Intrinsic::amdgcn_atomic_dec:
+ case Intrinsic::amdgcn_ds_fadd:
+ case Intrinsic::amdgcn_ds_fmin:
+ case Intrinsic::amdgcn_ds_fmax: {
+ const ConstantInt *IsVolatile = cast<ConstantInt>(II->getArgOperand(4));
+ if (!IsVolatile->isZero())
+ return false;
+ Module *M = II->getModule();
+ Type *DestTy = II->getType();
+ Type *SrcTy = NewV->getType();
+ Function *NewDecl =
+ Intrinsic::getDeclaration(M, II->getIntrinsicID(), {DestTy, SrcTy});
+ II->setArgOperand(0, NewV);
+ II->setCalledFunction(NewDecl);
+ return true;
+ }
+ default:
+ return false;
+ }
+}
+
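These intrinsics are overloaded on the pointer type, so swapping the operand
alone would leave a mismatched callee: the call must also be re-pointed at a
re-mangled declaration (e.g. llvm.amdgcn.atomic.inc.i32.p0i32 becomes
llvm.amdgcn.atomic.inc.i32.p3i32 once the pointer is known to be LDS). A
minimal sketch of that pattern in isolation (helper name invented; assumes II
is a call to one of the intrinsics handled above):

    // Sketch only: re-mangle an overloaded intrinsic for a new pointer type.
    static void rewritePointerOperand(IntrinsicInst *II, Value *NewV) {
      Function *NewDecl = Intrinsic::getDeclaration(
          II->getModule(), II->getIntrinsicID(),
          {II->getType(), NewV->getType()}); // {result type, new pointer type}
      II->setArgOperand(0, NewV);     // e.g. i32* -> i32 addrspace(3)*
      II->setCalledFunction(NewDecl); // ...p0i32 -> ...p3i32
    }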
unsigned GCNTTIImpl::getShuffleCost(TTI::ShuffleKind Kind, Type *Tp, int Index,
Type *SubTp) {
if (ST->hasVOP3PInsts()) {
return AMDGPUAS::FLAT_ADDRESS;
}
+ bool collectFlatAddressOperands(SmallVectorImpl<int> &OpIndexes,
+ Intrinsic::ID IID) const;
+ bool rewriteIntrinsicWithAddressSpace(IntrinsicInst *II,
+ Value *OldV, Value *NewV) const;
+
unsigned getVectorSplitCost() { return 0; }
unsigned getShuffleCost(TTI::ShuffleKind Kind, Type *Tp, int Index,
/// InferAddressSpaces
class InferAddressSpaces : public FunctionPass {
+ const TargetTransformInfo *TTI = nullptr;
+
/// Target-specific address space whose uses should be replaced if
/// possible.
unsigned FlatAddrSpace;
Module *M = II->getParent()->getParent()->getParent();
switch (II->getIntrinsicID()) {
- case Intrinsic::amdgcn_atomic_inc:
- case Intrinsic::amdgcn_atomic_dec:
- case Intrinsic::amdgcn_ds_fadd:
- case Intrinsic::amdgcn_ds_fmin:
- case Intrinsic::amdgcn_ds_fmax: {
- const ConstantInt *IsVolatile = cast<ConstantInt>(II->getArgOperand(4));
- if (!IsVolatile->isZero())
- return false;
-
- LLVM_FALLTHROUGH;
- }
case Intrinsic::objectsize: {
Type *DestTy = II->getType();
Type *SrcTy = NewV->getType();
return true;
}
default:
- return false;
+ return TTI->rewriteIntrinsicWithAddressSpace(II, OldV, NewV);
}
}
-// TODO: Move logic to TTI?
void InferAddressSpaces::collectRewritableIntrinsicOperands(
IntrinsicInst *II, std::vector<std::pair<Value *, bool>> &PostorderStack,
DenseSet<Value *> &Visited) const {
- switch (II->getIntrinsicID()) {
+ auto IID = II->getIntrinsicID();
+ switch (IID) {
case Intrinsic::objectsize:
- case Intrinsic::amdgcn_atomic_inc:
- case Intrinsic::amdgcn_atomic_dec:
- case Intrinsic::amdgcn_ds_fadd:
- case Intrinsic::amdgcn_ds_fmin:
- case Intrinsic::amdgcn_ds_fmax:
appendsFlatAddressExpressionToPostorderStack(II->getArgOperand(0),
PostorderStack, Visited);
break;
default:
+ SmallVector<int, 2> OpIndexes;
+ if (TTI->collectFlatAddressOperands(OpIndexes, IID)) {
+ for (int Idx : OpIndexes) {
+ appendsFlatAddressExpressionToPostorderStack(II->getArgOperand(Idx),
+ PostorderStack, Visited);
+ }
+ }
break;
}
}
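The loop in the default case matters because OpIndexes may name more than one
operand. A hypothetical target intrinsic with two rewritable pointers (all
names here are invented for illustration) would plug in like this:

    // Hypothetical hook; Intrinsic::mytgt_copy is an invented name.
    bool MyTgtTTIImpl::collectFlatAddressOperands(
        SmallVectorImpl<int> &OpIndexes, Intrinsic::ID IID) const {
      if (IID != Intrinsic::mytgt_copy)
        return false;
      OpIndexes.push_back(0); // destination pointer
      OpIndexes.push_back(1); // source pointer; both are visited above
      return true;
    }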
if (skipFunction(F))
return false;
- const TargetTransformInfo &TTI =
- getAnalysis<TargetTransformInfoWrapperPass>().getTTI(F);
+ TTI = &getAnalysis<TargetTransformInfoWrapperPass>().getTTI(F);
if (FlatAddrSpace == UninitializedAddressSpace) {
- FlatAddrSpace = TTI.getFlatAddressSpace();
+ FlatAddrSpace = TTI->getFlatAddressSpace();
if (FlatAddrSpace == UninitializedAddressSpace)
return false;
}
// Changes the address spaces of the flat address expressions that are
// inferred to point to a specific address space.
- return rewriteWithNewAddressSpaces(TTI, Postorder, InferredAddrSpace, &F);
+ return rewriteWithNewAddressSpaces(*TTI, Postorder, InferredAddrSpace, &F);
}
// Constants need to be tracked through RAUW to handle cases with nested