assert(V->getType()->isPointerTy() && "Capture is for pointers only!");
SmallVector<const Use *, Threshold> Worklist;
SmallSet<const Use *, Threshold> Visited;
- int Count = 0;
- for (const Use &U : V->uses()) {
- // If there are lots of uses, conservatively say that the value
- // is captured to avoid taking too much compile time.
- if (Count++ >= Threshold)
- return Tracker->tooManyUses();
-
- if (!Tracker->shouldExplore(&U)) continue;
- Visited.insert(&U);
- Worklist.push_back(&U);
- }
+ auto AddUses = [&](const Value *V) {
+   int Count = 0;
+   for (const Use &U : V->uses()) {
+     // If there are lots of uses, conservatively say that the value
+     // is captured to avoid taking too much compile time.
+     if (Count++ >= Threshold) {
+       Tracker->tooManyUses();
+       return false;
+     }
+     if (!Visited.insert(&U).second)
+       continue;
+     if (!Tracker->shouldExplore(&U))
+       continue;
+     Worklist.push_back(&U);
+   }
+   return true;
+ };
+ if (!AddUses(V))
+   return;
while (!Worklist.empty()) {
const Use *U = Worklist.pop_back_val();
@@ ... @@
if (CS.onlyReadsMemory() && CS.doesNotThrow() && I->getType()->isVoidTy())
break;
+ // launder.invariant.group captures its pointer argument only through
+ // its return value, so the argument is not captured if the returned
+ // pointer is not captured.
+ if (CS.getIntrinsicID() == Intrinsic::launder_invariant_group) {
+   if (!AddUses(I))
+     return;
+   break;
+ }
+
// Volatile operations effectively capture the memory location that they
// load and store to.
if (auto *MI = dyn_cast<MemIntrinsic>(I))
  if (MI->isVolatile())
    if (Tracker->captured(U))
      return;
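As background for the callbacks used above (`tooManyUses`, `shouldExplore`, `captured`): they belong to the `CaptureTracker` interface declared in llvm/Analysis/CaptureTracking.h. Below is a minimal sketch of a client, assuming that interface; `BooleanCaptureTracker` is an illustrative name (the in-tree SimpleCaptureTracker plays this role) and is not part of this patch.

    #include "llvm/Analysis/CaptureTracking.h"
    using namespace llvm;

    // Illustrative client: records whether any capturing use was seen.
    struct BooleanCaptureTracker : public CaptureTracker {
      bool Captured = false;

      // Called when the walk gives up after Threshold uses; the only safe
      // answer is to assume the pointer is captured.
      void tooManyUses() override { Captured = true; }

      // Called for each capturing use; returning true stops the traversal.
      bool captured(const Use *U) override {
        Captured = true;
        return true;
      }
    };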
@@ ... @@
case Instruction::Select:
case Instruction::AddrSpaceCast:
// The original value is not captured via this if the new value isn't.
- Count = 0;
- for (Use &UU : I->uses()) {
- // If there are lots of uses, conservatively say that the value
- // is captured to avoid taking too much compile time.
- if (Count++ >= Threshold)
- return Tracker->tooManyUses();
-
- if (Visited.insert(&UU).second)
- if (Tracker->shouldExplore(&UU))
- Worklist.push_back(&UU);
- }
+ if (!AddUses(I))
+   return;
break;
case Instruction::ICmp: {
// Don't count comparisons of a no-alias return value against null as
// captures. This allows us to ignore comparisons of malloc results
// with null, for example.
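The launder case added above deserves a note: rather than treating the intrinsic call as a capture, the walk re-seeds the worklist with the uses of the call's result, because the result aliases the pointer argument. Here is a hedged sketch of that rule as a standalone predicate; `forwardsPointerWithoutCapturing` is a hypothetical helper, not part of this patch. The hunks that follow are the accompanying IR regression tests.

    #include "llvm/IR/CallSite.h"
    #include "llvm/IR/Intrinsics.h"
    using namespace llvm;

    // Hypothetical helper: true if the call only "captures" its pointer
    // argument by returning an aliasing pointer, so the argument escapes
    // only if the returned pointer escapes.
    static bool forwardsPointerWithoutCapturing(ImmutableCallSite CS) {
      switch (CS.getIntrinsicID()) {
      case Intrinsic::launder_invariant_group:
        return true;
      default:
        return false;
      }
    }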
@@ ... @@
store volatile i32 0, i32* %gep, align 4
ret void
}
+
+; CHECK: define void @nocaptureLaunder(i8* nocapture %p)
+define void @nocaptureLaunder(i8* %p) {
+entry:
+ %b = call i8* @llvm.launder.invariant.group.p0i8(i8* %p)
+ store i8 42, i8* %b
+ ret void
+}
+
+@g2 = global i8* null
+; CHECK: define void @captureLaunder(i8* %p)
+define void @captureLaunder(i8* %p) {
+ %b = call i8* @llvm.launder.invariant.group.p0i8(i8* %p)
+ store i8* %b, i8** @g2
+ ret void
+}
+
+declare i8* @llvm.launder.invariant.group.p0i8(i8*)
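
These tests check the attribute that the FunctionAttrs pass infers from this analysis: with the patch, passing %p through llvm.launder.invariant.group and storing through the laundered pointer no longer counts as an escape, while storing the laundered pointer itself to @g2 still does. Below is a minimal sketch of the underlying query, assuming the public entry point in llvm/Analysis/CaptureTracking.h; `mayEscape` is an illustrative wrapper, and FunctionAttrs itself uses a more specialized tracker.

    #include "llvm/Analysis/CaptureTracking.h"
    #include "llvm/IR/Argument.h"
    using namespace llvm;

    // Illustrative wrapper: true if the argument may be captured, treating
    // returns and stores of the pointer itself as captures.
    static bool mayEscape(const Argument &A) {
      return PointerMayBeCaptured(&A, /*ReturnCaptures=*/true,
                                  /*StoreCaptures=*/true);
    }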