#include "llvm/Transforms/IPO/LowerTypeTests.h"
#include "llvm/ADT/EquivalenceClasses.h"
+#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/Triple.h"
#include "llvm/IR/Constant.h"
std::vector<ByteArrayInfo> ByteArrayInfos;
Mangler Mang;
+ Function *WeakInitializerFn = nullptr;
BitSetInfo
buildBitSet(Metadata *TypeId,
void buildBitSetsFromDisjointSet(ArrayRef<Metadata *> TypeIds,
ArrayRef<GlobalObject *> Globals);
+ void replaceWeakDeclarationWithJumpTablePtr(Function *F, Constant *JT);
+ void moveInitializerToModuleConstructor(GlobalVariable *GV);
+ void findGlobalVariableUsersOf(Constant *C,
+ SmallSetVector<GlobalVariable *, 8> &Out);
+
public:
LowerTypeTestsModule(Module &M);
bool lower();
report_fatal_error("Unsupported architecture for jump tables");
}
+// Move GV's static initializer into a module constructor so it is applied at
+// runtime instead of link time. Needed when the initializer references a weak
+// external function: its replacement expression (F ? jumptable : null) cannot
+// be encoded as a constant relocation in a global initializer.
+void LowerTypeTestsModule::moveInitializerToModuleConstructor(
+ GlobalVariable *GV) {
+ // Lazily create one shared internal constructor for the whole module.
+ if (WeakInitializerFn == nullptr) {
+ WeakInitializerFn = Function::Create(
+ FunctionType::get(Type::getVoidTy(M.getContext()),
+ /* IsVarArg */ false),
+ GlobalValue::InternalLinkage, "__cfi_global_var_init", &M);
+ BasicBlock *BB =
+ BasicBlock::Create(M.getContext(), "entry", WeakInitializerFn);
+ ReturnInst::Create(M.getContext(), BB);
+ // Place the constructor in the platform's static-init text section.
+ WeakInitializerFn->setSection(
+ ObjectFormat == Triple::MachO
+ ? "__TEXT,__StaticInit,regular,pure_instructions"
+ : ".text.startup");
+ // This code is equivalent to relocation application, and should run at the
+ // earliest possible time (i.e. with the highest priority).
+ appendToGlobalCtors(M, WeakInitializerFn, /* Priority */ 0);
+ }
+
+ // Insert the store before the constructor's terminating ret, turn the
+ // variable into a runtime-written (non-constant) global, and zero out its
+ // static initializer since it is now established at startup.
+ IRBuilder<> IRB(WeakInitializerFn->getEntryBlock().getTerminator());
+ GV->setConstant(false);
+ IRB.CreateAlignedStore(GV->getInitializer(), GV, GV->getAlignment());
+ GV->setInitializer(Constant::getNullValue(GV->getValueType()));
+}
+
+// Collect into Out every GlobalVariable that (transitively) uses C through a
+// chain of constant expressions, i.e. every global whose initializer mentions
+// C. Recurses through intermediate Constants (bitcasts, GEPs, aggregates).
+void LowerTypeTestsModule::findGlobalVariableUsersOf(
+ Constant *C, SmallSetVector<GlobalVariable *, 8> &Out) {
+ for (auto *U : C->users()){
+ if (auto *GV = dyn_cast<GlobalVariable>(U))
+ Out.insert(GV);
+ else if (auto *C2 = dyn_cast<Constant>(U))
+ findGlobalVariableUsersOf(C2, Out);
+ }
+}
+
+// Replace all uses of F with (F ? JT : 0), where JT is the jump table entry
+// for F. The select preserves weak-symbol semantics: if F turns out to be
+// undefined at link time, the expression evaluates to null rather than to a
+// jump table slot.
+void LowerTypeTestsModule::replaceWeakDeclarationWithJumpTablePtr(
+ Function *F, Constant *JT) {
+ // The target expression cannot appear in a constant initializer on most
+ // (all?) targets. Switch any global whose initializer mentions F to a
+ // runtime initializer first.
+ SmallSetVector<GlobalVariable *, 8> GlobalVarUsers;
+ findGlobalVariableUsersOf(F, GlobalVarUsers);
+ for (auto GV : GlobalVarUsers)
+ moveInitializerToModuleConstructor(GV);
+
+ // Cannot RAUW F with an expression that uses F (RAUW would rewrite the F
+ // inside the select too). Replace with a temporary placeholder first, then
+ // swap the placeholder for the real expression.
+ Function *PlaceholderFn =
+ Function::Create(cast<FunctionType>(F->getValueType()),
+ GlobalValue::ExternalWeakLinkage, "", &M);
+ F->replaceAllUsesWith(PlaceholderFn);
+
+ Constant *Target = ConstantExpr::getSelect(
+ ConstantExpr::getICmp(CmpInst::ICMP_NE, F,
+ Constant::getNullValue(F->getType())),
+ JT, Constant::getNullValue(F->getType()));
+ PlaceholderFn->replaceAllUsesWith(Target);
+ PlaceholderFn->eraseFromParent();
+}
+
/// Given a disjoint set of type identifiers and functions, build a jump table
/// for the functions, build the bit sets and lower the llvm.type.test calls.
void LowerTypeTestsModule::buildBitSetsFromFunctionsNative(
ArrayRef<Constant *>{ConstantInt::get(IntPtrTy, 0),
ConstantInt::get(IntPtrTy, I)}),
Functions[I]->getType());
- Functions[I]->replaceAllUsesWith(CombinedGlobalElemPtr);
- if (Functions[I]->isWeakForLinker())
+
+ if (Functions[I]->isWeakForLinker()) {
AsmOS << ".weak " << Functions[I]->getName() << "\n";
+ replaceWeakDeclarationWithJumpTablePtr(Functions[I],
+ CombinedGlobalElemPtr);
+ } else {
+ Functions[I]->replaceAllUsesWith(CombinedGlobalElemPtr);
+ }
} else {
assert(Functions[I]->getType()->getAddressSpace() == 0);
--- /dev/null
+; Tests that -lowertypetests correctly handles an extern_weak function with
+; type metadata: every use of @f is rewritten to
+;   select (icmp ne @f, null), <jumptable entry>, null
+; and globals whose initializers referenced @f are zeroed out and instead
+; written at startup by an internal constructor, __cfi_global_var_init,
+; registered in llvm.global_ctors at priority 0.
+; RUN: opt -S -lowertypetests -mtriple=i686-unknown-linux-gnu < %s | FileCheck %s
+; RUN: opt -S -lowertypetests -mtriple=x86_64-unknown-linux-gnu < %s | FileCheck %s
+; RUN: opt -S -lowertypetests -mtriple=arm-unknown-linux-gnu < %s | FileCheck %s
+; RUN: opt -S -lowertypetests -mtriple=aarch64-unknown-linux-gnu < %s | FileCheck %s
+
+target datalayout = "e-m:e-i64:64-f80:128-n8:16:32:64-S128"
+target triple = "x86_64-unknown-linux-gnu"
+
+; CHECK: module asm ".weak f"
+
+; Initializer referencing @f is replaced with null (set at runtime instead).
+; CHECK: @x = global void ()* null, align 8
+@x = global void ()* @f, align 8
+
+; CHECK: @x2 = global void ()* null, align 8
+@x2 = global void ()* @f, align 8
+
+; A constant global must become a writable (non-constant) global so the
+; runtime constructor can store into it.
+; CHECK: @x3 = internal global void ()* null, align 8
+@x3 = internal constant void ()* @f, align 8
+
+; f + addend
+; CHECK: @x4 = global void ()* null, align 8
+@x4 = global void ()* bitcast (i8* getelementptr (i8, i8* bitcast (void ()* @f to i8*), i64 42) to void ()*), align 8
+
+; aggregate initializer
+; CHECK: @s = global { void ()*, void ()*, i32 } zeroinitializer, align 8
+@s = global { void ()*, void ()*, i32 } { void ()* @f, void ()* @f, i32 42 }, align 8
+
+; CHECK: @.cfi.jumptable = external hidden constant [1 x [{{.*}} x i8]]
+; CHECK: @llvm.global_ctors = appending global {{.*}}{ i32 0, void ()* @__cfi_global_var_init
+
+; CHECK: declare !type !0 extern_weak void @f()
+declare !type !0 extern_weak void @f()
+
+; Address comparison of @f must be rewritten to compare the select expression,
+; so it still folds to null when f is undefined at link time.
+; CHECK: define zeroext i1 @check_f()
+define zeroext i1 @check_f() {
+entry:
+; CHECK: ret i1 icmp ne (void ()* select (i1 icmp ne (void ()* @f, void ()* null), void ()* bitcast ({{.*}}@.cfi.jumptable to void ()*), void ()* null), void ()* null)
+ ret i1 icmp ne (void ()* @f, void ()* null)
+}
+
+; Direct calls are also routed through the select (i.e. the jump table).
+; CHECK: define void @call_f() {
+define void @call_f() {
+entry:
+; CHECK: call void select (i1 icmp ne (void ()* @f, void ()* null), void ()* bitcast ({{.*}}@.cfi.jumptable to void ()*), void ()* null)()
+ call void @f()
+ ret void
+}
+
+declare i1 @llvm.type.test(i8* %ptr, metadata %bitset) nounwind readnone
+
+define i1 @foo(i8* %p) {
+ %x = call i1 @llvm.type.test(i8* %p, metadata !"typeid1")
+ ret i1 %x
+}
+
+; The constructor stores the runtime-computed values into each moved global.
+; CHECK: define internal void @__cfi_global_var_init() section ".text.startup" {
+; CHECK-NEXT: entry:
+; CHECK-NEXT: store { void ()*, void ()*, i32 } { void ()* select (i1 icmp ne (void ()* @f, void ()* null), void ()* bitcast ({{.*}}@.cfi.jumptable to void ()*), void ()* null), void ()* select (i1 icmp ne (void ()* @f, void ()* null), void ()* bitcast ({{.*}}@.cfi.jumptable to void ()*), void ()* null), i32 42 }, { void ()*, void ()*, i32 }* @s, align 8
+; CHECK-NEXT: store void ()* bitcast (i8* getelementptr (i8, i8* bitcast (void ()* select (i1 icmp ne (void ()* @f, void ()* null), void ()* bitcast ({{.*}}@.cfi.jumptable to void ()*), void ()* null) to i8*), i64 42) to void ()*), void ()** @x4, align 8
+; CHECK-NEXT: store void ()* select (i1 icmp ne (void ()* @f, void ()* null), void ()* bitcast ({{.*}}@.cfi.jumptable to void ()*), void ()* null), void ()** @x3, align 8
+; CHECK-NEXT: store void ()* select (i1 icmp ne (void ()* @f, void ()* null), void ()* bitcast ({{.*}}@.cfi.jumptable to void ()*), void ()* null), void ()** @x2, align 8
+; CHECK-NEXT: store void ()* select (i1 icmp ne (void ()* @f, void ()* null), void ()* bitcast ({{.*}}@.cfi.jumptable to void ()*), void ()* null), void ()** @x, align 8
+; CHECK-NEXT: ret void
+; CHECK-NEXT: }
+
+
+!0 = !{i32 0, !"typeid1"}