bool NonTrivialToPrimitiveCopy : 1;
bool NonTrivialToPrimitiveDestroy : 1;
+ /// True if this class can be passed in a non-address-preserving fashion
+ /// (such as in registers).
+ /// This does not imply anything about how the ABI in use will actually
+ /// pass an object of this class.
+ bool CanPassInRegisters : 1;
+
protected:
RecordDecl(Kind DK, TagKind TK, const ASTContext &C, DeclContext *DC,
SourceLocation StartLoc, SourceLocation IdLoc,
NonTrivialToPrimitiveDestroy = V;
}
+ /// Determine whether this class can be passed in registers. In C++ mode,
+ /// it must have at least one trivial, non-deleted copy or move constructor.
+ /// FIXME: This should be set as part of completeDefinition.
+ bool canPassInRegisters() const {
+ return CanPassInRegisters;
+ }
+
+ /// Set that we can pass this RecordDecl in registers.
+ void setCanPassInRegisters(bool CanPass) {
+ CanPassInRegisters = CanPass;
+ }
+
/// \brief Determines whether this declaration represents the
/// injected class name.
///
/// constructor.
unsigned HasDefaultedDefaultConstructor : 1;
- /// \brief True if this class can be passed in a non-address-preserving
- /// fashion (such as in registers) according to the C++ language rules.
- /// This does not imply anything about how the ABI in use will actually
- /// pass an object of this class.
- unsigned CanPassInRegisters : 1;
-
/// \brief True if a defaulted default constructor for this class would
/// be constexpr.
unsigned DefaultedDefaultConstructorIsConstexpr : 1;
return data().HasIrrelevantDestructor;
}
- /// \brief Determine whether this class has at least one trivial, non-deleted
- /// copy or move constructor.
- bool canPassInRegisters() const {
- return data().CanPassInRegisters;
- }
-
- /// \brief Set that we can pass this RecordDecl in registers.
- // FIXME: This should be set as part of completeDefinition.
- void setCanPassInRegisters(bool CanPass) {
- data().CanPassInRegisters = CanPass;
- }
-
/// Determine whether the triviality for the purpose of calls for this class
/// is overridden to be trivial because this class or the type of one of its
/// subobjects has attribute "trivial_abi".
/// with the ARC __strong qualifier.
PDIK_ARCStrong,
+ /// The type is an Objective-C retainable pointer type that is qualified
+ /// with the ARC __weak qualifier.
+ PDIK_ARCWeak,
+
  /// The type is a struct containing a field whose type is not PDIK_Trivial.
PDIK_Struct
};
/// with the ARC __strong qualifier.
PCK_ARCStrong,
+ /// The type is an Objective-C retainable pointer type that is qualified
+ /// with the ARC __weak qualifier.
+ PCK_ARCWeak,
+
/// The type is a struct containing a field whose type is neither
/// PCK_Trivial nor PCK_VolatileTrivial.
/// Note that a C++ struct type does not necessarily match this; C++ copying
/// source object is placed in an uninitialized state.
PrimitiveCopyKind isNonTrivialToPrimitiveDestructiveMove() const;
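+  /// Return true if this type can be passed in registers: a record type
+  /// defers to RecordDecl::canPassInRegisters(), and a __weak-qualified type
+  /// is never passed in registers.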
+ bool canPassInRegisters() const;
+
enum DestructionKind {
DK_none,
DK_cxx_destructor,
= FromData.HasConstexprNonCopyMoveConstructor;
ToData.HasDefaultedDefaultConstructor
= FromData.HasDefaultedDefaultConstructor;
- ToData.CanPassInRegisters = FromData.CanPassInRegisters;
ToData.DefaultedDefaultConstructorIsConstexpr
= FromData.DefaultedDefaultConstructorIsConstexpr;
ToData.HasConstexprDefaultConstructor
HasObjectMember(false), HasVolatileMember(false),
LoadedFieldsFromExternalStorage(false),
NonTrivialToPrimitiveDefaultInitialize(false),
- NonTrivialToPrimitiveCopy(false), NonTrivialToPrimitiveDestroy(false) {
+ NonTrivialToPrimitiveCopy(false), NonTrivialToPrimitiveDestroy(false),
+ CanPassInRegisters(true) {
assert(classof(static_cast<Decl*>(this)) && "Invalid Kind!");
}
DeclaredNonTrivialSpecialMembersForCall(0), HasIrrelevantDestructor(true),
HasConstexprNonCopyMoveConstructor(false),
HasDefaultedDefaultConstructor(false),
- CanPassInRegisters(true),
DefaultedDefaultConstructorIsConstexpr(true),
HasConstexprDefaultConstructor(false),
HasNonLiteralTypeFieldsOrBases(false), ComputedVisibleConversions(false),
if (RT->getDecl()->isNonTrivialToPrimitiveDefaultInitialize())
return PDIK_Struct;
- Qualifiers::ObjCLifetime Lifetime = getQualifiers().getObjCLifetime();
- if (Lifetime == Qualifiers::OCL_Strong)
+ switch (getQualifiers().getObjCLifetime()) {
+ case Qualifiers::OCL_Strong:
return PDIK_ARCStrong;
-
- return PDIK_Trivial;
+ case Qualifiers::OCL_Weak:
+ return PDIK_ARCWeak;
+ default:
+ return PDIK_Trivial;
+ }
}
QualType::PrimitiveCopyKind QualType::isNonTrivialToPrimitiveCopy() const {
return PCK_Struct;
Qualifiers Qs = getQualifiers();
- if (Qs.getObjCLifetime() == Qualifiers::OCL_Strong)
+ switch (Qs.getObjCLifetime()) {
+ case Qualifiers::OCL_Strong:
return PCK_ARCStrong;
-
- return Qs.hasVolatile() ? PCK_VolatileTrivial : PCK_Trivial;
+ case Qualifiers::OCL_Weak:
+ return PCK_ARCWeak;
+ default:
+ return Qs.hasVolatile() ? PCK_VolatileTrivial : PCK_Trivial;
+ }
}
QualType::PrimitiveCopyKind
return isNonTrivialToPrimitiveCopy();
}
+bool QualType::canPassInRegisters() const {
+ if (const auto *RT =
+ getTypePtr()->getBaseElementTypeUnsafe()->getAs<RecordType>())
+ return RT->getDecl()->canPassInRegisters();
+
+ if (getQualifiers().getObjCLifetime() == Qualifiers::OCL_Weak)
+ return false;
+
+ return true;
+}
+
bool Type::isLiteralType(const ASTContext &Ctx) const {
if (isDependentType())
return false;
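Not part of the patch: a minimal libTooling sketch of how the new query can be exercised outside of CodeGen. The probe structs, flags, target triple, and file name below are illustrative assumptions, and it presupposes a Clang build that contains this change. It parses a C struct with a __weak field and prints RecordDecl::canPassInRegisters(), which the Sema hunk further below clears because the field's type cannot be passed in registers.

#include "clang/AST/ASTContext.h"
#include "clang/AST/Decl.h"
#include "clang/Frontend/ASTUnit.h"
#include "clang/Tooling/Tooling.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/raw_ostream.h"
#include <memory>
#include <string>
#include <vector>

int main() {
  // Two probe structs: one holds a __weak field (expected to be flagged as
  // not passable in registers after this patch), the other only a __strong
  // field (still passable in registers).
  const char *Code = "struct Weak { int i; __weak id f1; };\n"
                     "struct Strong { int i; id f1; };\n";
  // Illustrative flags; the Darwin triple is only there so that ARC __weak is
  // available regardless of the host.
  std::vector<std::string> Args = {"-x", "objective-c", "-fobjc-arc",
                                   "-target", "x86_64-apple-macosx10.13"};
  std::unique_ptr<clang::ASTUnit> AST =
      clang::tooling::buildASTFromCodeWithArgs(Code, Args, "probe.m");
  if (!AST)
    return 1;
  for (clang::Decl *D :
       AST->getASTContext().getTranslationUnitDecl()->decls())
    if (auto *RD = llvm::dyn_cast<clang::RecordDecl>(D))
      llvm::outs() << RD->getName() << ": "
                   << (RD->canPassInRegisters() ? "in registers" : "indirect")
                   << "\n";
  return 0;
}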
case QualType::PCK_Struct:
return std::make_pair(BlockCaptureEntityKind::NonTrivialCStruct,
BlockFieldFlags());
+ case QualType::PCK_ARCWeak:
+ // We need to register __weak direct captures with the runtime.
+ return std::make_pair(BlockCaptureEntityKind::ARCWeak, Flags);
case QualType::PCK_ARCStrong:
// We need to retain the copied value for __strong direct captures.
// If it's a block pointer, we have to copy the block and assign that to
// Special rules for ARC captures:
Qualifiers QS = T.getQualifiers();
- // We need to register __weak direct captures with the runtime.
- if (QS.getObjCLifetime() == Qualifiers::OCL_Weak)
- return std::make_pair(BlockCaptureEntityKind::ARCWeak, Flags);
-
// Non-ARC captures of retainable pointers are strong and
// therefore require a call to _Block_object_assign.
if (!QS.getObjCLifetime() && !LangOpts.ObjCAutoRefCount)
switch (PDIK) {
case QualType::PDIK_ARCStrong:
return asDerived().visitARCStrong(FT, std::forward<Ts>(Args)...);
+ case QualType::PDIK_ARCWeak:
+ return asDerived().visitARCWeak(FT, std::forward<Ts>(Args)...);
case QualType::PDIK_Struct:
return asDerived().visitStruct(FT, std::forward<Ts>(Args)...);
case QualType::PDIK_Trivial:
switch (PCK) {
case QualType::PCK_ARCStrong:
return asDerived().visitARCStrong(FT, std::forward<Ts>(Args)...);
+ case QualType::PCK_ARCWeak:
+ return asDerived().visitARCWeak(FT, std::forward<Ts>(Args)...);
case QualType::PCK_Struct:
return asDerived().visitStruct(FT, std::forward<Ts>(Args)...);
case QualType::PCK_Trivial:
template <class... Ts> void visitTrivial(Ts... Args) {}
- template <class... Ts> void visitARCWeak(Ts... Args) {
- // FIXME: remove this when visitARCWeak is implemented in the subclasses.
- llvm_unreachable("weak field is not expected");
- }
-
template <class... Ts> void visitCXXDestructor(Ts... Args) {
llvm_unreachable("field of a C++ struct type is not expected");
}
appendStr(getVolatileOffsetStr(FT.isVolatileQualified(), FieldOffset));
}
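+  // A __weak field contributes "_w" plus its (possibly volatile-marked) byte
+  // offset to the name of the synthesized special function, e.g.
+  // __destructor_8_w8 for a __weak pointer at offset 8.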
+ void visitARCWeak(QualType FT, const FieldDecl *FD,
+ CharUnits CurStructOffset) {
+ appendStr("_w");
+ CharUnits FieldOffset = CurStructOffset + asDerived().getFieldOffset(FD);
+ appendStr(getVolatileOffsetStr(FT.isVolatileQualified(), FieldOffset));
+ }
+
void visitStruct(QualType QT, const FieldDecl *FD,
CharUnits CurStructOffset) {
CharUnits FieldOffset = CurStructOffset + asDerived().getFieldOffset(FD);
*CGF, getAddrWithOffset(Addrs[DstIdx], CurStackOffset, FD), QT);
}
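+  // A __weak field must be unregistered with the runtime (objc_destroyWeak)
+  // when the containing struct is destroyed.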
+ void visitARCWeak(QualType QT, const FieldDecl *FD, CharUnits CurStackOffset,
+ std::array<Address, 1> Addrs) {
+ CGF->destroyARCWeak(
+ *CGF, getAddrWithOffset(Addrs[DstIdx], CurStackOffset, FD), QT);
+ }
+
void callSpecialFunction(QualType FT, CharUnits Offset,
std::array<Address, 1> Addrs) {
CGF->callCStructDestructor(
getAddrWithOffset(Addrs[DstIdx], CurStackOffset, FD), QT);
}
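+  // A default-initialized __weak field is simply zero-filled; a null __weak
+  // pointer does not need to be registered with the runtime.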
+ void visitARCWeak(QualType QT, const FieldDecl *FD, CharUnits CurStackOffset,
+ std::array<Address, 1> Addrs) {
+ CGF->EmitNullInitialization(
+ getAddrWithOffset(Addrs[DstIdx], CurStackOffset, FD), QT);
+ }
+
template <class FieldKind, size_t... Is>
void visitArray(FieldKind FK, QualType QT, const FieldDecl *FD,
CharUnits CurStackOffset, std::array<Address, 1> Addrs) {
llvm::Value *Val = CGF->EmitARCRetain(QT, SrcVal);
CGF->EmitStoreOfScalar(Val, CGF->MakeAddrLValue(Addrs[DstIdx], QT), true);
}
+
+ void visitARCWeak(QualType QT, const FieldDecl *FD, CharUnits CurStackOffset,
+ std::array<Address, 2> Addrs) {
+ Addrs[DstIdx] = getAddrWithOffset(Addrs[DstIdx], CurStackOffset, FD);
+ Addrs[SrcIdx] = getAddrWithOffset(Addrs[SrcIdx], CurStackOffset, FD);
+ CGF->EmitARCCopyWeak(Addrs[DstIdx], Addrs[SrcIdx]);
+ }
+
void callSpecialFunction(QualType FT, CharUnits Offset,
std::array<Address, 2> Addrs) {
CGF->callCStructCopyConstructor(CGF->MakeAddrLValue(Addrs[DstIdx], FT),
CGF->EmitStoreOfScalar(SrcVal, CGF->MakeAddrLValue(Addrs[DstIdx], QT),
/* isInitialization */ true);
}
+
+ void visitARCWeak(QualType QT, const FieldDecl *FD, CharUnits CurStackOffset,
+ std::array<Address, 2> Addrs) {
+ Addrs[DstIdx] = getAddrWithOffset(Addrs[DstIdx], CurStackOffset, FD);
+ Addrs[SrcIdx] = getAddrWithOffset(Addrs[SrcIdx], CurStackOffset, FD);
+ CGF->EmitARCMoveWeak(Addrs[DstIdx], Addrs[SrcIdx]);
+ }
+
void callSpecialFunction(QualType FT, CharUnits Offset,
std::array<Address, 2> Addrs) {
CGF->callCStructMoveConstructor(CGF->MakeAddrLValue(Addrs[DstIdx], FT),
CGF->EmitARCStoreStrong(CGF->MakeAddrLValue(Addrs[DstIdx], QT), SrcVal,
false);
}
+
+ void visitARCWeak(QualType QT, const FieldDecl *FD, CharUnits CurStackOffset,
+ std::array<Address, 2> Addrs) {
+ Addrs[DstIdx] = getAddrWithOffset(Addrs[DstIdx], CurStackOffset, FD);
+ Addrs[SrcIdx] = getAddrWithOffset(Addrs[SrcIdx], CurStackOffset, FD);
+ CGF->emitARCCopyAssignWeak(QT, Addrs[DstIdx], Addrs[SrcIdx]);
+ }
+
void callSpecialFunction(QualType FT, CharUnits Offset,
std::array<Address, 2> Addrs) {
CGF->callCStructCopyAssignmentOperator(
CGF->EmitARCRelease(DstVal, ARCImpreciseLifetime);
}
+ void visitARCWeak(QualType QT, const FieldDecl *FD, CharUnits CurStackOffset,
+ std::array<Address, 2> Addrs) {
+ Addrs[DstIdx] = getAddrWithOffset(Addrs[DstIdx], CurStackOffset, FD);
+ Addrs[SrcIdx] = getAddrWithOffset(Addrs[SrcIdx], CurStackOffset, FD);
+ CGF->emitARCMoveAssignWeak(QT, Addrs[DstIdx], Addrs[SrcIdx]);
+ }
+
void callSpecialFunction(QualType FT, CharUnits Offset,
std::array<Address, 2> Addrs) {
CGF->callCStructMoveAssignmentOperator(
"objc_copyWeak");
}
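+/// Copy-assign a __weak pointer: load the source with objc_loadWeakRetained,
+/// store it into the destination with objc_storeWeak, and release the
+/// retained temporary afterwards.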
+void CodeGenFunction::emitARCCopyAssignWeak(QualType Ty, Address DstAddr,
+ Address SrcAddr) {
+ llvm::Value *Object = EmitARCLoadWeakRetained(SrcAddr);
+ Object = EmitObjCConsumeObject(Ty, Object);
+ EmitARCStoreWeak(DstAddr, Object, false);
+}
+
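+/// Move-assign a __weak pointer: same as a copy-assign, but the source __weak
+/// variable is additionally destroyed (objc_destroyWeak) after the store.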
+void CodeGenFunction::emitARCMoveAssignWeak(QualType Ty, Address DstAddr,
+ Address SrcAddr) {
+ llvm::Value *Object = EmitARCLoadWeakRetained(SrcAddr);
+ Object = EmitObjCConsumeObject(Ty, Object);
+ EmitARCStoreWeak(DstAddr, Object, false);
+ EmitARCDestroyWeak(SrcAddr);
+}
+
/// Produce the code to do an objc_autoreleasepool_push.
/// call i8* \@objc_autoreleasePoolPush(void)
llvm::Value *CodeGenFunction::EmitObjCAutoreleasePoolPush() {
llvm::Value *EmitARCLoadWeak(Address addr);
llvm::Value *EmitARCLoadWeakRetained(Address addr);
llvm::Value *EmitARCStoreWeak(Address addr, llvm::Value *value, bool ignored);
+ void emitARCCopyAssignWeak(QualType Ty, Address DstAddr, Address SrcAddr);
+ void emitARCMoveAssignWeak(QualType Ty, Address DstAddr, Address SrcAddr);
void EmitARCCopyWeak(Address dst, Address src);
void EmitARCMoveWeak(Address dst, Address src);
llvm::Value *EmitARCRetainAutorelease(QualType type, llvm::Value *value);
static CGCXXABI::RecordArgABI getRecordArgABI(const RecordType *RT,
CGCXXABI &CXXABI) {
const CXXRecordDecl *RD = dyn_cast<CXXRecordDecl>(RT->getDecl());
- if (!RD)
+ if (!RD) {
+ if (!RT->getDecl()->canPassInRegisters())
+ return CGCXXABI::RAA_Indirect;
return CGCXXABI::RAA_Default;
+ }
return CXXABI.getRecordArgABI(RD);
}
return getRecordArgABI(RT, CXXABI);
}
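+/// Wrapper around CGCXXABI::classifyReturnType that, in addition to the C++
+/// ABI rules, returns non-C++ record types that cannot be passed in registers
+/// (e.g. a C struct with a __weak field) indirectly via a hidden sret pointer.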
+static bool classifyReturnType(const CGCXXABI &CXXABI, CGFunctionInfo &FI,
+ const ABIInfo &Info) {
+ QualType Ty = FI.getReturnType();
+
+ if (const auto *RT = Ty->getAs<RecordType>())
+ if (!isa<CXXRecordDecl>(RT->getDecl()) &&
+ !RT->getDecl()->canPassInRegisters()) {
+ FI.getReturnInfo() = Info.getNaturalAlignIndirect(Ty);
+ return true;
+ }
+
+ return CXXABI.classifyReturnType(FI);
+}
+
/// Pass transparent unions as if they were the type of the first element. Sema
/// should ensure that all elements of the union have the same "machine type".
static QualType useFirstFieldIfTransparentUnion(QualType Ty) {
} else
State.FreeRegs = DefaultNumRegisterParameters;
- if (!getCXXABI().classifyReturnType(FI)) {
+ if (!::classifyReturnType(getCXXABI(), FI, *this)) {
FI.getReturnInfo() = classifyReturnType(FI.getReturnType(), State);
} else if (FI.getReturnInfo().isIndirect()) {
// The C++ ABI is not aware of register usage, so we have to check if the
unsigned FreeSSERegs = IsRegCall ? 16 : 8;
unsigned NeededInt, NeededSSE;
- if (!getCXXABI().classifyReturnType(FI)) {
+ if (!::classifyReturnType(getCXXABI(), FI, *this)) {
if (IsRegCall && FI.getReturnType()->getTypePtr()->isRecordType() &&
!FI.getReturnType()->getTypePtr()->isUnionType()) {
FI.getReturnInfo() =
bool isIllegalVectorType(QualType Ty) const;
void computeInfo(CGFunctionInfo &FI) const override {
- if (!getCXXABI().classifyReturnType(FI))
+ if (!::classifyReturnType(getCXXABI(), FI, *this))
FI.getReturnInfo() = classifyReturnType(FI.getReturnType());
for (auto &it : FI.arguments())
}
void ARMABIInfo::computeInfo(CGFunctionInfo &FI) const {
- if (!getCXXABI().classifyReturnType(FI))
+ if (!::classifyReturnType(getCXXABI(), FI, *this))
FI.getReturnInfo() =
classifyReturnType(FI.getReturnType(), FI.isVariadic());
// Get the type for the field.
const Type *FDTy = FD->getType().getTypePtr();
- Qualifiers QS = FD->getType().getQualifiers();
if (!FD->isAnonymousStructOrUnion()) {
// Remember all fields written by the user.
QualType T = Context.getObjCObjectPointerType(FD->getType());
FD->setType(T);
} else if (getLangOpts().allowsNonTrivialObjCLifetimeQualifiers() &&
- Record && !ObjCFieldLifetimeErrReported &&
- ((!getLangOpts().CPlusPlus &&
- QS.getObjCLifetime() == Qualifiers::OCL_Weak) ||
- Record->isUnion())) {
+ Record && !ObjCFieldLifetimeErrReported && Record->isUnion()) {
// It's an error in ARC or Weak if a field has lifetime.
// We don't want to report this in a system header, though,
// so we just make the field unavailable.
Record->setNonTrivialToPrimitiveCopy(true);
if (FT.isDestructedType())
Record->setNonTrivialToPrimitiveDestroy(true);
+ if (!FT.canPassInRegisters())
+ Record->setCanPassInRegisters(false);
}
if (Record && FD->getType().isVolatileQualified())
RD->setNonTrivialToPrimitiveDefaultInitialize(Record.readInt());
RD->setNonTrivialToPrimitiveCopy(Record.readInt());
RD->setNonTrivialToPrimitiveDestroy(Record.readInt());
+ RD->setCanPassInRegisters(Record.readInt());
return Redecl;
}
Data.HasIrrelevantDestructor = Record.readInt();
Data.HasConstexprNonCopyMoveConstructor = Record.readInt();
Data.HasDefaultedDefaultConstructor = Record.readInt();
- Data.CanPassInRegisters = Record.readInt();
Data.DefaultedDefaultConstructorIsConstexpr = Record.readInt();
Data.HasConstexprDefaultConstructor = Record.readInt();
Data.HasNonLiteralTypeFieldsOrBases = Record.readInt();
MATCH_FIELD(HasIrrelevantDestructor)
OR_FIELD(HasConstexprNonCopyMoveConstructor)
OR_FIELD(HasDefaultedDefaultConstructor)
- MATCH_FIELD(CanPassInRegisters)
MATCH_FIELD(DefaultedDefaultConstructorIsConstexpr)
OR_FIELD(HasConstexprDefaultConstructor)
MATCH_FIELD(HasNonLiteralTypeFieldsOrBases)
bool HadRealDefinition =
OldDD && (OldDD->Definition != RD ||
!Reader.PendingFakeDefinitionData.count(OldDD));
+ RD->setCanPassInRegisters(Record.readInt());
ReadCXXRecordDefinition(RD, /*Update*/true);
// Visible update is handled separately.
case UPD_CXX_INSTANTIATED_CLASS_DEFINITION: {
auto *RD = cast<CXXRecordDecl>(D);
UpdatedDeclContexts.insert(RD->getPrimaryContext());
+ Record.push_back(RD->canPassInRegisters());
Record.AddCXXDefinitionData(RD);
Record.AddOffset(WriteDeclContextLexicalBlock(
*Context, const_cast<CXXRecordDecl *>(RD)));
Record->push_back(Data.HasIrrelevantDestructor);
Record->push_back(Data.HasConstexprNonCopyMoveConstructor);
Record->push_back(Data.HasDefaultedDefaultConstructor);
- Record->push_back(Data.CanPassInRegisters);
Record->push_back(Data.DefaultedDefaultConstructorIsConstexpr);
Record->push_back(Data.HasConstexprDefaultConstructor);
Record->push_back(Data.HasNonLiteralTypeFieldsOrBases);
Record.push_back(D->isNonTrivialToPrimitiveDefaultInitialize());
Record.push_back(D->isNonTrivialToPrimitiveCopy());
Record.push_back(D->isNonTrivialToPrimitiveDestroy());
+ Record.push_back(D->canPassInRegisters());
if (D->getDeclContext() == D->getLexicalDeclContext() &&
!D->hasAttrs() &&
Abv->Add(BitCodeAbbrevOp(BitCodeAbbrevOp::Fixed, 1));
// isNonTrivialToPrimitiveDestroy
Abv->Add(BitCodeAbbrevOp(BitCodeAbbrevOp::Fixed, 1));
+ Abv->Add(BitCodeAbbrevOp(BitCodeAbbrevOp::Fixed, 1)); // canPassInRegisters
// DC
Abv->Add(BitCodeAbbrevOp(BitCodeAbbrevOp::VBR, 6)); // LexicalOffset
-// RUN: %clang_cc1 -triple arm64-apple-ios11 -fobjc-arc -fblocks -fobjc-runtime=ios-11.0 -fobjc-exceptions -fexceptions -fobjc-arc-exceptions -emit-llvm -o - %s | FileCheck %s
+// RUN: %clang_cc1 -triple arm64-apple-ios11 -fobjc-arc -fblocks -fobjc-runtime=ios-11.0 -fobjc-exceptions -fexceptions -emit-llvm -o - %s | FileCheck %s
// CHECK: %[[STRUCT_STRONG:.*]] = type { i32, i8* }
+// CHECK: %[[STRUCT_WEAK:.*]] = type { i32, i8* }
typedef struct {
int i;
id f1;
} Strong;
+typedef struct {
+ int i;
+ __weak id f1;
+} Weak;
+
// CHECK: define void @testStrongException()
// CHECK: %[[AGG_TMP:.*]] = alloca %[[STRUCT_STRONG]], align 8
// CHECK: %[[AGG_TMP1:.*]] = alloca %[[STRUCT_STRONG]], align 8
void testStrongException(void) {
calleeStrong(genStrong(), genStrong());
}
+
+// CHECK: define void @testWeakException()
+// CHECK: %[[AGG_TMP:.*]] = alloca %[[STRUCT_WEAK]], align 8
+// CHECK: %[[AGG_TMP1:.*]] = alloca %[[STRUCT_WEAK]], align 8
+// CHECK: call void @genWeak(%[[STRUCT_WEAK]]* sret %[[AGG_TMP]])
+// CHECK: invoke void @genWeak(%[[STRUCT_WEAK]]* sret %[[AGG_TMP1]])
+
+// CHECK: call void @calleeWeak(%[[STRUCT_WEAK]]* %[[AGG_TMP]], %[[STRUCT_WEAK]]* %[[AGG_TMP1]])
+// CHECK: ret void
+
+// CHECK: landingpad { i8*, i32 }
+// CHECK: %[[V3:.*]] = bitcast %[[STRUCT_WEAK]]* %[[AGG_TMP]] to i8**
+// CHECK: call void @__destructor_8_w8(i8** %[[V3]])
+// CHECK: br label
+
+// CHECK: resume
+
+Weak genWeak(void);
+void calleeWeak(Weak, Weak);
+
+void testWeakException(void) {
+ calleeWeak(genWeak(), genWeak());
+}
--- /dev/null
+// RUN: %clang_cc1 -triple arm64-apple-ios11 -fobjc-arc -fblocks -fobjc-runtime=ios-11.0 -emit-llvm -o - %s | FileCheck -check-prefix=ARM64 -check-prefix=COMMON %s
+// RUN: %clang_cc1 -triple thumbv7-apple-ios10 -fobjc-arc -fblocks -fobjc-runtime=ios-10.0 -emit-llvm -o - %s | FileCheck -check-prefix=COMMON %s
+// RUN: %clang_cc1 -triple x86_64-apple-macosx10.13 -fobjc-arc -fblocks -fobjc-runtime=macosx-10.13.0 -emit-llvm -o - %s | FileCheck -check-prefix=COMMON %s
+// RUN: %clang_cc1 -triple i386-apple-macosx10.13.0 -fobjc-arc -fblocks -fobjc-runtime=macosx-fragile-10.13.0 -emit-llvm -o - %s | FileCheck -check-prefix=COMMON %s
+
+typedef void (^BlockTy)(void);
+
+// COMMON: %[[STRUCT_WEAK:.*]] = type { i32, i8* }
+
+typedef struct {
+ int f0;
+ __weak id f1;
+} Weak;
+
+Weak getWeak(void);
+void calleeWeak(Weak);
+
+// ARM64: define void @test_constructor_destructor_Weak()
+// ARM64: %[[T:.*]] = alloca %[[STRUCT_WEAK]], align 8
+// ARM64: %[[V0:.*]] = bitcast %[[STRUCT_WEAK]]* %[[T]] to i8**
+// ARM64: call void @__default_constructor_8_w8(i8** %[[V0]])
+// ARM64: %[[V1:.*]] = bitcast %[[STRUCT_WEAK]]* %[[T]] to i8**
+// ARM64: call void @__destructor_8_w8(i8** %[[V1]])
+// ARM64: ret void
+
+// ARM64: define linkonce_odr hidden void @__default_constructor_8_w8(i8** %[[DST:.*]])
+// ARM64: %[[DST_ADDR:.*]] = alloca i8**, align 8
+// ARM64: store i8** %[[DST]], i8*** %[[DST_ADDR]], align 8
+// ARM64: %[[V0:.*]] = load i8**, i8*** %[[DST_ADDR]], align 8
+// ARM64: %[[V1:.*]] = bitcast i8** %[[V0]] to i8*
+// ARM64: %[[V2:.*]] = getelementptr inbounds i8, i8* %[[V1]], i64 8
+// ARM64: %[[V3:.*]] = bitcast i8* %[[V2]] to i8**
+// ARM64: %[[V4:.*]] = bitcast i8** %[[V3]] to i8*
+// ARM64: call void @llvm.memset.p0i8.i64(i8* align 8 %[[V4]], i8 0, i64 8, i1 false)
+
+// ARM64: define linkonce_odr hidden void @__destructor_8_w8(i8** %[[DST:.*]])
+// ARM64: %[[DST_ADDR:.*]] = alloca i8**, align 8
+// ARM64: store i8** %[[DST]], i8*** %[[DST_ADDR]], align 8
+// ARM64: %[[V0:.*]] = load i8**, i8*** %[[DST_ADDR]], align 8
+// ARM64: %[[V1:.*]] = bitcast i8** %[[V0]] to i8*
+// ARM64: %[[V2:.*]] = getelementptr inbounds i8, i8* %[[V1]], i64 8
+// ARM64: %[[V3:.*]] = bitcast i8* %[[V2]] to i8**
+// ARM64: call void @objc_destroyWeak(i8** %[[V3]])
+
+void test_constructor_destructor_Weak(void) {
+ Weak t;
+}
+
+// ARM64: define void @test_copy_constructor_Weak(%[[STRUCT_WEAK]]* %{{.*}})
+// ARM64: call void @__copy_constructor_8_8_t0w4_w8(i8** %{{.*}}, i8** %{{.*}})
+// ARM64: call void @__destructor_8_w8(i8** %{{.*}})
+
+// ARM64: define linkonce_odr hidden void @__copy_constructor_8_8_t0w4_w8(i8** %[[DST:.*]], i8** %[[SRC:.*]])
+// ARM64: %[[DST_ADDR:.*]] = alloca i8**, align 8
+// ARM64: %[[SRC_ADDR:.*]] = alloca i8**, align 8
+// ARM64: store i8** %[[DST]], i8*** %[[DST_ADDR]], align 8
+// ARM64: store i8** %[[SRC]], i8*** %[[SRC_ADDR]], align 8
+// ARM64: %[[V0:.*]] = load i8**, i8*** %[[DST_ADDR]], align 8
+// ARM64: %[[V1:.*]] = load i8**, i8*** %[[SRC_ADDR]], align 8
+// ARM64: %[[V2:.*]] = bitcast i8** %[[V0]] to i32*
+// ARM64: %[[V3:.*]] = bitcast i8** %[[V1]] to i32*
+// ARM64: %[[V4:.*]] = load i32, i32* %[[V3]], align 8
+// ARM64: store i32 %[[V4]], i32* %[[V2]], align 8
+// ARM64: %[[V5:.*]] = bitcast i8** %[[V0]] to i8*
+// ARM64: %[[V6:.*]] = getelementptr inbounds i8, i8* %[[V5]], i64 8
+// ARM64: %[[V7:.*]] = bitcast i8* %[[V6]] to i8**
+// ARM64: %[[V8:.*]] = bitcast i8** %[[V1]] to i8*
+// ARM64: %[[V9:.*]] = getelementptr inbounds i8, i8* %[[V8]], i64 8
+// ARM64: %[[V10:.*]] = bitcast i8* %[[V9]] to i8**
+// ARM64: call void @objc_copyWeak(i8** %[[V7]], i8** %[[V10]])
+
+void test_copy_constructor_Weak(Weak *s) {
+ Weak t = *s;
+}
+
+// ARM64: define void @test_copy_assignment_Weak(%[[STRUCT_WEAK]]* %{{.*}}, %[[STRUCT_WEAK]]* %{{.*}})
+// ARM64: call void @__copy_assignment_8_8_t0w4_w8(i8** %{{.*}}, i8** %{{.*}})
+
+// ARM64: define linkonce_odr hidden void @__copy_assignment_8_8_t0w4_w8(i8** %[[DST:.*]], i8** %[[SRC:.*]])
+// ARM64: %[[DST_ADDR:.*]] = alloca i8**, align 8
+// ARM64: %[[SRC_ADDR:.*]] = alloca i8**, align 8
+// ARM64: store i8** %[[DST]], i8*** %[[DST_ADDR]], align 8
+// ARM64: store i8** %[[SRC]], i8*** %[[SRC_ADDR]], align 8
+// ARM64: %[[V0:.*]] = load i8**, i8*** %[[DST_ADDR]], align 8
+// ARM64: %[[V1:.*]] = load i8**, i8*** %[[SRC_ADDR]], align 8
+// ARM64: %[[V2:.*]] = bitcast i8** %[[V0]] to i32*
+// ARM64: %[[V3:.*]] = bitcast i8** %[[V1]] to i32*
+// ARM64: %[[V4:.*]] = load i32, i32* %[[V3]], align 8
+// ARM64: store i32 %[[V4]], i32* %[[V2]], align 8
+// ARM64: %[[V5:.*]] = bitcast i8** %[[V0]] to i8*
+// ARM64: %[[V6:.*]] = getelementptr inbounds i8, i8* %[[V5]], i64 8
+// ARM64: %[[V7:.*]] = bitcast i8* %[[V6]] to i8**
+// ARM64: %[[V8:.*]] = bitcast i8** %[[V1]] to i8*
+// ARM64: %[[V9:.*]] = getelementptr inbounds i8, i8* %[[V8]], i64 8
+// ARM64: %[[V10:.*]] = bitcast i8* %[[V9]] to i8**
+// ARM64: %[[V11:.*]] = call i8* @objc_loadWeakRetained(i8** %[[V10]])
+// ARM64: %[[V12:.*]] = call i8* @objc_storeWeak(i8** %[[V7]], i8* %[[V11]])
+// ARM64: call void @objc_release(i8* %[[V11]])
+
+void test_copy_assignment_Weak(Weak *d, Weak *s) {
+ *d = *s;
+}
+
+// ARM64: define internal void @__Block_byref_object_copy_(i8*, i8*)
+// ARM64: call void @__move_constructor_8_8_t0w4_w8(i8** %{{.*}}, i8** %{{.*}})
+
+// ARM64: define linkonce_odr hidden void @__move_constructor_8_8_t0w4_w8(i8** %[[DST:.*]], i8** %[[SRC:.*]])
+// ARM64: %[[DST_ADDR:.*]] = alloca i8**, align 8
+// ARM64: %[[SRC_ADDR:.*]] = alloca i8**, align 8
+// ARM64: store i8** %[[DST]], i8*** %[[DST_ADDR]], align 8
+// ARM64: store i8** %[[SRC]], i8*** %[[SRC_ADDR]], align 8
+// ARM64: %[[V0:.*]] = load i8**, i8*** %[[DST_ADDR]], align 8
+// ARM64: %[[V1:.*]] = load i8**, i8*** %[[SRC_ADDR]], align 8
+// ARM64: %[[V2:.*]] = bitcast i8** %[[V0]] to i32*
+// ARM64: %[[V3:.*]] = bitcast i8** %[[V1]] to i32*
+// ARM64: %[[V4:.*]] = load i32, i32* %[[V3]], align 8
+// ARM64: store i32 %[[V4]], i32* %[[V2]], align 8
+// ARM64: %[[V5:.*]] = bitcast i8** %[[V0]] to i8*
+// ARM64: %[[V6:.*]] = getelementptr inbounds i8, i8* %[[V5]], i64 8
+// ARM64: %[[V7:.*]] = bitcast i8* %[[V6]] to i8**
+// ARM64: %[[V8:.*]] = bitcast i8** %[[V1]] to i8*
+// ARM64: %[[V9:.*]] = getelementptr inbounds i8, i8* %[[V8]], i64 8
+// ARM64: %[[V10:.*]] = bitcast i8* %[[V9]] to i8**
+// ARM64: call void @objc_moveWeak(i8** %[[V7]], i8** %[[V10]])
+
+void test_move_constructor_Weak(void) {
+ __block Weak t;
+ BlockTy b = ^{ (void)t; };
+}
+
+// ARM64: define void @test_move_assignment_Weak(%[[STRUCT_WEAK]]* %{{.*}})
+// ARM64: call void @__move_assignment_8_8_t0w4_w8(i8** %{{.*}}, i8** %{{.*}})
+
+// ARM64: define linkonce_odr hidden void @__move_assignment_8_8_t0w4_w8(i8** %[[DST:.*]], i8** %[[SRC:.*]])
+// ARM64: %[[DST_ADDR:.*]] = alloca i8**, align 8
+// ARM64: %[[SRC_ADDR:.*]] = alloca i8**, align 8
+// ARM64: store i8** %[[DST]], i8*** %[[DST_ADDR]], align 8
+// ARM64: store i8** %[[SRC]], i8*** %[[SRC_ADDR]], align 8
+// ARM64: %[[V0:.*]] = load i8**, i8*** %[[DST_ADDR]], align 8
+// ARM64: %[[V1:.*]] = load i8**, i8*** %[[SRC_ADDR]], align 8
+// ARM64: %[[V2:.*]] = bitcast i8** %[[V0]] to i32*
+// ARM64: %[[V3:.*]] = bitcast i8** %[[V1]] to i32*
+// ARM64: %[[V4:.*]] = load i32, i32* %[[V3]], align 8
+// ARM64: store i32 %[[V4]], i32* %[[V2]], align 8
+// ARM64: %[[V5:.*]] = bitcast i8** %[[V0]] to i8*
+// ARM64: %[[V6:.*]] = getelementptr inbounds i8, i8* %[[V5]], i64 8
+// ARM64: %[[V7:.*]] = bitcast i8* %[[V6]] to i8**
+// ARM64: %[[V8:.*]] = bitcast i8** %[[V1]] to i8*
+// ARM64: %[[V9:.*]] = getelementptr inbounds i8, i8* %[[V8]], i64 8
+// ARM64: %[[V10:.*]] = bitcast i8* %[[V9]] to i8**
+// ARM64: %[[V11:.*]] = call i8* @objc_loadWeakRetained(i8** %[[V10]])
+// ARM64: %[[V12:.*]] = call i8* @objc_storeWeak(i8** %[[V7]], i8* %[[V11]])
+// ARM64: call void @objc_destroyWeak(i8** %[[V10]])
+// ARM64: call void @objc_release(i8* %[[V11]])
+
+void test_move_assignment_Weak(Weak *p) {
+ *p = getWeak();
+}
+
+// COMMON: define void @test_parameter_Weak(%[[STRUCT_WEAK]]* %[[A:.*]])
+// COMMON: %[[V0:.*]] = bitcast %[[STRUCT_WEAK]]* %[[A]] to i8**
+// COMMON: call void @__destructor_{{.*}}(i8** %[[V0]])
+
+void test_parameter_Weak(Weak a) {
+}
+
+// COMMON: define void @test_argument_Weak(%[[STRUCT_WEAK]]* %[[A:.*]])
+// COMMON: %[[A_ADDR:.*]] = alloca %[[STRUCT_WEAK]]*
+// COMMON: %[[AGG_TMP:.*]] = alloca %[[STRUCT_WEAK]]
+// COMMON: store %[[STRUCT_WEAK]]* %[[A]], %[[STRUCT_WEAK]]** %[[A_ADDR]]
+// COMMON: %[[V0:.*]] = load %[[STRUCT_WEAK]]*, %[[STRUCT_WEAK]]** %[[A_ADDR]]
+// COMMON: %[[V1:.*]] = bitcast %[[STRUCT_WEAK]]* %[[AGG_TMP]] to i8**
+// COMMON: %[[V2:.*]] = bitcast %[[STRUCT_WEAK]]* %[[V0]] to i8**
+// COMMON: call void @__copy_constructor_{{.*}}(i8** %[[V1]], i8** %[[V2]])
+// COMMON: call void @calleeWeak(%[[STRUCT_WEAK]]* %[[AGG_TMP]])
+// COMMON-NEXT: ret
+
+void test_argument_Weak(Weak *a) {
+ calleeWeak(*a);
+}
+
+// COMMON: define void @test_return_Weak(%[[STRUCT_WEAK]]* noalias sret %[[AGG_RESULT:.*]], %[[STRUCT_WEAK]]* %[[A:.*]])
+// COMMON: %[[A_ADDR:.*]] = alloca %[[STRUCT_WEAK]]*
+// COMMON: store %[[STRUCT_WEAK]]* %[[A]], %[[STRUCT_WEAK]]** %[[A_ADDR]]
+// COMMON: %[[V0:.*]] = load %[[STRUCT_WEAK]]*, %[[STRUCT_WEAK]]** %[[A_ADDR]]
+// COMMON: %[[V1:.*]] = bitcast %[[STRUCT_WEAK]]* %[[AGG_RESULT]] to i8**
+// COMMON: %[[V2:.*]] = bitcast %[[STRUCT_WEAK]]* %[[V0]] to i8**
+// COMMON: call void @__copy_constructor_{{.*}}(i8** %[[V1]], i8** %[[V2]])
+// COMMON: ret void
+
+Weak test_return_Weak(Weak *a) {
+ return *a;
+}
header "innerstructredef.h"
}
}
+
+module template_nontrivial0 {
+ header "template-nontrivial0.h"
+ export *
+}
+
+module template_nontrivial1 {
+ header "template-nontrivial1.h"
+ export *
+}
--- /dev/null
+template <class T>
+struct Class0 {
+ Class0();
+ Class0(const Class0<T> &);
+ ~Class0();
+ T *p;
+};
+
+struct S0 {
+ id x;
+};
+
+Class0<S0> returnNonTrivial();
--- /dev/null
+@import template_nontrivial0;
+
+struct S1 {
+ S1();
+ Class0<S0> a;
+};
static_assert(alignof(decltype(a)) == 2, "");
static_assert(alignof(decltype(b)) == 2, "");
}
+
+// Check that returnNonTrivial doesn't return Class0<S0> directly in registers.
+
+// CHECK: declare void @_Z16returnNonTrivialv(%struct.Class0* sret)
+
+@import template_nontrivial0;
+@import template_nontrivial1;
+
+S1::S1() : a(returnNonTrivial()) {
+}