Mirror of https://github.com/microsoft/clang-1.git
Validated by nightly-test runs on x86 and x86-64 darwin, including after
self-host. Hopefully these results hold up on different platforms. I tried
to keep the GNU ObjC runtime happy, but it's hard for me to test.

Reimplement how clang generates IR for exceptions. Instead of creating new
invoke destinations which sequentially chain to the previous destination,
push a more semantic representation of *why* we need the
cleanup/catch/filter behavior, then collect that information into a single
landing pad upon request.

Also reorganizes how normal cleanups (i.e. cleanups triggered by
non-exceptional control flow) are generated, since it's actually fairly
closely tied in with the former. Remove the need to track which cleanup
scope a block is associated with.

Document a lot of previously poorly-understood (by me, at least) behavior.

The new framework implements the Horrible Hack (tm), which requires every
landing pad to have a catch-all so that inlining will work. Clang no longer
requires the Horrible Hack just to make exceptions flow correctly within a
function, however. The HH is an unfortunate requirement of LLVM's EH IR.

git-svn-id: https://llvm.org/svn/llvm-project/cfe/trunk@107631 91177308-0d34-0410-b5e6-96231b3b80d8
Parent: 6c47a9b977
Commit: f1549f66a8
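The recurring idiom in the hunks below replaces the old DelayedCleanupBlock /
EHCleanupBlock pair with a single CleanupBlock whose kind says which exit
paths it protects. A minimal sketch of the pattern, using only names that
this commit introduces (CleanupBlock, NormalCleanup, beginEHCleanup,
EHStack); D and Addr are placeholders for a destructor declaration and an
object address:

    // Sketch of the new cleanup idiom (the shape of PushDestructorCleanup).
    {
      CleanupBlock Scope(*this, NormalCleanup);   // normal-path cleanup code
      EmitCXXDestructorCall(D, Dtor_Complete, /*ForVirtualBase=*/false, Addr);

      if (Exceptions) {
        Scope.beginEHCleanup();                   // now the EH-path variant
        EmitCXXDestructorCall(D, Dtor_Complete, /*ForVirtualBase=*/false, Addr);
      }
    }
    // When the scope is popped, the cleanup is recorded on EHStack; the
    // landing pad that runs it is only materialized when something can throw.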
@@ -363,32 +363,7 @@ llvm::Value *CodeGenFunction::BuildBlockLiteralTmp(const BlockExpr *BE) {
      } else {
        if (BDRE->getCopyConstructorExpr()) {
          E = BDRE->getCopyConstructorExpr();
          // Code to destruct copy-constructed descriptor element for
          // copied-in class object.
          // TODO: Refactor this into common code with mostly similar
          // CodeGenFunction::EmitLocalBlockVarDecl
          QualType DtorTy = E->getType();
          if (const RecordType *RT = DtorTy->getAs<RecordType>())
            if (CXXRecordDecl *ClassDecl =
                  dyn_cast<CXXRecordDecl>(RT->getDecl())) {
              if (!ClassDecl->hasTrivialDestructor()) {
                const CXXDestructorDecl *D = ClassDecl->getDestructor();
                assert(D && "BuildBlockLiteralTmp - destructor is nul");
                {
                  // Normal destruction.
                  DelayedCleanupBlock Scope(*this);
                  EmitCXXDestructorCall(D, Dtor_Complete,
                                        /*ForVirtualBase=*/false, Addr);
                  // Make sure to jump to the exit block.
                  EmitBranch(Scope.getCleanupExitBlock());
                }
                if (Exceptions) {
                  EHCleanupBlock Cleanup(*this);
                  EmitCXXDestructorCall(D, Dtor_Complete,
                                        /*ForVirtualBase=*/false, Addr);
                }
              }
            }
          PushDestructorCleanup(E->getType(), Addr);
        }
        else {
          E = new (getContext()) DeclRefExpr(const_cast<ValueDecl*>(VD),
@@ -99,7 +99,7 @@ public:
  llvm::Value *BlockObjectAssign;
  llvm::Value *BlockObjectDispose;
  const llvm::Type *PtrToInt8Ty;
  const llvm::PointerType *PtrToInt8Ty;

  std::map<uint64_t, llvm::Constant *> AssignCache;
  std::map<uint64_t, llvm::Constant *> DestroyCache;
@@ -1075,6 +1075,24 @@ RValue CodeGenFunction::EmitCallArg(const Expr *E, QualType ArgType) {
  return EmitAnyExprToTemp(E);
}

/// Emits a call or invoke instruction to the given function, depending
/// on the current state of the EH stack.
llvm::CallSite
CodeGenFunction::EmitCallOrInvoke(llvm::Value *Callee,
                                  llvm::Value * const *ArgBegin,
                                  llvm::Value * const *ArgEnd,
                                  const llvm::Twine &Name) {
  llvm::BasicBlock *InvokeDest = getInvokeDest();
  if (!InvokeDest)
    return Builder.CreateCall(Callee, ArgBegin, ArgEnd, Name);

  llvm::BasicBlock *ContBB = createBasicBlock("invoke.cont");
  llvm::InvokeInst *Invoke = Builder.CreateInvoke(Callee, ContBB, InvokeDest,
                                                  ArgBegin, ArgEnd, Name);
  EmitBlock(ContBB);
  return Invoke;
}

RValue CodeGenFunction::EmitCall(const CGFunctionInfo &CallInfo,
                                 llvm::Value *Callee,
                                 ReturnValueSlot ReturnValue,
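EmitCallOrInvoke centralizes the call-versus-invoke decision that callers
previously made by hand. A hedged usage sketch; Callee, Arg0 and Arg1 are
placeholder values, not part of the patch:

    llvm::Value *Args[] = { Arg0, Arg1 };            // placeholder arguments
    llvm::CallSite CS = EmitCallOrInvoke(Callee, Args, Args + 2, "call");
    // CS wraps either a CallInst or an InvokeInst, depending on whether the
    // EH stack currently requires a landing pad at this point.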
@@ -1206,15 +1224,18 @@ RValue CodeGenFunction::EmitCall(const CGFunctionInfo &CallInfo,
  }


  llvm::BasicBlock *InvokeDest = getInvokeDest();
  unsigned CallingConv;
  CodeGen::AttributeListType AttributeList;
  CGM.ConstructAttributeList(CallInfo, TargetDecl, AttributeList, CallingConv);
  llvm::AttrListPtr Attrs = llvm::AttrListPtr::get(AttributeList.begin(),
                                                   AttributeList.end());

  llvm::BasicBlock *InvokeDest = 0;
  if (!(Attrs.getFnAttributes() & llvm::Attribute::NoUnwind))
    InvokeDest = getInvokeDest();

  llvm::CallSite CS;
  if (!InvokeDest || (Attrs.getFnAttributes() & llvm::Attribute::NoUnwind)) {
  if (!InvokeDest) {
    CS = Builder.CreateCall(Callee, Args.data(), Args.data()+Args.size());
  } else {
    llvm::BasicBlock *Cont = createBasicBlock("invoke.cont");
@@ -340,7 +340,7 @@ static void EmitBaseInitializer(CodeGenFunction &CGF,

  if (CGF.Exceptions && !BaseClassDecl->hasTrivialDestructor()) {
    // FIXME: Is this OK for C++0x delegating constructors?
    CodeGenFunction::EHCleanupBlock Cleanup(CGF);
    CodeGenFunction::CleanupBlock Cleanup(CGF, CodeGenFunction::EHCleanup);

    CXXDestructorDecl *DD = BaseClassDecl->getDestructor();
    CGF.EmitCXXDestructorCall(DD, Dtor_Base, isBaseVirtual, V);

@@ -354,7 +354,7 @@ static void EmitAggMemberInitializer(CodeGenFunction &CGF,
                                     QualType T,
                                     unsigned Index) {
  if (Index == MemberInit->getNumArrayIndices()) {
    CodeGenFunction::CleanupScope Cleanups(CGF);
    CodeGenFunction::RunCleanupsScope Cleanups(CGF);

    llvm::Value *Dest = LHS.getAddress();
    if (ArrayIndexVar) {

@@ -410,7 +410,7 @@ static void EmitAggMemberInitializer(CodeGenFunction &CGF,
  llvm::BasicBlock *ContinueBlock = CGF.createBasicBlock("for.inc");

  {
    CodeGenFunction::CleanupScope Cleanups(CGF);
    CodeGenFunction::RunCleanupsScope Cleanups(CGF);

    // Inside the loop body recurse to emit the inner loop or, eventually, the
    // constructor call.

@@ -534,7 +534,7 @@ static void EmitMemberInitializer(CodeGenFunction &CGF,
      CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
      if (!RD->hasTrivialDestructor()) {
        // FIXME: Is this OK for C++0x delegating constructors?
        CodeGenFunction::EHCleanupBlock Cleanup(CGF);
        CodeGenFunction::CleanupBlock Cleanup(CGF, CodeGenFunction::EHCleanup);

        llvm::Value *ThisPtr = CGF.LoadCXXThis();
        LValue LHS = CGF.EmitLValueForField(ThisPtr, Field, 0);
@@ -612,7 +612,7 @@ void CodeGenFunction::EmitConstructorBody(FunctionArgList &Args) {
  if (IsTryBody)
    TryInfo = EnterCXXTryStmt(*cast<CXXTryStmt>(Body));

  unsigned CleanupStackSize = CleanupEntries.size();
  EHScopeStack::stable_iterator CleanupDepth = EHStack.stable_begin();

  // Emit the constructor prologue, i.e. the base and member
  // initializers.

@@ -628,7 +628,7 @@ void CodeGenFunction::EmitConstructorBody(FunctionArgList &Args) {
  // initializers, which includes (along the exceptional path) the
  // destructors for those members and bases that were fully
  // constructed.
  EmitCleanupBlocks(CleanupStackSize);
  PopCleanupBlocks(CleanupDepth);

  if (IsTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), TryInfo);

@@ -648,9 +648,6 @@ void CodeGenFunction::EmitCtorPrologue(const CXXConstructorDecl *CD,
       B != E; ++B) {
    CXXBaseOrMemberInitializer *Member = (*B);

    assert(LiveTemporaries.empty() &&
           "Should not have any live temporaries at initializer start!");

    if (Member->isBaseInitializer())
      EmitBaseInitializer(*this, ClassDecl, Member, CtorType);
    else

@@ -659,12 +656,8 @@ void CodeGenFunction::EmitCtorPrologue(const CXXConstructorDecl *CD,

  InitializeVTablePointers(ClassDecl);

  for (unsigned I = 0, E = MemberInitializers.size(); I != E; ++I) {
    assert(LiveTemporaries.empty() &&
           "Should not have any live temporaries at initializer start!");

  for (unsigned I = 0, E = MemberInitializers.size(); I != E; ++I)
    EmitMemberInitializer(*this, ClassDecl, MemberInitializers[I], CD, Args);
  }
}

/// EmitDestructorBody - Emits the body of the current destructor.
@@ -684,8 +677,28 @@ void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) {
  if (isTryBody)
    TryInfo = EnterCXXTryStmt(*cast<CXXTryStmt>(Body));

  llvm::BasicBlock *DtorEpilogue = createBasicBlock("dtor.epilogue");
  PushCleanupBlock(DtorEpilogue);
  // Emit the destructor epilogue now. If this is a complete
  // destructor with a function-try-block, perform the base epilogue
  // as well.
  //
  // FIXME: This isn't really right, because an exception in the
  // non-EH epilogue should jump to the appropriate place in the
  // EH epilogue.
  {
    CleanupBlock Cleanup(*this, NormalCleanup);

    if (isTryBody && DtorType == Dtor_Complete)
      EmitDtorEpilogue(Dtor, Dtor_Base);
    EmitDtorEpilogue(Dtor, DtorType);

    if (Exceptions) {
      Cleanup.beginEHCleanup();

      if (isTryBody && DtorType == Dtor_Complete)
        EmitDtorEpilogue(Dtor, Dtor_Base);
      EmitDtorEpilogue(Dtor, DtorType);
    }
  }

  bool SkipBody = false; // should get jump-threaded

@@ -724,23 +737,8 @@ void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) {
    // nothing to do besides what's in the epilogue
  }

  // Jump to the cleanup block.
  CleanupBlockInfo Info = PopCleanupBlock();
  assert(Info.CleanupBlock == DtorEpilogue && "Block mismatch!");
  EmitBlock(DtorEpilogue);

  // Emit the destructor epilogue now. If this is a complete
  // destructor with a function-try-block, perform the base epilogue
  // as well.
  if (isTryBody && DtorType == Dtor_Complete)
    EmitDtorEpilogue(Dtor, Dtor_Base);
  EmitDtorEpilogue(Dtor, DtorType);

  // Link up the cleanup information.
  if (Info.SwitchBlock)
    EmitBlock(Info.SwitchBlock);
  if (Info.EndBlock)
    EmitBlock(Info.EndBlock);
  // We're done with the epilogue cleanup.
  PopCleanupBlock();

  // Exit the try if applicable.
  if (isTryBody)
@@ -939,7 +937,7 @@ CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,

    // Keep track of the current number of live temporaries.
    {
      CXXTemporariesCleanupScope Scope(*this);
      RunCleanupsScope Scope(*this);

      EmitCXXConstructorCall(D, Ctor_Complete, /*ForVirtualBase=*/false, Address,
                             ArgBeg, ArgEnd);
@@ -1114,6 +1112,23 @@ void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *DD,
  EmitCXXMemberCall(DD, Callee, ReturnValueSlot(), This, VTT, 0, 0);
}

void CodeGenFunction::PushDestructorCleanup(QualType T, llvm::Value *Addr) {
  CXXRecordDecl *ClassDecl = T->getAsCXXRecordDecl();
  if (!ClassDecl) return;
  if (ClassDecl->hasTrivialDestructor()) return;

  const CXXDestructorDecl *D = ClassDecl->getDestructor();

  CleanupBlock Scope(*this, NormalCleanup);

  EmitCXXDestructorCall(D, Dtor_Complete, /*ForVirtualBase=*/false, Addr);

  if (Exceptions) {
    Scope.beginEHCleanup();
    EmitCXXDestructorCall(D, Dtor_Complete, /*ForVirtualBase=*/false, Addr);
  }
}

llvm::Value *
CodeGenFunction::GetVirtualBaseClassOffset(llvm::Value *This,
                                           const CXXRecordDecl *ClassDecl,
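PushDestructorCleanup gives callers a one-line way to schedule destruction of
an object on both the normal and (when enabled) the exceptional path; the
BuildBlockLiteralTmp hunk at the top of this commit uses it to replace
roughly thirty lines of manual cleanup code. A sketch, with Addr a
placeholder for the object's address:

    // Registers normal and EH cleanups if the type has a non-trivial dtor;
    // does nothing for trivial or non-class types.
    PushDestructorCleanup(E->getType(), Addr);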
@@ -391,7 +391,8 @@ const llvm::Type *CodeGenFunction::BuildByRefType(const ValueDecl *D) {
/// EmitLocalBlockVarDecl - Emit code and set up an entry in LocalDeclMap for a
/// variable declaration with auto, register, or no storage class specifier.
/// These turn into simple stack objects, or GlobalValues depending on target.
void CodeGenFunction::EmitLocalBlockVarDecl(const VarDecl &D) {
void CodeGenFunction::EmitLocalBlockVarDecl(const VarDecl &D,
                                            SpecialInitFn *SpecialInit) {
  QualType Ty = D.getType();
  bool isByRef = D.hasAttr<BlocksAttr>();
  bool needsDispose = false;

@@ -489,7 +490,7 @@ void CodeGenFunction::EmitLocalBlockVarDecl(const VarDecl &D) {

    {
      // Push a cleanup block and restore the stack there.
      DelayedCleanupBlock scope(*this);
      CleanupBlock scope(*this, NormalCleanup);

      V = Builder.CreateLoad(Stack, "tmp");
      llvm::Value *F = CGM.getIntrinsic(llvm::Intrinsic::stackrestore);

@@ -597,7 +598,9 @@ void CodeGenFunction::EmitLocalBlockVarDecl(const VarDecl &D) {
    }
  }

  if (Init) {
  if (SpecialInit) {
    SpecialInit(*this, D, DeclPtr);
  } else if (Init) {
    llvm::Value *Loc = DeclPtr;
    if (isByRef)
      Loc = Builder.CreateStructGEP(DeclPtr, getByRefValueLLVMField(&D),

@@ -663,7 +666,7 @@ void CodeGenFunction::EmitLocalBlockVarDecl(const VarDecl &D) {
      EmitAggExpr(Init, Loc, isVolatile);
    }
  }


  // Handle CXX destruction of variables.
  QualType DtorTy(Ty);
  while (const ArrayType *Array = getContext().getAsArrayType(DtorTy))
@@ -683,20 +686,18 @@ void CodeGenFunction::EmitLocalBlockVarDecl(const VarDecl &D) {

    if (const ConstantArrayType *Array =
          getContext().getAsConstantArrayType(Ty)) {
      {
        DelayedCleanupBlock Scope(*this);
        QualType BaseElementTy = getContext().getBaseElementType(Array);
        const llvm::Type *BasePtr = ConvertType(BaseElementTy);
        BasePtr = llvm::PointerType::getUnqual(BasePtr);
        llvm::Value *BaseAddrPtr =
          Builder.CreateBitCast(Loc, BasePtr);
        EmitCXXAggrDestructorCall(D, Array, BaseAddrPtr);

        // Make sure to jump to the exit block.
        EmitBranch(Scope.getCleanupExitBlock());
      }
      CleanupBlock Scope(*this, NormalCleanup);

      QualType BaseElementTy = getContext().getBaseElementType(Array);
      const llvm::Type *BasePtr = ConvertType(BaseElementTy);
      BasePtr = llvm::PointerType::getUnqual(BasePtr);
      llvm::Value *BaseAddrPtr =
        Builder.CreateBitCast(Loc, BasePtr);
      EmitCXXAggrDestructorCall(D, Array, BaseAddrPtr);

      if (Exceptions) {
        EHCleanupBlock Cleanup(*this);
        Scope.beginEHCleanup();

        QualType BaseElementTy = getContext().getBaseElementType(Array);
        const llvm::Type *BasePtr = ConvertType(BaseElementTy);
        BasePtr = llvm::PointerType::getUnqual(BasePtr);

@@ -705,30 +706,30 @@ void CodeGenFunction::EmitLocalBlockVarDecl(const VarDecl &D) {
        EmitCXXAggrDestructorCall(D, Array, BaseAddrPtr);
      }
    } else {
      {
        // Normal destruction.
        DelayedCleanupBlock Scope(*this);
      // Normal destruction.
      CleanupBlock Scope(*this, NormalCleanup);

      if (NRVO) {
        // If we exited via NRVO, we skip the destructor call.
        llvm::BasicBlock *NoNRVO = createBasicBlock("nrvo.unused");
        Builder.CreateCondBr(Builder.CreateLoad(NRVOFlag, "nrvo.val"),
                             Scope.getCleanupExitBlock(),
                             NoNRVO);
        EmitBlock(NoNRVO);
      }

      // We don't call the destructor along the normal edge if we're
      // applying the NRVO.
      EmitCXXDestructorCall(D, Dtor_Complete, /*ForVirtualBase=*/false,
                            Loc);

      // Make sure to jump to the exit block.
      EmitBranch(Scope.getCleanupExitBlock());
      llvm::BasicBlock *SkipDtor = 0;
      if (NRVO) {
        // If we exited via NRVO, we skip the destructor call.
        llvm::BasicBlock *NoNRVO = createBasicBlock("nrvo.unused");
        SkipDtor = createBasicBlock("nrvo.skipdtor");
        Builder.CreateCondBr(Builder.CreateLoad(NRVOFlag, "nrvo.val"),
                             SkipDtor,
                             NoNRVO);
        EmitBlock(NoNRVO);
      }

      // We don't call the destructor along the normal edge if we're
      // applying the NRVO.
      EmitCXXDestructorCall(D, Dtor_Complete, /*ForVirtualBase=*/false,
                            Loc);

      if (NRVO) EmitBlock(SkipDtor);

      // Along the exceptions path we always execute the dtor.
      if (Exceptions) {
        EHCleanupBlock Cleanup(*this);
        Scope.beginEHCleanup();
        EmitCXXDestructorCall(D, Dtor_Complete, /*ForVirtualBase=*/false,
                              Loc);
      }

@@ -752,17 +753,19 @@ void CodeGenFunction::EmitLocalBlockVarDecl(const VarDecl &D) {
    //
    // To fix this we insert a bitcast here.
    QualType ArgTy = Info.arg_begin()->type;
    {
      DelayedCleanupBlock scope(*this);

      CallArgList Args;
      Args.push_back(std::make_pair(RValue::get(Builder.CreateBitCast(DeclPtr,
                                                ConvertType(ArgTy))),
                     getContext().getPointerType(D.getType())));
      EmitCall(Info, F, ReturnValueSlot(), Args);
    }
    CleanupBlock CleanupScope(*this, NormalCleanup);

    // Normal cleanup.
    CallArgList Args;
    Args.push_back(std::make_pair(RValue::get(Builder.CreateBitCast(DeclPtr,
                                              ConvertType(ArgTy))),
                   getContext().getPointerType(D.getType())));
    EmitCall(Info, F, ReturnValueSlot(), Args);

    // EH cleanup.
    if (Exceptions) {
      EHCleanupBlock Cleanup(*this);
      CleanupScope.beginEHCleanup();

      CallArgList Args;
      Args.push_back(std::make_pair(RValue::get(Builder.CreateBitCast(DeclPtr,

@@ -773,15 +776,16 @@ void CodeGenFunction::EmitLocalBlockVarDecl(const VarDecl &D) {
  }

  if (needsDispose && CGM.getLangOptions().getGCMode() != LangOptions::GCOnly) {
    {
      DelayedCleanupBlock scope(*this);
      llvm::Value *V = Builder.CreateStructGEP(DeclPtr, 1, "forwarding");
      V = Builder.CreateLoad(V);
      BuildBlockRelease(V);
    }
    CleanupBlock CleanupScope(*this, NormalCleanup);

    llvm::Value *V = Builder.CreateStructGEP(DeclPtr, 1, "forwarding");
    V = Builder.CreateLoad(V);
    BuildBlockRelease(V);

    // FIXME: Turn this on and audit the codegen
    if (0 && Exceptions) {
      EHCleanupBlock Cleanup(*this);
      CleanupScope.beginEHCleanup();

      llvm::Value *V = Builder.CreateStructGEP(DeclPtr, 1, "forwarding");
      V = Builder.CreateLoad(V);
      BuildBlockRelease(V);
@@ -347,8 +347,6 @@ CodeGenFunction::EmitStaticCXXBlockVarDeclInit(const VarDecl &D,
  EmitBlock(InitCheckBlock);

  // Variables used when coping with thread-safe statics and exceptions.
  llvm::BasicBlock *SavedLandingPad = 0;
  llvm::BasicBlock *LandingPad = 0;
  if (ThreadsafeStatics) {
    // Call __cxa_guard_acquire.
    V = Builder.CreateCall(getGuardAcquireFn(*this), GuardVariable);

@@ -358,10 +356,10 @@ CodeGenFunction::EmitStaticCXXBlockVarDeclInit(const VarDecl &D,
    Builder.CreateCondBr(Builder.CreateIsNotNull(V, "tobool"),
                         InitBlock, EndBlock);

    // Call __cxa_guard_abort along the exceptional edge.
    if (Exceptions) {
      SavedLandingPad = getInvokeDest();
      LandingPad = createBasicBlock("guard.lpad");
      setInvokeDest(LandingPad);
      CleanupBlock Cleanup(*this, EHCleanup);
      Builder.CreateCall(getGuardAbortFn(*this), GuardVariable);
    }

    EmitBlock(InitBlock);

@@ -376,7 +374,7 @@ CodeGenFunction::EmitStaticCXXBlockVarDeclInit(const VarDecl &D,
  EmitDeclInit(*this, D, GV);

  if (ThreadsafeStatics) {
    // Call __cxa_guard_release.
    // Call __cxa_guard_release. This cannot throw.
    Builder.CreateCall(getGuardReleaseFn(*this), GuardVariable);
  } else {
    llvm::Value *One =

@@ -388,58 +386,6 @@ CodeGenFunction::EmitStaticCXXBlockVarDeclInit(const VarDecl &D,
  if (!D.getType()->isReferenceType())
    EmitDeclDestroy(*this, D, GV);

  if (ThreadsafeStatics && Exceptions) {
    // If an exception is thrown during initialization, call __cxa_guard_abort
    // along the exceptional edge.
    EmitBranch(EndBlock);

    // Construct the landing pad.
    EmitBlock(LandingPad);

    // Personality function and LLVM intrinsics.
    llvm::Constant *Personality =
      CGM.CreateRuntimeFunction(llvm::FunctionType::get(llvm::Type::getInt32Ty
                                                        (VMContext),
                                                        true),
                                "__gxx_personality_v0");
    Personality = llvm::ConstantExpr::getBitCast(Personality, PtrToInt8Ty);
    llvm::Value *llvm_eh_exception =
      CGM.getIntrinsic(llvm::Intrinsic::eh_exception);
    llvm::Value *llvm_eh_selector =
      CGM.getIntrinsic(llvm::Intrinsic::eh_selector);

    // Exception object
    llvm::Value *Exc = Builder.CreateCall(llvm_eh_exception, "exc");
    llvm::Value *RethrowPtr = CreateTempAlloca(Exc->getType(), "_rethrow");

    // Call the selector function.
    const llvm::PointerType *PtrToInt8Ty
      = llvm::PointerType::getUnqual(llvm::Type::getInt8Ty(VMContext));
    llvm::Constant *Null = llvm::ConstantPointerNull::get(PtrToInt8Ty);
    llvm::Value* SelectorArgs[3] = { Exc, Personality, Null };
    Builder.CreateCall(llvm_eh_selector, SelectorArgs, SelectorArgs + 3,
                       "selector");
    Builder.CreateStore(Exc, RethrowPtr);

    // Call __cxa_guard_abort along the exceptional edge.
    Builder.CreateCall(getGuardAbortFn(*this), GuardVariable);

    setInvokeDest(SavedLandingPad);

    // Rethrow the current exception.
    if (getInvokeDest()) {
      llvm::BasicBlock *Cont = createBasicBlock("invoke.cont");
      Builder.CreateInvoke(getUnwindResumeOrRethrowFn(), Cont,
                           getInvokeDest(),
                           Builder.CreateLoad(RethrowPtr));
      EmitBlock(Cont);
    } else
      Builder.CreateCall(getUnwindResumeOrRethrowFn(),
                         Builder.CreateLoad(RethrowPtr));

    Builder.CreateUnreachable();
  }

  EmitBlock(EndBlock);
}
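For reference, the control flow emitted around a thread-safe static follows
the Itanium C++ ABI guard protocol; expressed as C++-level pseudocode (a
sketch of the emitted behavior, not literal library code -- run_initializer
and guard are placeholders):

    if (__cxa_guard_acquire(&guard)) {     // nonzero means we must initialize
      try {
        run_initializer();                 // placeholder for EmitDeclInit
      } catch (...) {
        __cxa_guard_abort(&guard);         // the EHCleanup block above
        throw;
      }
      __cxa_guard_release(&guard);         // cannot throw
    }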
The diff for one file is not shown because of its size.
@@ -0,0 +1,342 @@
//===-- CGException.h - Classes for exceptions IR generation ---*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// These classes support the generation of LLVM IR for exceptions in
// C++ and Objective C.
//
//===----------------------------------------------------------------------===//

#ifndef CLANG_CODEGEN_CGEXCEPTION_H
#define CLANG_CODEGEN_CGEXCEPTION_H

/// EHScopeStack is defined in CodeGenFunction.h, but its
/// implementation is in this file and in CGException.cpp.
#include "CodeGenFunction.h"

namespace llvm {
  class Value;
  class BasicBlock;
}

namespace clang {
namespace CodeGen {

/// A protected scope for zero-cost EH handling.
class EHScope {
  llvm::BasicBlock *CachedLandingPad;

  unsigned K : 2;

protected:
  enum { BitsRemaining = 30 };

public:
  enum Kind { Cleanup, Catch, Terminate, Filter };

  EHScope(Kind K) : CachedLandingPad(0), K(K) {}

  Kind getKind() const { return static_cast<Kind>(K); }

  llvm::BasicBlock *getCachedLandingPad() const {
    return CachedLandingPad;
  }

  void setCachedLandingPad(llvm::BasicBlock *Block) {
    CachedLandingPad = Block;
  }
};

/// A scope which attempts to handle some, possibly all, types of
/// exceptions.
///
/// Objective C @finally blocks are represented using a cleanup scope
/// after the catch scope.
class EHCatchScope : public EHScope {
  unsigned NumHandlers : BitsRemaining;

  // In effect, we have a flexible array member
  //   Handler Handlers[0];
  // But that's only standard in C99, not C++, so we have to do
  // annoying pointer arithmetic instead.

public:
  struct Handler {
    /// A type info value, or null (C++ null, not an LLVM null pointer)
    /// for a catch-all.
    llvm::Value *Type;

    /// The catch handler for this type.
    llvm::BasicBlock *Block;

    static Handler make(llvm::Value *Type, llvm::BasicBlock *Block) {
      Handler Temp;
      Temp.Type = Type;
      Temp.Block = Block;
      return Temp;
    }
  };

private:
  Handler *getHandlers() {
    return reinterpret_cast<Handler*>(this+1);
  }

  const Handler *getHandlers() const {
    return reinterpret_cast<const Handler*>(this+1);
  }

public:
  static size_t getSizeForNumHandlers(unsigned N) {
    return sizeof(EHCatchScope) + N * sizeof(Handler);
  }

  EHCatchScope(unsigned NumHandlers)
    : EHScope(Catch), NumHandlers(NumHandlers) {
  }

  unsigned getNumHandlers() const {
    return NumHandlers;
  }

  void setCatchAllHandler(unsigned I, llvm::BasicBlock *Block) {
    setHandler(I, /*catchall*/ 0, Block);
  }

  void setHandler(unsigned I, llvm::Value *Type, llvm::BasicBlock *Block) {
    assert(I < getNumHandlers());
    getHandlers()[I] = Handler::make(Type, Block);
  }

  const Handler &getHandler(unsigned I) const {
    assert(I < getNumHandlers());
    return getHandlers()[I];
  }

  typedef const Handler *iterator;
  iterator begin() const { return getHandlers(); }
  iterator end() const { return getHandlers() + getNumHandlers(); }

  static bool classof(const EHScope *Scope) {
    return Scope->getKind() == Catch;
  }
};

/// A scope which needs to execute some code if we try to unwind ---
/// either normally, via the EH mechanism, or both --- through it.
class EHCleanupScope : public EHScope {
  /// The number of fixups required by enclosing scopes (not including
  /// this one). If this is the top cleanup scope, all the fixups
  /// from this index onwards belong to this scope.
  unsigned FixupDepth : BitsRemaining;

  /// The nearest normal cleanup scope enclosing this one.
  EHScopeStack::stable_iterator EnclosingNormal;

  /// The nearest EH cleanup scope enclosing this one.
  EHScopeStack::stable_iterator EnclosingEH;

  llvm::BasicBlock *NormalEntry;
  llvm::BasicBlock *NormalExit;
  llvm::BasicBlock *EHEntry;
  llvm::BasicBlock *EHExit;

public:
  static size_t getSize() { return sizeof(EHCleanupScope); }

  EHCleanupScope(unsigned FixupDepth,
                 EHScopeStack::stable_iterator EnclosingNormal,
                 EHScopeStack::stable_iterator EnclosingEH,
                 llvm::BasicBlock *NormalEntry, llvm::BasicBlock *NormalExit,
                 llvm::BasicBlock *EHEntry, llvm::BasicBlock *EHExit)
    : EHScope(Cleanup), FixupDepth(FixupDepth),
      EnclosingNormal(EnclosingNormal), EnclosingEH(EnclosingEH),
      NormalEntry(NormalEntry), NormalExit(NormalExit),
      EHEntry(EHEntry), EHExit(EHExit) {
    assert((NormalEntry != 0) == (NormalExit != 0));
    assert((EHEntry != 0) == (EHExit != 0));
  }

  bool isNormalCleanup() const { return NormalEntry != 0; }
  bool isEHCleanup() const { return EHEntry != 0; }

  llvm::BasicBlock *getNormalEntry() const { return NormalEntry; }
  llvm::BasicBlock *getNormalExit() const { return NormalExit; }
  llvm::BasicBlock *getEHEntry() const { return EHEntry; }
  llvm::BasicBlock *getEHExit() const { return EHExit; }
  unsigned getFixupDepth() const { return FixupDepth; }
  EHScopeStack::stable_iterator getEnclosingNormalCleanup() const {
    return EnclosingNormal;
  }
  EHScopeStack::stable_iterator getEnclosingEHCleanup() const {
    return EnclosingEH;
  }

  static bool classof(const EHScope *Scope) {
    return Scope->getKind() == Cleanup;
  }
};

/// An exceptions scope which filters exceptions thrown through it.
/// Only exceptions matching the filter types will be permitted to be
/// thrown.
///
/// This is used to implement C++ exception specifications.
class EHFilterScope : public EHScope {
  unsigned NumFilters : BitsRemaining;

  // Essentially ends in a flexible array member:
  //   llvm::Value *FilterTypes[0];

  llvm::Value **getFilters() {
    return reinterpret_cast<llvm::Value**>(this+1);
  }

  llvm::Value * const *getFilters() const {
    return reinterpret_cast<llvm::Value* const *>(this+1);
  }

public:
  EHFilterScope(unsigned NumFilters) :
    EHScope(Filter), NumFilters(NumFilters) {}

  static size_t getSizeForNumFilters(unsigned NumFilters) {
    return sizeof(EHFilterScope) + NumFilters * sizeof(llvm::Value*);
  }

  unsigned getNumFilters() const { return NumFilters; }

  void setFilter(unsigned I, llvm::Value *FilterValue) {
    assert(I < getNumFilters());
    getFilters()[I] = FilterValue;
  }

  llvm::Value *getFilter(unsigned I) const {
    assert(I < getNumFilters());
    return getFilters()[I];
  }

  static bool classof(const EHScope *Scope) {
    return Scope->getKind() == Filter;
  }
};

/// An exceptions scope which calls std::terminate if any exception
/// reaches it.
class EHTerminateScope : public EHScope {
public:
  EHTerminateScope() : EHScope(Terminate) {}
  static size_t getSize() { return sizeof(EHTerminateScope); }

  static bool classof(const EHScope *Scope) {
    return Scope->getKind() == Terminate;
  }
};

/// A non-stable pointer into the scope stack.
class EHScopeStack::iterator {
  char *Ptr;

  friend class EHScopeStack;
  explicit iterator(char *Ptr) : Ptr(Ptr) {}

public:
  iterator() : Ptr(0) {}

  EHScope *get() const {
    return reinterpret_cast<EHScope*>(Ptr);
  }

  EHScope *operator->() const { return get(); }
  EHScope &operator*() const { return *get(); }

  iterator &operator++() {
    switch (get()->getKind()) {
    case EHScope::Catch:
      Ptr += EHCatchScope::getSizeForNumHandlers(
          static_cast<const EHCatchScope*>(get())->getNumHandlers());
      break;

    case EHScope::Filter:
      Ptr += EHFilterScope::getSizeForNumFilters(
          static_cast<const EHFilterScope*>(get())->getNumFilters());
      break;

    case EHScope::Cleanup:
      Ptr += EHCleanupScope::getSize();
      break;

    case EHScope::Terminate:
      Ptr += EHTerminateScope::getSize();
      break;
    }

    return *this;
  }

  iterator next() {
    iterator copy = *this;
    ++copy;
    return copy;
  }

  iterator operator++(int) {
    iterator copy = *this;
    operator++();
    return copy;
  }

  bool operator==(iterator other) const { return Ptr == other.Ptr; }
  bool operator!=(iterator other) const { return Ptr != other.Ptr; }
};

inline EHScopeStack::iterator EHScopeStack::begin() const {
  return iterator(StartOfData);
}

inline EHScopeStack::iterator EHScopeStack::end() const {
  return iterator(EndOfBuffer);
}

inline void EHScopeStack::popCatch() {
  assert(!empty() && "popping exception stack when not empty");

  assert(isa<EHCatchScope>(*begin()));
  StartOfData += EHCatchScope::getSizeForNumHandlers(
      cast<EHCatchScope>(*begin()).getNumHandlers());

  assert(CatchDepth > 0 && "mismatched catch/terminate push/pop");
  CatchDepth--;
}

inline void EHScopeStack::popTerminate() {
  assert(!empty() && "popping exception stack when not empty");

  assert(isa<EHTerminateScope>(*begin()));
  StartOfData += EHTerminateScope::getSize();

  assert(CatchDepth > 0 && "mismatched catch/terminate push/pop");
  CatchDepth--;
}

inline EHScopeStack::iterator EHScopeStack::find(stable_iterator sp) const {
  assert(sp.isValid() && "finding invalid savepoint");
  assert(sp.Size <= stable_begin().Size && "finding savepoint after pop");
  return iterator(EndOfBuffer - sp.Size);
}

inline EHScopeStack::stable_iterator
EHScopeStack::stabilize(iterator ir) const {
  assert(StartOfData <= ir.Ptr && ir.Ptr <= EndOfBuffer);
  return stable_iterator(EndOfBuffer - ir.Ptr);
}

}
}

#endif
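The header above is driven by pushing a scope before emitting protected code
and popping it afterwards; the landing pad is built lazily from whatever
scopes are live. The CGObjCGNU::EmitTryStmt hunk later in this commit is a
representative client; its skeleton, with NumHandlers, TypeInfo, Block, and
TryBody standing for the client's own bookkeeping:

    // Describe every handler up front; a null type-info means catch-all.
    EHCatchScope *Catch = CGF.EHStack.pushCatch(NumHandlers);
    for (unsigned I = 0; I != NumHandlers; ++I)
      Catch->setHandler(I, TypeInfo[I], Block[I]);

    CGF.EmitStmt(TryBody);      // emit the protected code

    CGF.EHStack.popCatch();     // leave the scope; then emit the handlers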
@@ -201,7 +201,7 @@ EmitExprForReferenceBinding(CodeGenFunction& CGF, const Expr* E,
    E = DAE->getExpr();

  if (const CXXExprWithTemporaries *TE = dyn_cast<CXXExprWithTemporaries>(E)) {
    CodeGenFunction::CXXTemporariesCleanupScope Scope(CGF);
    CodeGenFunction::RunCleanupsScope Scope(CGF);

    return EmitExprForReferenceBinding(CGF, TE->getSubExpr(),
                                       ReferenceTemporary,

@@ -363,17 +363,12 @@ CodeGenFunction::EmitReferenceBindingToExpr(const Expr* E,
    }
  }

  {
    DelayedCleanupBlock Scope(*this);
    EmitCXXDestructorCall(ReferenceTemporaryDtor, Dtor_Complete,
                          /*ForVirtualBase=*/false, ReferenceTemporary);
  CleanupBlock Cleanup(*this, NormalCleanup);
  EmitCXXDestructorCall(ReferenceTemporaryDtor, Dtor_Complete,
                        /*ForVirtualBase=*/false, ReferenceTemporary);

    // Make sure to jump to the exit block.
    EmitBranch(Scope.getCleanupExitBlock());
  }

  if (Exceptions) {
    EHCleanupBlock Cleanup(*this);
    Cleanup.beginEHCleanup();
    EmitCXXDestructorCall(ReferenceTemporaryDtor, Dtor_Complete,
                          /*ForVirtualBase=*/false, ReferenceTemporary);
  }

@@ -1947,7 +1942,7 @@ CodeGenFunction::EmitCXXTypeidLValue(const CXXTypeidExpr *E) {
LValue
CodeGenFunction::EmitCXXBindTemporaryLValue(const CXXBindTemporaryExpr *E) {
  LValue LV = EmitLValue(E->getSubExpr());
  PushCXXTemporary(E->getTemporary(), LV.getAddress());
  EmitCXXTemporary(E->getTemporary(), LV.getAddress());
  return LV;
}

@@ -532,7 +532,7 @@ void AggExprEmitter::VisitCXXBindTemporaryExpr(CXXBindTemporaryExpr *E) {

  // Don't make this a live temporary if we're emitting an initializer expr.
  if (!IsInitializer)
    CGF.PushCXXTemporary(E->getTemporary(), Val);
    CGF.EmitCXXTemporary(E->getTemporary(), Val);
}

void
@@ -783,8 +783,8 @@ void CodeGenFunction::EmitObjCForCollectionStmt(const ObjCForCollectionStmt &S){
                                     llvm::ConstantInt::get(UnsignedLongLTy, 1));
  Builder.CreateStore(Counter, CounterPtr);

  llvm::BasicBlock *LoopEnd = createBasicBlock("loopend");
  llvm::BasicBlock *AfterBody = createBasicBlock("afterbody");
  JumpDest LoopEnd = getJumpDestInCurrentScope("loopend");
  JumpDest AfterBody = getJumpDestInCurrentScope("afterbody");

  BreakContinueStack.push_back(BreakContinue(LoopEnd, AfterBody));

@@ -792,7 +792,7 @@ void CodeGenFunction::EmitObjCForCollectionStmt(const ObjCForCollectionStmt &S){

  BreakContinueStack.pop_back();

  EmitBlock(AfterBody);
  EmitBlock(AfterBody.Block);

  llvm::BasicBlock *FetchMore = createBasicBlock("fetchmore");

@@ -828,11 +828,11 @@ void CodeGenFunction::EmitObjCForCollectionStmt(const ObjCForCollectionStmt &S){
                    LV.getAddress());
  }

  EmitBlock(LoopEnd);
  EmitBlock(LoopEnd.Block);
}

void CodeGenFunction::EmitObjCAtTryStmt(const ObjCAtTryStmt &S) {
  CGM.getObjCRuntime().EmitTryOrSynchronizedStmt(*this, S);
  CGM.getObjCRuntime().EmitTryStmt(*this, S);
}

void CodeGenFunction::EmitObjCAtThrowStmt(const ObjCAtThrowStmt &S) {

@@ -841,7 +841,9 @@ void CodeGenFunction::EmitObjCAtThrowStmt(const ObjCAtThrowStmt &S) {

void CodeGenFunction::EmitObjCAtSynchronizedStmt(
                                              const ObjCAtSynchronizedStmt &S) {
  CGM.getObjCRuntime().EmitTryOrSynchronizedStmt(*this, S);
  CGM.getObjCRuntime().EmitSynchronizedStmt(*this, S);
}

CGObjCRuntime::~CGObjCRuntime() {}
@@ -17,6 +17,7 @@
#include "CGObjCRuntime.h"
#include "CodeGenModule.h"
#include "CodeGenFunction.h"
#include "CGException.h"

#include "clang/AST/ASTContext.h"
#include "clang/AST/Decl.h"

@@ -180,8 +181,10 @@ public:
  virtual llvm::Function *GetCopyStructFunction();
  virtual llvm::Constant *EnumerationMutationFunction();

  virtual void EmitTryOrSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
                                         const Stmt &S);
  virtual void EmitTryStmt(CodeGen::CodeGenFunction &CGF,
                           const ObjCAtTryStmt &S);
  virtual void EmitSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
                                    const ObjCAtSynchronizedStmt &S);
  virtual void EmitThrowStmt(CodeGen::CodeGenFunction &CGF,
                             const ObjCAtThrowStmt &S);
  virtual llvm::Value * EmitObjCWeakRead(CodeGen::CodeGenFunction &CGF,
@@ -1851,245 +1854,168 @@ llvm::Constant *CGObjCGNU::EnumerationMutationFunction() {
  return CGM.CreateRuntimeFunction(FTy, "objc_enumerationMutation");
}

void CGObjCGNU::EmitTryOrSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
                                          const Stmt &S) {
  // Pointer to the personality function
  llvm::Constant *Personality =
    CGM.CreateRuntimeFunction(llvm::FunctionType::get(llvm::Type::getInt32Ty(VMContext),
                                                      true),
                              "__gnu_objc_personality_v0");
  Personality = llvm::ConstantExpr::getBitCast(Personality, PtrTy);
  std::vector<const llvm::Type*> Params;
  Params.push_back(PtrTy);
  llvm::Value *RethrowFn =
    CGM.CreateRuntimeFunction(llvm::FunctionType::get(llvm::Type::getVoidTy(VMContext),
                                                      Params, false), "_Unwind_Resume");
void CGObjCGNU::EmitSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
                                     const ObjCAtSynchronizedStmt &S) {
  std::vector<const llvm::Type*> Args(1, IdTy);
  llvm::FunctionType *FTy =
    llvm::FunctionType::get(llvm::Type::getVoidTy(VMContext), Args, false);

  bool isTry = isa<ObjCAtTryStmt>(S);
  llvm::BasicBlock *TryBlock = CGF.createBasicBlock("try");
  llvm::BasicBlock *PrevLandingPad = CGF.getInvokeDest();
  llvm::BasicBlock *TryHandler = CGF.createBasicBlock("try.handler");
  llvm::BasicBlock *CatchInCatch = CGF.createBasicBlock("catch.rethrow");
  llvm::BasicBlock *FinallyBlock = CGF.createBasicBlock("finally");
  llvm::BasicBlock *FinallyRethrow = CGF.createBasicBlock("finally.throw");
  llvm::BasicBlock *FinallyEnd = CGF.createBasicBlock("finally.end");
  // Evaluate the lock operand. This should dominate the cleanup.
  llvm::Value *SyncArg =
    CGF.EmitScalarExpr(S.getSynchExpr());

  // @synchronized()
  if (!isTry) {
    std::vector<const llvm::Type*> Args(1, IdTy);
    llvm::FunctionType *FTy =
      llvm::FunctionType::get(llvm::Type::getVoidTy(VMContext), Args, false);
    llvm::Value *SyncEnter = CGM.CreateRuntimeFunction(FTy, "objc_sync_enter");
    llvm::Value *SyncArg =
      CGF.EmitScalarExpr(cast<ObjCAtSynchronizedStmt>(S).getSynchExpr());
    SyncArg = CGF.Builder.CreateBitCast(SyncArg, IdTy);
    CGF.Builder.CreateCall(SyncEnter, SyncArg);
  }
  // Acquire the lock.
  llvm::Value *SyncEnter = CGM.CreateRuntimeFunction(FTy, "objc_sync_enter");
  SyncArg = CGF.Builder.CreateBitCast(SyncArg, IdTy);
  CGF.Builder.CreateCall(SyncEnter, SyncArg);

  // Register an all-paths cleanup to release the lock.
  {
    CodeGenFunction::CleanupBlock
      ReleaseScope(CGF, CodeGenFunction::NormalAndEHCleanup);

  // Push an EH context entry, used for handling rethrows and jumps
  // through finally.
  CGF.PushCleanupBlock(FinallyBlock);

  // Emit the statements in the @try {} block
  CGF.setInvokeDest(TryHandler);

  CGF.EmitBlock(TryBlock);
  CGF.EmitStmt(isTry ? cast<ObjCAtTryStmt>(S).getTryBody()
                     : cast<ObjCAtSynchronizedStmt>(S).getSynchBody());

  // Jump to @finally if there is no exception
  CGF.EmitBranchThroughCleanup(FinallyEnd);

  // Emit the handlers
  CGF.EmitBlock(TryHandler);

  // Get the correct versions of the exception handling intrinsics
  llvm::Value *llvm_eh_exception =
    CGF.CGM.getIntrinsic(llvm::Intrinsic::eh_exception);
  llvm::Value *llvm_eh_selector =
    CGF.CGM.getIntrinsic(llvm::Intrinsic::eh_selector);
  llvm::Value *llvm_eh_typeid_for =
    CGF.CGM.getIntrinsic(llvm::Intrinsic::eh_typeid_for);

  // Exception object
  llvm::Value *Exc = CGF.Builder.CreateCall(llvm_eh_exception, "exc");
  llvm::Value *RethrowPtr = CGF.CreateTempAlloca(Exc->getType(), "_rethrow");

  llvm::SmallVector<llvm::Value*, 8> ESelArgs;
  llvm::SmallVector<std::pair<const VarDecl*, const Stmt*>, 8> Handlers;

  ESelArgs.push_back(Exc);
  ESelArgs.push_back(Personality);

  bool HasCatchAll = false;
  // Only @try blocks are allowed @catch blocks, but both can have @finally
  if (isTry) {
    if (cast<ObjCAtTryStmt>(S).getNumCatchStmts()) {
      const ObjCAtTryStmt &AtTry = cast<ObjCAtTryStmt>(S);
      CGF.setInvokeDest(CatchInCatch);

      for (unsigned I = 0, N = AtTry.getNumCatchStmts(); I != N; ++I) {
        const ObjCAtCatchStmt *CatchStmt = AtTry.getCatchStmt(I);
        const VarDecl *CatchDecl = CatchStmt->getCatchParamDecl();
        Handlers.push_back(std::make_pair(CatchDecl,
                                          CatchStmt->getCatchBody()));

        // @catch() and @catch(id) both catch any ObjC exception
        if (!CatchDecl || CatchDecl->getType()->isObjCIdType()
            || CatchDecl->getType()->isObjCQualifiedIdType()) {
          // Use i8* null here to signal this is a catch all, not a cleanup.
          ESelArgs.push_back(NULLPtr);
          HasCatchAll = true;
          // No further catches after this one will ever be reached
          break;
        }

        // All other types should be Objective-C interface pointer types.
        const ObjCObjectPointerType *OPT =
          CatchDecl->getType()->getAs<ObjCObjectPointerType>();
        assert(OPT && "Invalid @catch type.");
        const ObjCInterfaceDecl *IDecl =
          OPT->getObjectType()->getInterface();
        assert(IDecl && "Invalid @catch type.");
        llvm::Value *EHType =
          MakeConstantString(IDecl->getNameAsString());
        ESelArgs.push_back(EHType);
      }
    }
  }

  // We use a cleanup unless there was already a catch all.
  if (!HasCatchAll) {
    ESelArgs.push_back(llvm::ConstantInt::get(llvm::Type::getInt32Ty(VMContext), 0));
    Handlers.push_back(std::make_pair((const ParmVarDecl*) 0, (const Stmt*) 0));
  }

  // Find which handler was matched.
  llvm::Value *ESelector = CGF.Builder.CreateCall(llvm_eh_selector,
      ESelArgs.begin(), ESelArgs.end(), "selector");

  for (unsigned i = 0, e = Handlers.size(); i != e; ++i) {
    const VarDecl *CatchParam = Handlers[i].first;
    const Stmt *CatchBody = Handlers[i].second;

    llvm::BasicBlock *Next = 0;

    // The last handler always matches.
    if (i + 1 != e) {
      assert(CatchParam && "Only last handler can be a catch all.");

      // Test whether this block matches the type for the selector and branch
      // to Match if it does, or to the next BB if it doesn't.
      llvm::BasicBlock *Match = CGF.createBasicBlock("match");
      Next = CGF.createBasicBlock("catch.next");
      llvm::Value *Id = CGF.Builder.CreateCall(llvm_eh_typeid_for,
          CGF.Builder.CreateBitCast(ESelArgs[i+2], PtrTy));
      CGF.Builder.CreateCondBr(CGF.Builder.CreateICmpEQ(ESelector, Id), Match,
                               Next);

      CGF.EmitBlock(Match);
    }

    if (CatchBody) {
      llvm::Value *ExcObject = CGF.Builder.CreateBitCast(Exc,
          CGF.ConvertType(CatchParam->getType()));

      // Bind the catch parameter if it exists.
      if (CatchParam) {
        // CatchParam is a ParmVarDecl because of the grammar
        // construction used to handle this, but for codegen purposes
        // we treat this as a local decl.
        CGF.EmitLocalBlockVarDecl(*CatchParam);
        CGF.Builder.CreateStore(ExcObject, CGF.GetAddrOfLocalVar(CatchParam));
      }

      CGF.ObjCEHValueStack.push_back(ExcObject);
      CGF.EmitStmt(CatchBody);
      CGF.ObjCEHValueStack.pop_back();

      CGF.EmitBranchThroughCleanup(FinallyEnd);

      if (Next)
        CGF.EmitBlock(Next);
    } else {
      assert(!Next && "catchup should be last handler.");

      CGF.Builder.CreateStore(Exc, RethrowPtr);
      CGF.EmitBranchThroughCleanup(FinallyRethrow);
    }
  }
  // The @finally block is a secondary landing pad for any exceptions thrown in
  // @catch() blocks
  CGF.EmitBlock(CatchInCatch);
  Exc = CGF.Builder.CreateCall(llvm_eh_exception, "exc");
  ESelArgs.clear();
  ESelArgs.push_back(Exc);
  ESelArgs.push_back(Personality);
  // If there is a @catch or @finally clause outside of this one then we
  // need to make sure that we catch and rethrow it.
  if (PrevLandingPad) {
    ESelArgs.push_back(NULLPtr);
  } else {
    ESelArgs.push_back(llvm::ConstantInt::get(llvm::Type::getInt32Ty(VMContext), 0));
  }
  CGF.Builder.CreateCall(llvm_eh_selector, ESelArgs.begin(), ESelArgs.end(),
                         "selector");
  CGF.Builder.CreateCall(llvm_eh_typeid_for,
                         CGF.Builder.CreateIntToPtr(ESelArgs[2], PtrTy));
  CGF.Builder.CreateStore(Exc, RethrowPtr);
  CGF.EmitBranchThroughCleanup(FinallyRethrow);

  CodeGenFunction::CleanupBlockInfo Info = CGF.PopCleanupBlock();

  CGF.setInvokeDest(PrevLandingPad);

  CGF.EmitBlock(FinallyBlock);


  if (isTry) {
    if (const ObjCAtFinallyStmt* FinallyStmt =
          cast<ObjCAtTryStmt>(S).getFinallyStmt())
      CGF.EmitStmt(FinallyStmt->getFinallyBody());
  } else {
    // Emit 'objc_sync_exit(expr)' as finally's sole statement for
    // @synchronized.
    std::vector<const llvm::Type*> Args(1, IdTy);
    llvm::FunctionType *FTy =
      llvm::FunctionType::get(llvm::Type::getVoidTy(VMContext), Args, false);
    llvm::Value *SyncExit = CGM.CreateRuntimeFunction(FTy, "objc_sync_exit");
    llvm::Value *SyncArg =
      CGF.EmitScalarExpr(cast<ObjCAtSynchronizedStmt>(S).getSynchExpr());
    SyncArg = CGF.Builder.CreateBitCast(SyncArg, IdTy);
    CGF.Builder.CreateCall(SyncExit, SyncArg);
  }

  if (Info.SwitchBlock)
    CGF.EmitBlock(Info.SwitchBlock);
  if (Info.EndBlock)
    CGF.EmitBlock(Info.EndBlock);
    // Emit the body of the statement.
    CGF.EmitStmt(S.getSynchBody());

    // Branch around the rethrow code.
    CGF.EmitBranch(FinallyEnd);
    // Pop the lock-release cleanup.
    CGF.PopCleanupBlock();
  }

  CGF.EmitBlock(FinallyRethrow);
namespace {
  struct CatchHandler {
    const VarDecl *Variable;
    const Stmt *Body;
    llvm::BasicBlock *Block;
    llvm::Value *TypeInfo;
  };
}

  llvm::Value *ExceptionObject = CGF.Builder.CreateLoad(RethrowPtr);
  llvm::BasicBlock *UnwindBB = CGF.getInvokeDest();
  if (!UnwindBB) {
    CGF.Builder.CreateCall(RethrowFn, ExceptionObject);
    // Exception always thrown, next instruction is never reached.
    CGF.Builder.CreateUnreachable();
  } else {
    // If there is a @catch block outside this scope, we invoke instead of
    // calling because we may return to this function. This is very slow, but
    // some people still do it. It would be nice to add an optimised path for
    // this.
    CGF.Builder.CreateInvoke(RethrowFn, UnwindBB, UnwindBB, &ExceptionObject,
                             &ExceptionObject+1);
void CGObjCGNU::EmitTryStmt(CodeGen::CodeGenFunction &CGF,
                            const ObjCAtTryStmt &S) {
  // Unlike the Apple non-fragile runtimes, which also use
  // unwind-based zero cost exceptions, the GNU Objective C runtime's
  // EH support isn't a veneer over C++ EH. Instead, exception
  // objects are created by __objc_exception_throw and destroyed by
  // the personality function; this avoids the need for bracketing
  // catch handlers with calls to __blah_begin_catch/__blah_end_catch
  // (or even _Unwind_DeleteException), but probably doesn't
  // interoperate very well with foreign exceptions.

  // Jump destination for falling out of catch bodies.
  CodeGenFunction::JumpDest Cont;
  if (S.getNumCatchStmts())
    Cont = CGF.getJumpDestInCurrentScope("eh.cont");

  // We handle @finally statements by pushing them as a cleanup
  // before entering the catch.
  CodeGenFunction::FinallyInfo FinallyInfo;
  if (const ObjCAtFinallyStmt *Finally = S.getFinallyStmt()) {
    std::vector<const llvm::Type*> Args(1, IdTy);
    llvm::FunctionType *FTy =
      llvm::FunctionType::get(llvm::Type::getVoidTy(VMContext), Args, false);
    llvm::Constant *Rethrow =
      CGM.CreateRuntimeFunction(FTy, "objc_exception_throw");

    FinallyInfo = CGF.EnterFinallyBlock(Finally->getFinallyBody(), 0, 0,
                                        Rethrow);
  }

  CGF.EmitBlock(FinallyEnd);
  llvm::SmallVector<CatchHandler, 8> Handlers;

  // Enter the catch, if there is one.
  if (S.getNumCatchStmts()) {
    for (unsigned I = 0, N = S.getNumCatchStmts(); I != N; ++I) {
      const ObjCAtCatchStmt *CatchStmt = S.getCatchStmt(I);
      const VarDecl *CatchDecl = CatchStmt->getCatchParamDecl();

      Handlers.push_back(CatchHandler());
      CatchHandler &Handler = Handlers.back();
      Handler.Variable = CatchDecl;
      Handler.Body = CatchStmt->getCatchBody();
      Handler.Block = CGF.createBasicBlock("catch");

      // @catch() and @catch(id) both catch any ObjC exception.
      // Treat them as catch-alls.
      // FIXME: this is what this code was doing before, but should 'id'
      // really be catching foreign exceptions?
      if (!CatchDecl
          || CatchDecl->getType()->isObjCIdType()
          || CatchDecl->getType()->isObjCQualifiedIdType()) {

        Handler.TypeInfo = 0; // catch-all

        // Don't consider any other catches.
        break;
      }

      // All other types should be Objective-C interface pointer types.
      const ObjCObjectPointerType *OPT =
        CatchDecl->getType()->getAs<ObjCObjectPointerType>();
      assert(OPT && "Invalid @catch type.");
      const ObjCInterfaceDecl *IDecl =
        OPT->getObjectType()->getInterface();
      assert(IDecl && "Invalid @catch type.");
      Handler.TypeInfo = MakeConstantString(IDecl->getNameAsString());
    }

    EHCatchScope *Catch = CGF.EHStack.pushCatch(Handlers.size());
    for (unsigned I = 0, E = Handlers.size(); I != E; ++I)
      Catch->setHandler(I, Handlers[I].TypeInfo, Handlers[I].Block);
  }

  // Emit the try body.
  CGF.EmitStmt(S.getTryBody());

  // Leave the try.
  if (S.getNumCatchStmts())
    CGF.EHStack.popCatch();

  // Remember where we were.
  CGBuilderTy::InsertPoint SavedIP = CGF.Builder.saveAndClearIP();

  // Emit the handlers.
  for (unsigned I = 0, E = Handlers.size(); I != E; ++I) {
    CatchHandler &Handler = Handlers[I];
    CGF.EmitBlock(Handler.Block);

    llvm::Value *Exn = CGF.Builder.CreateLoad(CGF.getExceptionSlot());

    // Bind the catch parameter if it exists.
    if (const VarDecl *CatchParam = Handler.Variable) {
      const llvm::Type *CatchType = CGF.ConvertType(CatchParam->getType());
      Exn = CGF.Builder.CreateBitCast(Exn, CatchType);

      CGF.EmitLocalBlockVarDecl(*CatchParam);
      CGF.Builder.CreateStore(Exn, CGF.GetAddrOfLocalVar(CatchParam));
    }

    CGF.ObjCEHValueStack.push_back(Exn);
    CGF.EmitStmt(Handler.Body);
    CGF.ObjCEHValueStack.pop_back();

    CGF.EmitBranchThroughCleanup(Cont);
  }

  // Go back to the try-statement fallthrough.
  CGF.Builder.restoreIP(SavedIP);

  // Pop out of the finally.
  if (S.getFinallyStmt())
    CGF.ExitFinallyBlock(FinallyInfo);

  if (Cont.Block) {
    if (Cont.Block->use_empty())
      delete Cont.Block;
    else {
      CGF.EmitBranch(Cont.Block);
      CGF.EmitBlock(Cont.Block);
    }
  }
}

void CGObjCGNU::EmitThrowStmt(CodeGen::CodeGenFunction &CGF,
@@ -16,6 +16,7 @@
#include "CGRecordLayout.h"
#include "CodeGenModule.h"
#include "CodeGenFunction.h"
#include "CGException.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclObjC.h"

@@ -1184,8 +1185,11 @@ public:
  virtual llvm::Constant *GetCopyStructFunction();
  virtual llvm::Constant *EnumerationMutationFunction();

  virtual void EmitTryOrSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
                                         const Stmt &S);
  virtual void EmitTryStmt(CodeGen::CodeGenFunction &CGF,
                           const ObjCAtTryStmt &S);
  virtual void EmitSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
                                    const ObjCAtSynchronizedStmt &S);
  void EmitTryOrSynchronizedStmt(CodeGen::CodeGenFunction &CGF, const Stmt &S);
  virtual void EmitThrowStmt(CodeGen::CodeGenFunction &CGF,
                             const ObjCAtThrowStmt &S);
  virtual llvm::Value * EmitObjCWeakRead(CodeGen::CodeGenFunction &CGF,

@@ -1428,8 +1432,10 @@ public:
    return ObjCTypes.getEnumerationMutationFn();
  }

  virtual void EmitTryOrSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
                                         const Stmt &S);
  virtual void EmitTryStmt(CodeGen::CodeGenFunction &CGF,
                           const ObjCAtTryStmt &S);
  virtual void EmitSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
                                    const ObjCAtSynchronizedStmt &S);
  virtual void EmitThrowStmt(CodeGen::CodeGenFunction &CGF,
                             const ObjCAtThrowStmt &S);
  virtual llvm::Value * EmitObjCWeakRead(CodeGen::CodeGenFunction &CGF,
@ -2525,11 +2531,52 @@ llvm::Constant *CGObjCMac::EnumerationMutationFunction() {
|
|||
return ObjCTypes.getEnumerationMutationFn();
|
||||
}
|
||||
|
||||
void CGObjCMac::EmitTryStmt(CodeGenFunction &CGF, const ObjCAtTryStmt &S) {
|
||||
return EmitTryOrSynchronizedStmt(CGF, S);
|
||||
}
|
||||
|
||||
void CGObjCMac::EmitSynchronizedStmt(CodeGenFunction &CGF,
|
||||
const ObjCAtSynchronizedStmt &S) {
|
||||
return EmitTryOrSynchronizedStmt(CGF, S);
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
Objective-C setjmp-longjmp (sjlj) Exception Handling
|
||||
--
|
||||
|
||||
A catch buffer is a setjmp buffer plus:
|
||||
- a pointer to the exception that was caught
|
||||
- a pointer to the previous exception data buffer
|
||||
- two pointers of reserved storage
|
||||
Therefore catch buffers form a stack, with a pointer to the top
|
||||
of the stack kept in thread-local storage.

  objc_exception_try_enter pushes a catch buffer onto the EH stack.
  objc_exception_try_exit pops the given catch buffer, which is
    required to be the top of the EH stack.
  objc_exception_throw pops the top of the EH stack, writes the
    thrown exception into the appropriate field, and longjmps
    to the setjmp buffer. It crashes the process (with a printf
    and an abort()) if there are no catch buffers on the stack.
  objc_exception_extract just reads the exception pointer out of the
    catch buffer.

  There's no reason an implementation couldn't use a light-weight
  setjmp here --- something like __builtin_setjmp, but API-compatible
  with the heavyweight setjmp. This will be more important if we ever
  want to implement correct ObjC/C++ exception interactions for the
  fragile ABI.

  Note that for this use of setjmp/longjmp to be correct, we may need
  to mark some local variables volatile: if a non-volatile local
  variable is modified between the setjmp and the longjmp, it has
  indeterminate value. For the purposes of LLVM IR, it may be
  sufficient to make loads and stores within the @try (to variables
  declared outside the @try) volatile. This is necessary for
  optimized correctness, but is not currently being done; this is
  being tracked as rdar://problem/8160285
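
  To illustrate the hazard (the code and names here are hypothetical):

    int x = 0;
    @try {              // objc_exception_try_enter + setjmp happen here
      x = 1;
      [obj mayThrow];   // a throw longjmps back past the store to x
    } @catch (id e) {
      use(x);           // x is indeterminate unless it is volatile
    }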

  The basic framework for a @try-catch-finally is as follows:
  {
    objc_exception_data d;

@ -2591,37 +2638,33 @@ llvm::Constant *CGObjCMac::EnumerationMutationFunction() {

  Rethrows and Jumps-Through-Finally
  --

  Support for implicit rethrows and jumping through the finally block is
  handled by storing the current exception-handling context in
  ObjCEHStack.
  '@throw;' is supported by pushing the currently-caught exception
  onto ObjCEHStack while the @catch blocks are emitted.

  In order to implement proper @finally semantics, we support one basic
  mechanism for jumping through the finally block to an arbitrary
  destination. Constructs which generate exits from a @try or @catch
  block use this mechanism to implement the proper semantics by chaining
  jumps, as necessary.
  Branches through the @finally block are handled with an ordinary
  normal cleanup. We do not register an EH cleanup; fragile-ABI ObjC
  exceptions are not compatible with C++ exceptions, and this is
  hardly the only place where this will go wrong.

  This mechanism works like the one used for indirect goto: we
  arbitrarily assign an ID to each destination and store the ID for the
  destination in a variable prior to entering the finally block. At the
  end of the finally block we simply create a switch to the proper
  destination.
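
  Schematically, with illustrative destination IDs (not the exact
  emitted IR):

    dest.id = 1;          // record "resume at L1 after the finally"
    goto finally;
    ...
  finally:
    ; // finally body runs here
    switch (dest.id) {
    case 1: goto L1;      // e.g. normal fall-through out of the @try
    case 2: goto L2;      // e.g. a break or goto target
    default: ...          // e.g. rethrow the pending exception
    }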

  Code gen for @synchronized(expr) stmt;
  Effectively generating code for:
    objc_sync_enter(expr);
    @try stmt @finally { objc_sync_exit(expr); }
  @synchronized(expr) { stmt; } is emitted as if it were:
    id synch_value = expr;
    objc_sync_enter(synch_value);
    @try { stmt; } @finally { objc_sync_exit(synch_value); }
*/

void CGObjCMac::EmitTryOrSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
                                          const Stmt &S) {
  bool isTry = isa<ObjCAtTryStmt>(S);
  // Create various blocks we refer to for handling @finally.
  llvm::BasicBlock *FinallyBlock = CGF.createBasicBlock("finally");
  llvm::BasicBlock *FinallyExit = CGF.createBasicBlock("finally.exit");
  llvm::BasicBlock *FinallyNoExit = CGF.createBasicBlock("finally.noexit");
  llvm::BasicBlock *FinallyRethrow = CGF.createBasicBlock("finally.throw");
  llvm::BasicBlock *FinallyEnd = CGF.createBasicBlock("finally.end");

  // A destination for the fall-through edges of the catch handlers to
  // jump to.
  CodeGenFunction::JumpDest FinallyEnd =
    CGF.getJumpDestInCurrentScope("finally.end");

  // A destination for the rethrow edge of the catch handlers to jump
  // to.
  CodeGenFunction::JumpDest FinallyRethrow =
    CGF.getJumpDestInCurrentScope("finally.rethrow");

  // For @synchronized, call objc_sync_enter(sync.expr). The
  // evaluation of the expression must occur before we enter the

@ -2632,75 +2675,140 @@ void CGObjCMac::EmitTryOrSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
    SyncArg =
      CGF.EmitScalarExpr(cast<ObjCAtSynchronizedStmt>(S).getSynchExpr());
    SyncArg = CGF.Builder.CreateBitCast(SyncArg, ObjCTypes.ObjectPtrTy);
    CGF.Builder.CreateCall(ObjCTypes.getSyncEnterFn(), SyncArg);
    CGF.Builder.CreateCall(ObjCTypes.getSyncEnterFn(), SyncArg)
      ->setDoesNotThrow();
  }

  // Push an EH context entry, used for handling rethrows and jumps
  // through finally.
  CGF.PushCleanupBlock(FinallyBlock);

  if (CGF.ObjCEHValueStack.empty())
    CGF.ObjCEHValueStack.push_back(0);
  // If this is a nested @try, the caught exception is that of the enclosing @try.
  else
    CGF.ObjCEHValueStack.push_back(CGF.ObjCEHValueStack.back());
  // Allocate memory for the exception data and rethrow pointer.
  llvm::Value *ExceptionData = CGF.CreateTempAlloca(ObjCTypes.ExceptionDataTy,
                                                    "exceptiondata.ptr");
  llvm::Value *RethrowPtr = CGF.CreateTempAlloca(ObjCTypes.ObjectPtrTy,
                                                 "_rethrow");
  llvm::Value *CallTryExitPtr = CGF.CreateTempAlloca(
                                        llvm::Type::getInt1Ty(VMContext),

  // Create a flag indicating whether the cleanup needs to call
  // objc_exception_try_exit. This is true except when
  //   - no catches match and we're branching through the cleanup
  //     just to rethrow the exception, or
  //   - a catch matched and we're falling out of the catch handler.
  llvm::Value *CallTryExitVar = CGF.CreateTempAlloca(CGF.Builder.getInt1Ty(),
                                                     "_call_try_exit");
  CGF.Builder.CreateStore(llvm::ConstantInt::getTrue(VMContext),
                          CallTryExitPtr);
                          CallTryExitVar);

  // Enter a new try block and call setjmp.
  CGF.Builder.CreateCall(ObjCTypes.getExceptionTryEnterFn(), ExceptionData);
  llvm::Value *JmpBufPtr = CGF.Builder.CreateStructGEP(ExceptionData, 0,
                                                       "jmpbufarray");
  JmpBufPtr = CGF.Builder.CreateStructGEP(JmpBufPtr, 0, "tmp");
  llvm::Value *SetJmpResult = CGF.Builder.CreateCall(ObjCTypes.getSetJmpFn(),
                                                     JmpBufPtr, "result");
  // Push a normal cleanup to leave the try scope.
  {
    CodeGenFunction::CleanupBlock
      FinallyScope(CGF, CodeGenFunction::NormalCleanup);

    // Check whether we need to call objc_exception_try_exit.
    // In optimized code, this branch will always be folded.
    llvm::BasicBlock *FinallyCallExit =
      CGF.createBasicBlock("finally.call_exit");
    llvm::BasicBlock *FinallyNoCallExit =
      CGF.createBasicBlock("finally.no_call_exit");
    CGF.Builder.CreateCondBr(CGF.Builder.CreateLoad(CallTryExitVar),
                             FinallyCallExit, FinallyNoCallExit);

    CGF.EmitBlock(FinallyCallExit);
    CGF.Builder.CreateCall(ObjCTypes.getExceptionTryExitFn(), ExceptionData)
      ->setDoesNotThrow();

    CGF.EmitBlock(FinallyNoCallExit);

    if (isTry) {
      if (const ObjCAtFinallyStmt* FinallyStmt =
            cast<ObjCAtTryStmt>(S).getFinallyStmt())
        CGF.EmitStmt(FinallyStmt->getFinallyBody());

      // ~CleanupBlock requires there to be an exit block.
      CGF.EnsureInsertPoint();
    } else {
      // Emit objc_sync_exit(expr); as finally's sole statement for
      // @synchronized.
      CGF.Builder.CreateCall(ObjCTypes.getSyncExitFn(), SyncArg)
        ->setDoesNotThrow();
    }
  }

  // Enter a try block:
  //  - Call objc_exception_try_enter to push ExceptionData on top of
  //    the EH stack.
  CGF.Builder.CreateCall(ObjCTypes.getExceptionTryEnterFn(), ExceptionData)
    ->setDoesNotThrow();

  //  - Call setjmp on the exception data buffer.
  llvm::Constant *Zero = llvm::ConstantInt::get(CGF.Builder.getInt32Ty(), 0);
  llvm::Value *GEPIndexes[] = { Zero, Zero, Zero };
  llvm::Value *SetJmpBuffer =
    CGF.Builder.CreateGEP(ExceptionData, GEPIndexes, GEPIndexes+3, "setjmp_buffer");
  llvm::CallInst *SetJmpResult =
    CGF.Builder.CreateCall(ObjCTypes.getSetJmpFn(), SetJmpBuffer, "setjmp_result");
  SetJmpResult->setDoesNotThrow();

  // If setjmp returned 0, enter the protected block; otherwise,
  // branch to the handler.
  llvm::BasicBlock *TryBlock = CGF.createBasicBlock("try");
  llvm::BasicBlock *TryHandler = CGF.createBasicBlock("try.handler");
  CGF.Builder.CreateCondBr(CGF.Builder.CreateIsNotNull(SetJmpResult, "threw"),
                           TryHandler, TryBlock);
  llvm::Value *DidCatch =
    CGF.Builder.CreateIsNull(SetJmpResult, "did_catch_exception");
  CGF.Builder.CreateCondBr(DidCatch, TryBlock, TryHandler);

  // Emit the @try block.
  // Emit the protected block.
  CGF.EmitBlock(TryBlock);
  CGF.EmitStmt(isTry ? cast<ObjCAtTryStmt>(S).getTryBody()
                     : cast<ObjCAtSynchronizedStmt>(S).getSynchBody());
                     : cast<ObjCAtSynchronizedStmt>(S).getSynchBody());
  CGF.EmitBranchThroughCleanup(FinallyEnd);

  // Emit the "exception in @try" block.
  // Emit the exception handler block.
  CGF.EmitBlock(TryHandler);

  // Retrieve the exception object. We may emit multiple blocks but
  // nothing can cross this so the value is already in SSA form.
  llvm::Value *Caught =
  llvm::CallInst *Caught =
    CGF.Builder.CreateCall(ObjCTypes.getExceptionExtractFn(),
                           ExceptionData, "caught");
  CGF.ObjCEHValueStack.back() = Caught;
  if (!isTry) {
    CGF.Builder.CreateStore(Caught, RethrowPtr);
  Caught->setDoesNotThrow();

  // Remember the exception to rethrow.
  CGF.Builder.CreateStore(Caught, RethrowPtr);

  // Note: at this point, objc_exception_throw already popped the
  // catch handler, so anything that branches to the cleanup needs
  // to set CallTryExitVar to false.

  // For a @synchronized (or a @try with no catches), just branch
  // through the cleanup to the rethrow block.
  if (!isTry || !cast<ObjCAtTryStmt>(S).getNumCatchStmts()) {
    // Tell the cleanup not to re-pop the exit.
    CGF.Builder.CreateStore(llvm::ConstantInt::getFalse(VMContext),
                            CallTryExitPtr);
                            CallTryExitVar);

    CGF.EmitBranchThroughCleanup(FinallyRethrow);
  } else if (cast<ObjCAtTryStmt>(S).getNumCatchStmts()) {

  // Otherwise, we have to match against the caught exceptions.
  } else {
    // Push the exception to rethrow onto the EH value stack for the
    // benefit of any @throws in the handlers.
    CGF.ObjCEHValueStack.push_back(Caught);

    const ObjCAtTryStmt* AtTryStmt = cast<ObjCAtTryStmt>(&S);

    // Enter a new exception try block (in case a @catch block throws
    // an exception).
    CGF.Builder.CreateCall(ObjCTypes.getExceptionTryEnterFn(), ExceptionData);
    // an exception). Now CallTryExitVar (currently true) is back in
    // synch with reality.
    CGF.Builder.CreateCall(ObjCTypes.getExceptionTryEnterFn(), ExceptionData)
      ->setDoesNotThrow();

    llvm::Value *SetJmpResult = CGF.Builder.CreateCall(ObjCTypes.getSetJmpFn(),
                                                       JmpBufPtr, "result");
    llvm::Value *Threw = CGF.Builder.CreateIsNotNull(SetJmpResult, "threw");
    llvm::CallInst *SetJmpResult =
      CGF.Builder.CreateCall(ObjCTypes.getSetJmpFn(), SetJmpBuffer,
                             "setjmp.result");
    SetJmpResult->setDoesNotThrow();

    llvm::Value *Threw =
      CGF.Builder.CreateIsNotNull(SetJmpResult, "did_catch_exception");

    llvm::BasicBlock *CatchBlock = CGF.createBasicBlock("catch");
    llvm::BasicBlock *CatchHandler = CGF.createBasicBlock("catch.handler");
    llvm::BasicBlock *CatchHandler = CGF.createBasicBlock("catch_for_catch");
    CGF.Builder.CreateCondBr(Threw, CatchHandler, CatchBlock);

    CGF.EmitBlock(CatchBlock);

@ -2711,7 +2819,6 @@ void CGObjCMac::EmitTryOrSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
    bool AllMatched = false;
    for (unsigned I = 0, N = AtTryStmt->getNumCatchStmts(); I != N; ++I) {
      const ObjCAtCatchStmt *CatchStmt = AtTryStmt->getCatchStmt(I);
      llvm::BasicBlock *NextCatchBlock = CGF.createBasicBlock("catch");

      const VarDecl *CatchParam = CatchStmt->getCatchParamDecl();
      const ObjCObjectPointerType *OPT = 0;

@ -2722,47 +2829,67 @@ void CGObjCMac::EmitTryOrSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
      } else {
        OPT = CatchParam->getType()->getAs<ObjCObjectPointerType>();

        // catch(id e) always matches.
        // catch(id e) always matches under this ABI, since only
        // ObjC exceptions end up here in the first place.
        // FIXME: For the time being we also match id<X>; this should
        // be rejected by Sema instead.
        if (OPT && (OPT->isObjCIdType() || OPT->isObjCQualifiedIdType()))
          AllMatched = true;
      }

      // If this is a catch-all, we don't need to test anything.
      if (AllMatched) {
        CodeGenFunction::RunCleanupsScope CatchVarCleanups(CGF);

        if (CatchParam) {
          CGF.EmitLocalBlockVarDecl(*CatchParam);
          assert(CGF.HaveInsertPoint() && "DeclStmt destroyed insert point?");

          // These types work out because ConvertType(id) == i8*.
          CGF.Builder.CreateStore(Caught, CGF.GetAddrOfLocalVar(CatchParam));
        }

        CGF.EmitStmt(CatchStmt->getCatchBody());

        // The scope of the catch variable ends right here.
        CatchVarCleanups.ForceCleanup();

        CGF.EmitBranchThroughCleanup(FinallyEnd);
        break;
      }

      assert(OPT && "Unexpected non-object pointer type in @catch");
      const ObjCObjectType *ObjTy = OPT->getObjectType();

      // FIXME: @catch (Class c) ?
      ObjCInterfaceDecl *IDecl = ObjTy->getInterface();
      assert(IDecl && "Catch parameter must have Objective-C type!");

      // Check if the @catch block matches the exception object.
      llvm::Value *Class = EmitClassRef(CGF.Builder, IDecl);

      llvm::Value *Match =
      llvm::CallInst *Match =
        CGF.Builder.CreateCall2(ObjCTypes.getExceptionMatchFn(),
                                Class, Caught, "match");
      Match->setDoesNotThrow();

      llvm::BasicBlock *MatchedBlock = CGF.createBasicBlock("matched");
      llvm::BasicBlock *MatchedBlock = CGF.createBasicBlock("match");
      llvm::BasicBlock *NextCatchBlock = CGF.createBasicBlock("catch.next");

      CGF.Builder.CreateCondBr(CGF.Builder.CreateIsNotNull(Match, "matched"),
                               MatchedBlock, NextCatchBlock);

      // Emit the @catch block.
      CGF.EmitBlock(MatchedBlock);

      // Collect any cleanups for the catch variable. The scope lasts until
      // the end of the catch body.
      CodeGenFunction::RunCleanupsScope CatchVarCleanups(CGF);

      CGF.EmitLocalBlockVarDecl(*CatchParam);
      assert(CGF.HaveInsertPoint() && "DeclStmt destroyed insert point?");

      // Initialize the catch variable.
      llvm::Value *Tmp =
        CGF.Builder.CreateBitCast(Caught,
                                  CGF.ConvertType(CatchParam->getType()),

@ -2770,11 +2897,17 @@ void CGObjCMac::EmitTryOrSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
      CGF.Builder.CreateStore(Tmp, CGF.GetAddrOfLocalVar(CatchParam));

      CGF.EmitStmt(CatchStmt->getCatchBody());

      // We're done with the catch variable.
      CatchVarCleanups.ForceCleanup();

      CGF.EmitBranchThroughCleanup(FinallyEnd);

      CGF.EmitBlock(NextCatchBlock);
    }

    CGF.ObjCEHValueStack.pop_back();

    if (!AllMatched) {
      // None of the handlers caught the exception, so store it to be
      // rethrown at the end of the @finally block.

@ -2784,59 +2917,34 @@ void CGObjCMac::EmitTryOrSynchronizedStmt(CodeGen::CodeGenFunction &CGF,

    // Emit the exception handler for the @catch blocks.
    CGF.EmitBlock(CatchHandler);
    CGF.Builder.CreateStore(
      CGF.Builder.CreateCall(ObjCTypes.getExceptionExtractFn(),
                             ExceptionData),
      RethrowPtr);
    CGF.Builder.CreateStore(llvm::ConstantInt::getFalse(VMContext),
                            CallTryExitPtr);
    CGF.EmitBranchThroughCleanup(FinallyRethrow);
  } else {

    // Rethrow the new exception, not the old one.
    Caught = CGF.Builder.CreateCall(ObjCTypes.getExceptionExtractFn(),
                                    ExceptionData);
    Caught->setDoesNotThrow();
    CGF.Builder.CreateStore(Caught, RethrowPtr);

    // Don't pop the catch handler; the throw already did.
    CGF.Builder.CreateStore(llvm::ConstantInt::getFalse(VMContext),
                            CallTryExitPtr);
                            CallTryExitVar);
    CGF.EmitBranchThroughCleanup(FinallyRethrow);
  }

  // Pop the exception-handling stack entry. It is important to do
  // this now, because the code in the @finally block is not in this
  // context.
  CodeGenFunction::CleanupBlockInfo Info = CGF.PopCleanupBlock();
  // Pop the cleanup.
  CGF.PopCleanupBlock();
  CGF.EmitBlock(FinallyEnd.Block);

  CGF.ObjCEHValueStack.pop_back();

  // Emit the @finally block.
  CGF.EmitBlock(FinallyBlock);
  llvm::Value* CallTryExit = CGF.Builder.CreateLoad(CallTryExitPtr, "tmp");

  CGF.Builder.CreateCondBr(CallTryExit, FinallyExit, FinallyNoExit);

  CGF.EmitBlock(FinallyExit);
  CGF.Builder.CreateCall(ObjCTypes.getExceptionTryExitFn(), ExceptionData);

  CGF.EmitBlock(FinallyNoExit);
  if (isTry) {
    if (const ObjCAtFinallyStmt* FinallyStmt =
          cast<ObjCAtTryStmt>(S).getFinallyStmt())
      CGF.EmitStmt(FinallyStmt->getFinallyBody());
  } else {
    // Emit objc_sync_exit(expr); as finally's sole statement for
    // @synchronized.
    CGF.Builder.CreateCall(ObjCTypes.getSyncExitFn(), SyncArg);
  // Emit the rethrow block.
  CGF.Builder.ClearInsertionPoint();
  CGF.EmitBlock(FinallyRethrow.Block, true);
  if (CGF.HaveInsertPoint()) {
    CGF.Builder.CreateCall(ObjCTypes.getExceptionThrowFn(),
                           CGF.Builder.CreateLoad(RethrowPtr))
      ->setDoesNotThrow();
    CGF.Builder.CreateUnreachable();
  }

  // Emit the switch block
  if (Info.SwitchBlock)
    CGF.EmitBlock(Info.SwitchBlock);
  if (Info.EndBlock)
    CGF.EmitBlock(Info.EndBlock);

  CGF.EmitBlock(FinallyRethrow);
  CGF.Builder.CreateCall(ObjCTypes.getExceptionThrowFn(),
                         CGF.Builder.CreateLoad(RethrowPtr));
  CGF.Builder.CreateUnreachable();

  CGF.EmitBlock(FinallyEnd);
  CGF.Builder.SetInsertPoint(FinallyEnd.Block);
}

void CGObjCMac::EmitThrowStmt(CodeGen::CodeGenFunction &CGF,

@ -2853,7 +2961,8 @@ void CGObjCMac::EmitThrowStmt(CodeGen::CodeGenFunction &CGF,
    ExceptionAsObject = CGF.ObjCEHValueStack.back();
  }

  CGF.Builder.CreateCall(ObjCTypes.getExceptionThrowFn(), ExceptionAsObject);
  CGF.Builder.CreateCall(ObjCTypes.getExceptionThrowFn(), ExceptionAsObject)
    ->setDoesNotReturn();
  CGF.Builder.CreateUnreachable();

  // Clear the insertion point to indicate we are in unreachable code.

@ -5572,75 +5681,77 @@ void CGObjCNonFragileABIMac::EmitObjCGlobalAssign(CodeGen::CodeGenFunction &CGF,
}

void
CGObjCNonFragileABIMac::EmitTryOrSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
                                                  const Stmt &S) {
  bool isTry = isa<ObjCAtTryStmt>(S);
  llvm::BasicBlock *TryBlock = CGF.createBasicBlock("try");
  llvm::BasicBlock *PrevLandingPad = CGF.getInvokeDest();
  llvm::BasicBlock *TryHandler = CGF.createBasicBlock("try.handler");
  llvm::BasicBlock *FinallyBlock = CGF.createBasicBlock("finally");
  llvm::BasicBlock *FinallyRethrow = CGF.createBasicBlock("finally.throw");
  llvm::BasicBlock *FinallyEnd = CGF.createBasicBlock("finally.end");
CGObjCNonFragileABIMac::EmitSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
                                             const ObjCAtSynchronizedStmt &S) {
  // Evaluate the lock operand. This should dominate the cleanup.
  llvm::Value *SyncArg = CGF.EmitScalarExpr(S.getSynchExpr());

  // For @synchronized, call objc_sync_enter(sync.expr). The
  // evaluation of the expression must occur before we enter the
  // @synchronized. We can safely avoid a temp here because jumps into
  // @synchronized are illegal & this will dominate uses.
  llvm::Value *SyncArg = 0;
  if (!isTry) {
    SyncArg =
      CGF.EmitScalarExpr(cast<ObjCAtSynchronizedStmt>(S).getSynchExpr());
    SyncArg = CGF.Builder.CreateBitCast(SyncArg, ObjCTypes.ObjectPtrTy);
    CGF.Builder.CreateCall(ObjCTypes.getSyncEnterFn(), SyncArg);
  // Acquire the lock.
  SyncArg = CGF.Builder.CreateBitCast(SyncArg, ObjCTypes.ObjectPtrTy);
  CGF.Builder.CreateCall(ObjCTypes.getSyncEnterFn(), SyncArg)
    ->setDoesNotThrow();

  // Register an all-paths cleanup to release the lock.
  {
    CodeGenFunction::CleanupBlock
      ReleaseScope(CGF, CodeGenFunction::NormalAndEHCleanup);

    CGF.Builder.CreateCall(ObjCTypes.getSyncExitFn(), SyncArg)
      ->setDoesNotThrow();
  }

  // Push an EH context entry, used for handling rethrows and jumps
  // through finally.
  CGF.PushCleanupBlock(FinallyBlock);
  // Emit the body of the statement.
  CGF.EmitStmt(S.getSynchBody());

  CGF.setInvokeDest(TryHandler);
  // Pop the lock-release cleanup.
  CGF.PopCleanupBlock();
}

  CGF.EmitBlock(TryBlock);
  CGF.EmitStmt(isTry ? cast<ObjCAtTryStmt>(S).getTryBody()
                     : cast<ObjCAtSynchronizedStmt>(S).getSynchBody());
  CGF.EmitBranchThroughCleanup(FinallyEnd);
namespace {
  struct CatchHandler {
    const VarDecl *Variable;
    const Stmt *Body;
    llvm::BasicBlock *Block;
    llvm::Value *TypeInfo;
  };
}

// Emit the exception handler.
void CGObjCNonFragileABIMac::EmitTryStmt(CodeGen::CodeGenFunction &CGF,
                                         const ObjCAtTryStmt &S) {
  // Jump destination for falling out of catch bodies.
  CodeGenFunction::JumpDest Cont;
  if (S.getNumCatchStmts())
    Cont = CGF.getJumpDestInCurrentScope("eh.cont");

  CGF.EmitBlock(TryHandler);
  CodeGenFunction::FinallyInfo FinallyInfo;
  if (const ObjCAtFinallyStmt *Finally = S.getFinallyStmt())
    FinallyInfo = CGF.EnterFinallyBlock(Finally->getFinallyBody(),
                                        ObjCTypes.getObjCBeginCatchFn(),
                                        ObjCTypes.getObjCEndCatchFn(),
                                        ObjCTypes.getExceptionRethrowFn());

  llvm::Value *llvm_eh_exception =
    CGF.CGM.getIntrinsic(llvm::Intrinsic::eh_exception);
  llvm::Value *llvm_eh_selector =
    CGF.CGM.getIntrinsic(llvm::Intrinsic::eh_selector);
  llvm::Value *llvm_eh_typeid_for =
    CGF.CGM.getIntrinsic(llvm::Intrinsic::eh_typeid_for);
  llvm::Value *Exc = CGF.Builder.CreateCall(llvm_eh_exception, "exc");
  llvm::Value *RethrowPtr = CGF.CreateTempAlloca(Exc->getType(), "_rethrow");
  llvm::SmallVector<CatchHandler, 8> Handlers;

  llvm::SmallVector<llvm::Value*, 8> SelectorArgs;
  SelectorArgs.push_back(Exc);
  SelectorArgs.push_back(ObjCTypes.getEHPersonalityPtr());

  // Construct the lists of (type, catch body) to handle.
  llvm::SmallVector<std::pair<const VarDecl*, const Stmt*>, 8> Handlers;
  bool HasCatchAll = false;
  if (isTry) {
    const ObjCAtTryStmt &AtTry = cast<ObjCAtTryStmt>(S);
    for (unsigned I = 0, N = AtTry.getNumCatchStmts(); I != N; ++I) {
      const ObjCAtCatchStmt *CatchStmt = AtTry.getCatchStmt(I);
  // Enter the catch, if there is one.
  if (S.getNumCatchStmts()) {
    for (unsigned I = 0, N = S.getNumCatchStmts(); I != N; ++I) {
      const ObjCAtCatchStmt *CatchStmt = S.getCatchStmt(I);
      const VarDecl *CatchDecl = CatchStmt->getCatchParamDecl();
      Handlers.push_back(std::make_pair(CatchDecl, CatchStmt->getCatchBody()));

      // catch(...) always matches.
      Handlers.push_back(CatchHandler());
      CatchHandler &Handler = Handlers.back();
      Handler.Variable = CatchDecl;
      Handler.Body = CatchStmt->getCatchBody();
      Handler.Block = CGF.createBasicBlock("catch");

      // @catch(...) always matches.
      if (!CatchDecl) {
        // Use i8* null here to signal this is a catch all, not a cleanup.
        llvm::Value *Null = llvm::Constant::getNullValue(ObjCTypes.Int8PtrTy);
        SelectorArgs.push_back(Null);
        HasCatchAll = true;
        Handler.TypeInfo = 0; // catch-all
        // Don't consider any other catches.
        break;
      }

      // There's a particular fixed type info for 'id'.
      if (CatchDecl->getType()->isObjCIdType() ||
          CatchDecl->getType()->isObjCQualifiedIdType()) {
        llvm::Value *IDEHType =

@ -5651,7 +5762,7 @@ CGObjCNonFragileABIMac::EmitTryOrSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
                                   false,
                                   llvm::GlobalValue::ExternalLinkage,
                                   0, "OBJC_EHTYPE_id");
        SelectorArgs.push_back(IDEHType);
        Handler.TypeInfo = IDEHType;
      } else {
        // All other types should be Objective-C interface pointer types.
        const ObjCObjectPointerType *PT =

@ -5659,179 +5770,76 @@ CGObjCNonFragileABIMac::EmitTryOrSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
        assert(PT && "Invalid @catch type.");
        const ObjCInterfaceType *IT = PT->getInterfaceType();
        assert(IT && "Invalid @catch type.");
        llvm::Value *EHType = GetInterfaceEHType(IT->getDecl(), false);
        SelectorArgs.push_back(EHType);
        Handler.TypeInfo = GetInterfaceEHType(IT->getDecl(), false);
      }
    }

    EHCatchScope *Catch = CGF.EHStack.pushCatch(Handlers.size());
    for (unsigned I = 0, E = Handlers.size(); I != E; ++I)
      Catch->setHandler(I, Handlers[I].TypeInfo, Handlers[I].Block);
  }

  // Emit the try body.
  CGF.EmitStmt(S.getTryBody());

  // We use a cleanup unless there was already a catch all.
  if (!HasCatchAll) {
    // Even though this is a cleanup, treat it as a catch all to avoid the C++
    // personality behavior of terminating the process if only cleanups are
    // found in the exception handling stack.
    SelectorArgs.push_back(llvm::Constant::getNullValue(ObjCTypes.Int8PtrTy));
    Handlers.push_back(std::make_pair((const ParmVarDecl*) 0, (const Stmt*) 0));
  }
  // Leave the try.
  if (S.getNumCatchStmts())
    CGF.EHStack.popCatch();

  llvm::Value *Selector =
    CGF.Builder.CreateCall(llvm_eh_selector,
                           SelectorArgs.begin(), SelectorArgs.end(),
                           "selector");
  for (unsigned i = 0, e = Handlers.size(); i != e; ++i) {
    const VarDecl *CatchParam = Handlers[i].first;
    const Stmt *CatchBody = Handlers[i].second;
  // Remember where we were.
  CGBuilderTy::InsertPoint SavedIP = CGF.Builder.saveAndClearIP();

    llvm::BasicBlock *Next = 0;
  // Emit the handlers.
  for (unsigned I = 0, E = Handlers.size(); I != E; ++I) {
    CatchHandler &Handler = Handlers[I];

    // The last handler always matches.
    if (i + 1 != e) {
      assert(CatchParam && "Only last handler can be a catch all.");
    CGF.EmitBlock(Handler.Block);
    llvm::Value *RawExn = CGF.Builder.CreateLoad(CGF.getExceptionSlot());

      llvm::BasicBlock *Match = CGF.createBasicBlock("match");
      Next = CGF.createBasicBlock("catch.next");
      llvm::Value *Id =
        CGF.Builder.CreateCall(llvm_eh_typeid_for,
                               CGF.Builder.CreateBitCast(SelectorArgs[i+2],
                                                         ObjCTypes.Int8PtrTy));
      CGF.Builder.CreateCondBr(CGF.Builder.CreateICmpEQ(Selector, Id),
                               Match, Next);
    // Enter the catch.
    llvm::CallInst *Exn =
      CGF.Builder.CreateCall(ObjCTypes.getObjCBeginCatchFn(), RawExn,
                             "exn.adjusted");
    Exn->setDoesNotThrow();

      CGF.EmitBlock(Match);
    // Add a cleanup to leave the catch.
    {
      CodeGenFunction::CleanupBlock
        EndCatchBlock(CGF, CodeGenFunction::NormalAndEHCleanup);

      // __objc_end_catch never throws.
      CGF.Builder.CreateCall(ObjCTypes.getObjCEndCatchFn())
        ->setDoesNotThrow();
    }

    if (CatchBody) {
      llvm::BasicBlock *MatchEnd = CGF.createBasicBlock("match.end");
    // Bind the catch parameter if it exists.
    if (const VarDecl *CatchParam = Handler.Variable) {
      const llvm::Type *CatchType = CGF.ConvertType(CatchParam->getType());
      llvm::Value *CastExn = CGF.Builder.CreateBitCast(Exn, CatchType);

      // Cleanups must call objc_end_catch.
      CGF.PushCleanupBlock(MatchEnd);

      llvm::Value *ExcObject =
        CGF.Builder.CreateCall(ObjCTypes.getObjCBeginCatchFn(), Exc);

      // Bind the catch parameter if it exists.
      if (CatchParam) {
        ExcObject =
          CGF.Builder.CreateBitCast(ExcObject,
                                    CGF.ConvertType(CatchParam->getType()));
        // CatchParam is a ParmVarDecl because of the grammar
        // construction used to handle this, but for codegen purposes
        // we treat this as a local decl.
        CGF.EmitLocalBlockVarDecl(*CatchParam);
        CGF.Builder.CreateStore(ExcObject, CGF.GetAddrOfLocalVar(CatchParam));
      }

      // Exceptions inside the catch block must be rethrown. We set a special
      // purpose invoke destination for this which just collects the thrown
      // exception and overwrites the object in RethrowPtr, branches through the
      // match.end to make sure we call objc_end_catch, before branching to the
      // rethrow handler.
      llvm::BasicBlock *MatchHandler = CGF.createBasicBlock("match.handler");
      CGF.setInvokeDest(MatchHandler);
      CGF.ObjCEHValueStack.push_back(ExcObject);
      CGF.EmitStmt(CatchBody);
      CGF.ObjCEHValueStack.pop_back();
      CGF.setInvokeDest(0);

      CGF.EmitBranchThroughCleanup(FinallyEnd);

      // Don't emit the extra match handler if there were no unprotected calls in
      // the catch block.
      if (MatchHandler->use_empty()) {
        delete MatchHandler;
      } else {
        CGF.EmitBlock(MatchHandler);
        llvm::Value *Exc = CGF.Builder.CreateCall(llvm_eh_exception, "exc");
        // We are required to emit this call to satisfy LLVM, even
        // though we don't use the result.
        CGF.Builder.CreateCall3(llvm_eh_selector,
                                Exc, ObjCTypes.getEHPersonalityPtr(),
                                llvm::ConstantInt::get(CGF.Int32Ty, 0),
                                "unused_eh_selector");
        CGF.Builder.CreateStore(Exc, RethrowPtr);
        CGF.EmitBranchThroughCleanup(FinallyRethrow);
      }

      CodeGenFunction::CleanupBlockInfo Info = CGF.PopCleanupBlock();

      CGF.EmitBlock(MatchEnd);

      // Unfortunately, we also have to generate another EH frame here
      // in case this throws.
      llvm::BasicBlock *MatchEndHandler =
        CGF.createBasicBlock("match.end.handler");
      llvm::BasicBlock *Cont = CGF.createBasicBlock("invoke.cont");
      CGF.Builder.CreateInvoke(ObjCTypes.getObjCEndCatchFn(),
                               Cont, MatchEndHandler);

      CGF.EmitBlock(Cont);
      if (Info.SwitchBlock)
        CGF.EmitBlock(Info.SwitchBlock);
      if (Info.EndBlock)
        CGF.EmitBlock(Info.EndBlock);

      CGF.EmitBlock(MatchEndHandler);
      llvm::Value *Exc = CGF.Builder.CreateCall(llvm_eh_exception, "exc");
      // We are required to emit this call to satisfy LLVM, even
      // though we don't use the result.
      CGF.Builder.CreateCall3(llvm_eh_selector,
                              Exc, ObjCTypes.getEHPersonalityPtr(),
                              llvm::ConstantInt::get(CGF.Int32Ty, 0),
                              "unused_eh_selector");
      CGF.Builder.CreateStore(Exc, RethrowPtr);
      CGF.EmitBranchThroughCleanup(FinallyRethrow);

      if (Next)
        CGF.EmitBlock(Next);
    } else {
      assert(!Next && "catch-all should be last handler.");

      CGF.Builder.CreateStore(Exc, RethrowPtr);
      CGF.EmitBranchThroughCleanup(FinallyRethrow);
      CGF.EmitLocalBlockVarDecl(*CatchParam);
      CGF.Builder.CreateStore(CastExn, CGF.GetAddrOfLocalVar(CatchParam));
    }
  }

  // Pop the cleanup entry, the @finally is outside this cleanup
  // scope.
  CodeGenFunction::CleanupBlockInfo Info = CGF.PopCleanupBlock();
  CGF.setInvokeDest(PrevLandingPad);
    CGF.ObjCEHValueStack.push_back(Exn);
    CGF.EmitStmt(Handler.Body);
    CGF.ObjCEHValueStack.pop_back();

  CGF.EmitBlock(FinallyBlock);
    // Leave the earlier cleanup.
    CGF.PopCleanupBlock();

  if (isTry) {
    if (const ObjCAtFinallyStmt* FinallyStmt =
          cast<ObjCAtTryStmt>(S).getFinallyStmt())
      CGF.EmitStmt(FinallyStmt->getFinallyBody());
  } else {
    // Emit 'objc_sync_exit(expr)' as finally's sole statement for
    // @synchronized.
    CGF.Builder.CreateCall(ObjCTypes.getSyncExitFn(), SyncArg);
  }
    CGF.EmitBranchThroughCleanup(Cont);
  }

  if (Info.SwitchBlock)
    CGF.EmitBlock(Info.SwitchBlock);
  if (Info.EndBlock)
    CGF.EmitBlock(Info.EndBlock);
  // Go back to the try-statement fallthrough.
  CGF.Builder.restoreIP(SavedIP);

  // Branch around the rethrow code.
  CGF.EmitBranch(FinallyEnd);
  // Pop out of the normal cleanup on the finally.
  if (S.getFinallyStmt())
    CGF.ExitFinallyBlock(FinallyInfo);

  // Generate the rethrow code, taking care to use an invoke if we are in a
  // nested exception scope.
  CGF.EmitBlock(FinallyRethrow);
  if (PrevLandingPad) {
    llvm::BasicBlock *Cont = CGF.createBasicBlock("invoke.cont");
    CGF.Builder.CreateInvoke(ObjCTypes.getUnwindResumeOrRethrowFn(),
                             Cont, PrevLandingPad,
                             CGF.Builder.CreateLoad(RethrowPtr));
    CGF.EmitBlock(Cont);
  } else {
    CGF.Builder.CreateCall(ObjCTypes.getUnwindResumeOrRethrowFn(),
                           CGF.Builder.CreateLoad(RethrowPtr));
  }
  CGF.Builder.CreateUnreachable();

  CGF.EmitBlock(FinallyEnd);
  if (Cont.Block)
    CGF.EmitBlock(Cont.Block);
}

/// EmitThrowStmt - Generate code for a throw statement.

@ -5853,14 +5861,14 @@ void CGObjCNonFragileABIMac::EmitThrowStmt(CodeGen::CodeGenFunction &CGF,
    CGF.Builder.CreateBitCast(Exception, ObjCTypes.ObjectPtrTy, "tmp");
  llvm::BasicBlock *InvokeDest = CGF.getInvokeDest();
  if (InvokeDest) {
    llvm::BasicBlock *Cont = CGF.createBasicBlock("invoke.cont");
    CGF.Builder.CreateInvoke(FunctionThrowOrRethrow,
                             Cont, InvokeDest,
                             CGF.getUnreachableBlock(), InvokeDest,
                             &ExceptionAsObject, &ExceptionAsObject + 1);
    CGF.EmitBlock(Cont);
  } else
    CGF.Builder.CreateCall(FunctionThrowOrRethrow, ExceptionAsObject);
  CGF.Builder.CreateUnreachable();
  } else {
    CGF.Builder.CreateCall(FunctionThrowOrRethrow, ExceptionAsObject)
      ->setDoesNotReturn();
    CGF.Builder.CreateUnreachable();
  }

  // Clear the insertion point to indicate we are in unreachable code.
  CGF.Builder.ClearInsertionPoint();

@ -181,8 +181,10 @@ public:
  /// compiler when a mutation is detected during foreach iteration.
  virtual llvm::Constant *EnumerationMutationFunction() = 0;

  virtual void EmitTryOrSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
                                         const Stmt &S) = 0;
  virtual void EmitSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
                                    const ObjCAtSynchronizedStmt &S) = 0;
  virtual void EmitTryStmt(CodeGen::CodeGenFunction &CGF,
                           const ObjCAtTryStmt &S) = 0;
  virtual void EmitThrowStmt(CodeGen::CodeGenFunction &CGF,
                             const ObjCAtThrowStmt &S) = 0;
  virtual llvm::Value *EmitObjCWeakRead(CodeGen::CodeGenFunction &CGF,

@ -79,11 +79,8 @@ void CodeGenFunction::EmitStmt(const Stmt *S) {
  // Expression emitters don't handle unreachable blocks yet, so look for one
  // explicitly here. This handles the common case of a call to a noreturn
  // function.
  // We can't erase blocks with an associated cleanup size here since the
  // memory might be reused, leaving the old cleanup info pointing at a new
  // block.
  if (llvm::BasicBlock *CurBB = Builder.GetInsertBlock()) {
    if (CurBB->empty() && CurBB->use_empty() && !BlockScopes.count(CurBB)) {
    if (CurBB->empty() && CurBB->use_empty()) {
      CurBB->eraseFromParent();
      Builder.ClearInsertionPoint();
    }

@ -159,7 +156,7 @@ RValue CodeGenFunction::EmitCompoundStmt(const CompoundStmt &S, bool GetLast,
  }

  // Keep track of the current cleanup stack depth.
  CleanupScope Scope(*this);
  RunCleanupsScope Scope(*this);

  for (CompoundStmt::const_body_iterator I = S.body_begin(),
         E = S.body_end()-GetLast; I != E; ++I)

@ -198,7 +195,7 @@ void CodeGenFunction::SimplifyForwardingBlocks(llvm::BasicBlock *BB) {
  // If there is a cleanup stack, then it isn't worth trying to
  // simplify this block (we would need to remove it from the scope map
  // and cleanup entry).
  if (!CleanupEntries.empty())
  if (!EHStack.empty())
    return;

  // Can only simplify direct branches.

@ -221,18 +218,6 @@ void CodeGenFunction::EmitBlock(llvm::BasicBlock *BB, bool IsFinished) {
    return;
  }

  // If necessary, associate the block with the cleanup stack size.
  if (!CleanupEntries.empty()) {
    // Check if the basic block has already been inserted.
    BlockScopeMap::iterator I = BlockScopes.find(BB);
    if (I != BlockScopes.end()) {
      assert(I->second == CleanupEntries.size() - 1);
    } else {
      BlockScopes[BB] = CleanupEntries.size() - 1;
      CleanupEntries.back().Blocks.push_back(BB);
    }
  }

  // Place the block after the current block, if possible, or else at
  // the end of the function.
  if (CurBB && CurBB->getParent())

@ -259,8 +244,35 @@ void CodeGenFunction::EmitBranch(llvm::BasicBlock *Target) {
  Builder.ClearInsertionPoint();
}

CodeGenFunction::JumpDest
CodeGenFunction::getJumpDestForLabel(const LabelStmt *S) {
  JumpDest &Dest = LabelMap[S];
  if (Dest.Block) return Dest;

  // Create, but don't insert, the new block.
  Dest.Block = createBasicBlock(S->getName());
  Dest.ScopeDepth = EHScopeStack::stable_iterator::invalid();
  return Dest;
}

void CodeGenFunction::EmitLabel(const LabelStmt &S) {
  EmitBlock(getBasicBlockForLabel(&S));
  JumpDest &Dest = LabelMap[&S];

  // If we didn't need a forward reference to this label, just go
  // ahead and create a destination at the current scope.
  if (!Dest.Block) {
    Dest = getJumpDestInCurrentScope(S.getName());

  // Otherwise, we need to give this label a target depth and remove
  // it from the branch-fixups list.
  } else {
    assert(!Dest.ScopeDepth.isValid() && "already emitted label!");
    Dest.ScopeDepth = EHStack.stable_begin();

    EHStack.resolveBranchFixups(Dest.Block);
  }

  EmitBlock(Dest.Block);
}

@ -276,7 +288,7 @@ void CodeGenFunction::EmitGotoStmt(const GotoStmt &S) {
  if (HaveInsertPoint())
    EmitStopPoint(&S);

  EmitBranchThroughCleanup(getBasicBlockForLabel(S.getLabel()));
  EmitBranchThroughCleanup(getJumpDestForLabel(S.getLabel()));
}

@ -301,7 +313,7 @@ void CodeGenFunction::EmitIndirectGotoStmt(const IndirectGotoStmt &S) {
void CodeGenFunction::EmitIfStmt(const IfStmt &S) {
  // C99 6.8.4.1: The first substatement is executed if the expression compares
  // unequal to 0. The condition must be a scalar type.
  CleanupScope ConditionScope(*this);
  RunCleanupsScope ConditionScope(*this);

  if (S.getConditionVariable())
    EmitLocalBlockVarDecl(*S.getConditionVariable());

@ -318,7 +330,7 @@ void CodeGenFunction::EmitIfStmt(const IfStmt &S) {
  // This avoids emitting dead code and simplifies the CFG substantially.
  if (!ContainsLabel(Skipped)) {
    if (Executed) {
      CleanupScope ExecutedScope(*this);
      RunCleanupsScope ExecutedScope(*this);
      EmitStmt(Executed);
    }
    return;

@ -337,7 +349,7 @@ void CodeGenFunction::EmitIfStmt(const IfStmt &S) {
  // Emit the 'then' code.
  EmitBlock(ThenBlock);
  {
    CleanupScope ThenScope(*this);
    RunCleanupsScope ThenScope(*this);
    EmitStmt(S.getThen());
  }
  EmitBranch(ContBlock);

@ -346,7 +358,7 @@ void CodeGenFunction::EmitIfStmt(const IfStmt &S) {
  if (const Stmt *Else = S.getElse()) {
    EmitBlock(ElseBlock);
    {
      CleanupScope ElseScope(*this);
      RunCleanupsScope ElseScope(*this);
      EmitStmt(Else);
    }
    EmitBranch(ContBlock);

@ -357,20 +369,17 @@ void CodeGenFunction::EmitIfStmt(const IfStmt &S) {
}

void CodeGenFunction::EmitWhileStmt(const WhileStmt &S) {
  // Emit the header for the loop, insert it, which will create an uncond br to
  // it.
  llvm::BasicBlock *LoopHeader = createBasicBlock("while.cond");
  EmitBlock(LoopHeader);
  // Emit the header for the loop, which will also become
  // the continue target.
  JumpDest LoopHeader = getJumpDestInCurrentScope("while.cond");
  EmitBlock(LoopHeader.Block);

  // Create an exit block for when the condition fails, create a block for the
  // body of the loop.
  llvm::BasicBlock *ExitBlock = createBasicBlock("while.end");
  llvm::BasicBlock *LoopBody = createBasicBlock("while.body");
  llvm::BasicBlock *CleanupBlock = 0;
  llvm::BasicBlock *EffectiveExitBlock = ExitBlock;
  // Create an exit block for when the condition fails, which will
  // also become the break target.
  JumpDest LoopExit = getJumpDestInCurrentScope("while.end");

  // Store the blocks to use for break and continue.
  BreakContinueStack.push_back(BreakContinue(ExitBlock, LoopHeader));
  BreakContinueStack.push_back(BreakContinue(LoopExit, LoopHeader));

  // C++ [stmt.while]p2:
  //   When the condition of a while statement is a declaration, the

@ -379,18 +388,10 @@ void CodeGenFunction::EmitWhileStmt(const WhileStmt &S) {
  //   [...]
  //   The object created in a condition is destroyed and created
  //   with each iteration of the loop.
  CleanupScope ConditionScope(*this);
  RunCleanupsScope ConditionScope(*this);

  if (S.getConditionVariable()) {
  if (S.getConditionVariable())
    EmitLocalBlockVarDecl(*S.getConditionVariable());

    // If this condition variable requires cleanups, create a basic
    // block to handle those cleanups.
    if (ConditionScope.requiresCleanups()) {
      CleanupBlock = createBasicBlock("while.cleanup");
      EffectiveExitBlock = CleanupBlock;
    }
  }

  // Evaluate the conditional in the while header. C99 6.8.5.1: The
  // evaluation of the controlling expression takes place before each

@ -405,61 +406,63 @@ void CodeGenFunction::EmitWhileStmt(const WhileStmt &S) {
    EmitBoolCondBranch = false;

  // As long as the condition is true, go to the loop body.
  if (EmitBoolCondBranch)
    Builder.CreateCondBr(BoolCondVal, LoopBody, EffectiveExitBlock);
  llvm::BasicBlock *LoopBody = createBasicBlock("while.body");
  if (EmitBoolCondBranch) {
    llvm::BasicBlock *ExitBlock = LoopExit.Block;
    if (ConditionScope.requiresCleanups())
      ExitBlock = createBasicBlock("while.exit");

    Builder.CreateCondBr(BoolCondVal, LoopBody, ExitBlock);

    if (ExitBlock != LoopExit.Block) {
      EmitBlock(ExitBlock);
      EmitBranchThroughCleanup(LoopExit);
    }
  }

  // Emit the loop body.
  // Emit the loop body. We have to emit this in a cleanup scope
  // because it might be a singleton DeclStmt.
  {
    CleanupScope BodyScope(*this);
    RunCleanupsScope BodyScope(*this);
    EmitBlock(LoopBody);
    EmitStmt(S.getBody());
  }

  BreakContinueStack.pop_back();

  if (CleanupBlock) {
    // If we have a cleanup block, jump there to perform cleanups
    // before looping.
    EmitBranch(CleanupBlock);
  // Immediately force cleanup.
  ConditionScope.ForceCleanup();

    // Emit the cleanup block, performing cleanups for the condition
    // and then jumping to either the loop header or the exit block.
    EmitBlock(CleanupBlock);
    ConditionScope.ForceCleanup();
    Builder.CreateCondBr(BoolCondVal, LoopHeader, ExitBlock);
  } else {
    // Cycle to the condition.
    EmitBranch(LoopHeader);
  }
  // Branch to the loop header again.
  EmitBranch(LoopHeader.Block);

  // Emit the exit block.
  EmitBlock(ExitBlock, true);

  EmitBlock(LoopExit.Block, true);

  // The LoopHeader typically is just a branch if we skipped emitting
  // a branch, try to erase it.
  if (!EmitBoolCondBranch && !CleanupBlock)
    SimplifyForwardingBlocks(LoopHeader);
  if (!EmitBoolCondBranch)
    SimplifyForwardingBlocks(LoopHeader.Block);
}

void CodeGenFunction::EmitDoStmt(const DoStmt &S) {
  // Emit the body for the loop, insert it, which will create an uncond br to
  // it.
  llvm::BasicBlock *LoopBody = createBasicBlock("do.body");
  llvm::BasicBlock *AfterDo = createBasicBlock("do.end");
  EmitBlock(LoopBody);

  llvm::BasicBlock *DoCond = createBasicBlock("do.cond");
  JumpDest LoopExit = getJumpDestInCurrentScope("do.end");
  JumpDest LoopCond = getJumpDestInCurrentScope("do.cond");

  // Store the blocks to use for break and continue.
  BreakContinueStack.push_back(BreakContinue(AfterDo, DoCond));
  BreakContinueStack.push_back(BreakContinue(LoopExit, LoopCond));

  // Emit the body of the loop into the block.
  EmitStmt(S.getBody());
  // Emit the body of the loop.
  llvm::BasicBlock *LoopBody = createBasicBlock("do.body");
  EmitBlock(LoopBody);
  {
    RunCleanupsScope BodyScope(*this);
    EmitStmt(S.getBody());
  }

  BreakContinueStack.pop_back();

  EmitBlock(DoCond);
  EmitBlock(LoopCond.Block);

  // C99 6.8.5.2: "The evaluation of the controlling expression takes place
  // after each execution of the loop body."

@ -478,47 +481,49 @@ void CodeGenFunction::EmitDoStmt(const DoStmt &S) {

  // As long as the condition is true, iterate the loop.
  if (EmitBoolCondBranch)
    Builder.CreateCondBr(BoolCondVal, LoopBody, AfterDo);
    Builder.CreateCondBr(BoolCondVal, LoopBody, LoopExit.Block);

  // Emit the exit block.
  EmitBlock(AfterDo);
  EmitBlock(LoopExit.Block);

  // The DoCond block typically is just a branch if we skipped
  // emitting a branch, try to erase it.
  if (!EmitBoolCondBranch)
    SimplifyForwardingBlocks(DoCond);
    SimplifyForwardingBlocks(LoopCond.Block);
}

void CodeGenFunction::EmitForStmt(const ForStmt &S) {
  CleanupScope ForScope(*this);
  JumpDest LoopExit = getJumpDestInCurrentScope("for.end");

  RunCleanupsScope ForScope(*this);

  // Evaluate the first part before the loop.
  if (S.getInit())
    EmitStmt(S.getInit());

  // Start the loop with a block that tests the condition.
  llvm::BasicBlock *CondBlock = createBasicBlock("for.cond");
  llvm::BasicBlock *AfterFor = createBasicBlock("for.end");
  llvm::BasicBlock *IncBlock = 0;
  llvm::BasicBlock *CondCleanup = 0;
  llvm::BasicBlock *EffectiveExitBlock = AfterFor;
  // If there's an increment, the continue scope will be overwritten
  // later.
  JumpDest Continue = getJumpDestInCurrentScope("for.cond");
  llvm::BasicBlock *CondBlock = Continue.Block;
  EmitBlock(CondBlock);

  // Create a cleanup scope for the condition variable cleanups.
  CleanupScope ConditionScope(*this);
  RunCleanupsScope ConditionScope(*this);

  llvm::Value *BoolCondVal = 0;
  if (S.getCond()) {
    // If the for statement has a condition scope, emit the local variable
    // declaration.
    llvm::BasicBlock *ExitBlock = LoopExit.Block;
    if (S.getConditionVariable()) {
      EmitLocalBlockVarDecl(*S.getConditionVariable());

      if (ConditionScope.requiresCleanups()) {
        CondCleanup = createBasicBlock("for.cond.cleanup");
        EffectiveExitBlock = CondCleanup;
      }
    }

    // If there are any cleanups between here and the loop-exit scope,
    // create a block to stage a loop exit along.
    if (ForScope.requiresCleanups())
      ExitBlock = createBasicBlock("for.cond.cleanup");

    // As long as the condition is true, iterate the loop.
    llvm::BasicBlock *ForBody = createBasicBlock("for.body");

@ -526,7 +531,12 @@ void CodeGenFunction::EmitForStmt(const ForStmt &S) {
    // C99 6.8.5p2/p4: The first substatement is executed if the expression
    // compares unequal to 0. The condition must be a scalar type.
    BoolCondVal = EvaluateExprAsBool(S.getCond());
    Builder.CreateCondBr(BoolCondVal, ForBody, EffectiveExitBlock);
    Builder.CreateCondBr(BoolCondVal, ForBody, ExitBlock);

    if (ExitBlock != LoopExit.Block) {
      EmitBlock(ExitBlock);
      EmitBranchThroughCleanup(LoopExit);
    }

    EmitBlock(ForBody);
  } else {

@ -535,17 +545,15 @@ void CodeGenFunction::EmitForStmt(const ForStmt &S) {
  }

  // If the for loop doesn't have an increment we can just use the
  // condition as the continue block.
  llvm::BasicBlock *ContinueBlock;
  // condition as the continue block. Otherwise we'll need to create
  // a block for it (in the current scope, i.e. in the scope of the
  // condition), and that will become our continue block.
  if (S.getInc())
    ContinueBlock = IncBlock = createBasicBlock("for.inc");
  else
    ContinueBlock = CondBlock;
    Continue = getJumpDestInCurrentScope("for.inc");

  // Store the blocks to use for break and continue.
  BreakContinueStack.push_back(BreakContinue(AfterFor, ContinueBlock));
  BreakContinueStack.push_back(BreakContinue(LoopExit, Continue));

  // If the condition is true, execute the body of the for stmt.
  CGDebugInfo *DI = getDebugInfo();
  if (DI) {
    DI->setLocation(S.getSourceRange().getBegin());

@ -555,37 +563,30 @@ void CodeGenFunction::EmitForStmt(const ForStmt &S) {
  {
    // Create a separate cleanup scope for the body, in case it is not
    // a compound statement.
    CleanupScope BodyScope(*this);
    RunCleanupsScope BodyScope(*this);
    EmitStmt(S.getBody());
  }

  // If there is an increment, emit it next.
  if (S.getInc()) {
    EmitBlock(IncBlock);
    EmitBlock(Continue.Block);
    EmitStmt(S.getInc());
  }

  BreakContinueStack.pop_back();

  // Finally, branch back up to the condition for the next iteration.
  if (CondCleanup) {
    // Branch to the cleanup block.
    EmitBranch(CondCleanup);

    // Emit the cleanup block, which branches back to the loop body or
    // outside of the for statement once it is done.
    EmitBlock(CondCleanup);
    ConditionScope.ForceCleanup();
    Builder.CreateCondBr(BoolCondVal, CondBlock, AfterFor);
  } else
    EmitBranch(CondBlock);
  ConditionScope.ForceCleanup();
  EmitBranch(CondBlock);

  ForScope.ForceCleanup();

  if (DI) {
    DI->setLocation(S.getSourceRange().getEnd());
    DI->EmitRegionEnd(CurFn, Builder);
  }

  // Emit the fall-through block.
  EmitBlock(AfterFor, true);
  EmitBlock(LoopExit.Block, true);
}

void CodeGenFunction::EmitReturnOfRValue(RValue RV, QualType Ty) {

@ -666,7 +667,7 @@ void CodeGenFunction::EmitBreakStmt(const BreakStmt &S) {
  if (HaveInsertPoint())
    EmitStopPoint(&S);

  llvm::BasicBlock *Block = BreakContinueStack.back().BreakBlock;
  JumpDest Block = BreakContinueStack.back().BreakBlock;
  EmitBranchThroughCleanup(Block);
}

@ -679,7 +680,7 @@ void CodeGenFunction::EmitContinueStmt(const ContinueStmt &S) {
  if (HaveInsertPoint())
    EmitStopPoint(&S);

  llvm::BasicBlock *Block = BreakContinueStack.back().ContinueBlock;
  JumpDest Block = BreakContinueStack.back().ContinueBlock;
  EmitBranchThroughCleanup(Block);
}

@ -788,7 +789,9 @@ void CodeGenFunction::EmitDefaultStmt(const DefaultStmt &S) {
}

void CodeGenFunction::EmitSwitchStmt(const SwitchStmt &S) {
  CleanupScope ConditionScope(*this);
  JumpDest SwitchExit = getJumpDestInCurrentScope("sw.epilog");

  RunCleanupsScope ConditionScope(*this);

  if (S.getConditionVariable())
    EmitLocalBlockVarDecl(*S.getConditionVariable());

@ -803,7 +806,6 @@ void CodeGenFunction::EmitSwitchStmt(const SwitchStmt &S) {
  // statement. We also need to create a default block now so that
  // explicit case ranges tests can have a place to jump to on
  // failure.
  llvm::BasicBlock *NextBlock = createBasicBlock("sw.epilog");
  llvm::BasicBlock *DefaultBlock = createBasicBlock("sw.default");
  SwitchInsn = Builder.CreateSwitch(CondV, DefaultBlock);
  CaseRangeBlock = DefaultBlock;

@ -813,12 +815,11 @@ void CodeGenFunction::EmitSwitchStmt(const SwitchStmt &S) {

  // All break statements jump to NextBlock. If BreakContinueStack is non-empty
  // then reuse last ContinueBlock.
  llvm::BasicBlock *ContinueBlock = 0;
  JumpDest OuterContinue;
  if (!BreakContinueStack.empty())
    ContinueBlock = BreakContinueStack.back().ContinueBlock;
    OuterContinue = BreakContinueStack.back().ContinueBlock;

  // Ensure any VLAs created between there and here are undone.
  BreakContinueStack.push_back(BreakContinue(NextBlock, ContinueBlock));
  BreakContinueStack.push_back(BreakContinue(SwitchExit, OuterContinue));

  // Emit switch body.
  EmitStmt(S.getBody());
@ -829,15 +830,22 @@ void CodeGenFunction::EmitSwitchStmt(const SwitchStmt &S) {
  // been chained on top.
  SwitchInsn->setSuccessor(0, CaseRangeBlock);

  // If a default was never emitted then reroute any jumps to it and
  // discard.
  // If a default was never emitted:
  if (!DefaultBlock->getParent()) {
    DefaultBlock->replaceAllUsesWith(NextBlock);
    delete DefaultBlock;
    // If we have cleanups, emit the default block so that there's a
    // place to jump through the cleanups from.
    if (ConditionScope.requiresCleanups()) {
      EmitBlock(DefaultBlock);

    // Otherwise, just forward the default block to the switch end.
    } else {
      DefaultBlock->replaceAllUsesWith(SwitchExit.Block);
      delete DefaultBlock;
    }
  }

  // Emit continuation.
  EmitBlock(NextBlock, true);
  EmitBlock(SwitchExit.Block, true);

  SwitchInsn = SavedSwitchInsn;
  CaseRangeBlock = SavedCRBlock;

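The switch changes above illustrate the pattern this patch applies everywhere: capture a control-flow target as a JumpDest (a block plus an EH-stack depth) and emit every branch to it with EmitBranchThroughCleanup. A minimal sketch, assuming a hypothetical EmitSimpleLoop helper that is not part of this patch; all other names are from the diff:

    void CodeGenFunction::EmitSimpleLoop(const Stmt *Body) {
      // Capture break/continue targets at the current cleanup depth.
      JumpDest LoopExit = getJumpDestInCurrentScope("loop.end");
      JumpDest LoopHeader = getJumpDestInCurrentScope("loop.header");
      BreakContinueStack.push_back(BreakContinue(LoopExit, LoopHeader));

      // A 'break' inside the body now reduces to
      //   EmitBranchThroughCleanup(LoopExit);
      // which threads the branch through every normal cleanup pushed
      // since LoopExit.ScopeDepth was captured.
      EmitStmt(Body);

      BreakContinueStack.pop_back();
      EmitBlock(LoopExit.Block);
    }
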
@ -15,14 +15,38 @@
using namespace clang;
using namespace CodeGen;

void CodeGenFunction::PushCXXTemporary(const CXXTemporary *Temporary,
                                       llvm::Value *Ptr) {
  assert((LiveTemporaries.empty() ||
          LiveTemporaries.back().ThisPtr != Ptr ||
          ConditionalBranchLevel) &&
         "Pushed the same temporary twice; AST is likely wrong");
  llvm::BasicBlock *DtorBlock = createBasicBlock("temp.dtor");
static void EmitTemporaryCleanup(CodeGenFunction &CGF,
                                 const CXXTemporary *Temporary,
                                 llvm::Value *Addr,
                                 llvm::Value *CondPtr) {
  llvm::BasicBlock *CondEnd = 0;

  // If this is a conditional temporary, we need to check the condition
  // boolean and only call the destructor if it's true.
  if (CondPtr) {
    llvm::BasicBlock *CondBlock = CGF.createBasicBlock("temp.cond-dtor.call");
    CondEnd = CGF.createBasicBlock("temp.cond-dtor.cont");

    llvm::Value *Cond = CGF.Builder.CreateLoad(CondPtr);
    CGF.Builder.CreateCondBr(Cond, CondBlock, CondEnd);
    CGF.EmitBlock(CondBlock);
  }

  CGF.EmitCXXDestructorCall(Temporary->getDestructor(),
                            Dtor_Complete, /*ForVirtualBase=*/false,
                            Addr);

  if (CondPtr) {
    // Reset the condition to false.
    CGF.Builder.CreateStore(llvm::ConstantInt::getFalse(CGF.getLLVMContext()),
                            CondPtr);
    CGF.EmitBlock(CondEnd);
  }
}

/// Emits all the code to cause the given temporary to be cleaned up.
void CodeGenFunction::EmitCXXTemporary(const CXXTemporary *Temporary,
                                       llvm::Value *Ptr) {
  llvm::AllocaInst *CondPtr = 0;

  // Check if temporaries need to be conditional. If so, we'll create a
@ -38,123 +62,37 @@ void CodeGenFunction::PushCXXTemporary(const CXXTemporary *Temporary,
    Builder.CreateStore(llvm::ConstantInt::getTrue(VMContext), CondPtr);
  }

  LiveTemporaries.push_back(CXXLiveTemporaryInfo(Temporary, Ptr, DtorBlock,
                                                 CondPtr));

  PushCleanupBlock(DtorBlock);
  CleanupBlock Cleanup(*this, NormalCleanup);
  EmitTemporaryCleanup(*this, Temporary, Ptr, CondPtr);

  if (Exceptions) {
    const CXXLiveTemporaryInfo& Info = LiveTemporaries.back();
    llvm::BasicBlock *CondEnd = 0;

    EHCleanupBlock Cleanup(*this);

    // If this is a conditional temporary, we need to check the condition
    // boolean and only call the destructor if it's true.
    if (Info.CondPtr) {
      llvm::BasicBlock *CondBlock = createBasicBlock("cond.dtor.call");
      CondEnd = createBasicBlock("cond.dtor.end");

      llvm::Value *Cond = Builder.CreateLoad(Info.CondPtr);
      Builder.CreateCondBr(Cond, CondBlock, CondEnd);
      EmitBlock(CondBlock);
    }

    EmitCXXDestructorCall(Info.Temporary->getDestructor(),
                          Dtor_Complete, /*ForVirtualBase=*/false,
                          Info.ThisPtr);

    if (CondEnd) {
      // Reset the condition to false.
      Builder.CreateStore(llvm::ConstantInt::getFalse(VMContext), Info.CondPtr);
      EmitBlock(CondEnd);
    }
    Cleanup.beginEHCleanup();
    EmitTemporaryCleanup(*this, Temporary, Ptr, CondPtr);
  }
}

void CodeGenFunction::PopCXXTemporary() {
  const CXXLiveTemporaryInfo& Info = LiveTemporaries.back();

  CleanupBlockInfo CleanupInfo = PopCleanupBlock();
  assert(CleanupInfo.CleanupBlock == Info.DtorBlock &&
         "Cleanup block mismatch!");
  assert(!CleanupInfo.SwitchBlock &&
         "Should not have a switch block for temporary cleanup!");
  assert(!CleanupInfo.EndBlock &&
         "Should not have an end block for temporary cleanup!");

  llvm::BasicBlock *CurBB = Builder.GetInsertBlock();
  if (CurBB && !CurBB->getTerminator() &&
      Info.DtorBlock->getNumUses() == 0) {
    CurBB->getInstList().splice(CurBB->end(), Info.DtorBlock->getInstList());
    delete Info.DtorBlock;
  } else
    EmitBlock(Info.DtorBlock);

  llvm::BasicBlock *CondEnd = 0;

  // If this is a conditional temporary, we need to check the condition
  // boolean and only call the destructor if it's true.
  if (Info.CondPtr) {
    llvm::BasicBlock *CondBlock = createBasicBlock("cond.dtor.call");
    CondEnd = createBasicBlock("cond.dtor.end");

    llvm::Value *Cond = Builder.CreateLoad(Info.CondPtr);
    Builder.CreateCondBr(Cond, CondBlock, CondEnd);
    EmitBlock(CondBlock);
  }

  EmitCXXDestructorCall(Info.Temporary->getDestructor(),
                        Dtor_Complete, /*ForVirtualBase=*/false, Info.ThisPtr);

  if (CondEnd) {
    // Reset the condition to false.
    Builder.CreateStore(llvm::ConstantInt::getFalse(VMContext), Info.CondPtr);
    EmitBlock(CondEnd);
  }

  LiveTemporaries.pop_back();
}

RValue
CodeGenFunction::EmitCXXExprWithTemporaries(const CXXExprWithTemporaries *E,
                                            llvm::Value *AggLoc,
                                            bool IsAggLocVolatile,
                                            bool IsInitializer) {
  // Keep track of the current cleanup stack depth.
  size_t CleanupStackDepth = CleanupEntries.size();
  (void) CleanupStackDepth;

  RValue RV;

  {
    CXXTemporariesCleanupScope Scope(*this);
    RunCleanupsScope Scope(*this);

    RV = EmitAnyExpr(E->getSubExpr(), AggLoc, IsAggLocVolatile,
                     /*IgnoreResult=*/false, IsInitializer);
  }
  assert(CleanupEntries.size() == CleanupStackDepth &&
         "Cleanup size mismatch!");

  return RV;
}

LValue CodeGenFunction::EmitCXXExprWithTemporariesLValue(
                                              const CXXExprWithTemporaries *E) {
  // Keep track of the current cleanup stack depth.
  size_t CleanupStackDepth = CleanupEntries.size();
  (void) CleanupStackDepth;

  unsigned OldNumLiveTemporaries = LiveTemporaries.size();

  LValue LV = EmitLValue(E->getSubExpr());

  // Pop temporaries.
  while (LiveTemporaries.size() > OldNumLiveTemporaries)
    PopCXXTemporary();

  assert(CleanupEntries.size() == CleanupStackDepth &&
         "Cleanup size mismatch!");
  LValue LV;
  {
    RunCleanupsScope Scope(*this);

    LV = EmitLValue(E->getSubExpr());
  }
  return LV;
}

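With PopCXXTemporary gone, temporary destruction becomes just another cleanup popped by RunCleanupsScope when the scope exits. A rough sketch of the caller-side pattern (EmitGuardedExpr is hypothetical; the other names appear in this patch):

    RValue CodeGenFunction::EmitGuardedExpr(const Expr *E, QualType TempTy,
                                            llvm::Value *TempAddr) {
      RunCleanupsScope Scope(*this);

      // No-op unless TempTy is a C++ class with a non-trivial destructor.
      PushDestructorCleanup(TempTy, TempAddr);

      RValue RV = EmitAnyExpr(E);
      return RV;  // ~RunCleanupsScope pops the cleanup as the scope
                  // exits, emitting the destructor call on the normal
                  // edge (and the EH edge, if one was requested).
    }
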
@ -14,6 +14,7 @@
#include "CodeGenFunction.h"
#include "CodeGenModule.h"
#include "CGDebugInfo.h"
#include "CGException.h"
#include "clang/Basic/TargetInfo.h"
#include "clang/AST/APValue.h"
#include "clang/AST/ASTContext.h"
@ -30,10 +31,12 @@ CodeGenFunction::CodeGenFunction(CodeGenModule &cgm)
  : BlockFunction(cgm, *this, Builder), CGM(cgm),
    Target(CGM.getContext().Target),
    Builder(cgm.getModule().getContext()),
    DebugInfo(0), IndirectBranch(0),
    ExceptionSlot(0), DebugInfo(0), IndirectBranch(0),
    SwitchInsn(0), CaseRangeBlock(0), InvokeDest(0),
    DidCallStackSave(false), UnreachableBlock(0),
    CXXThisDecl(0), CXXThisValue(0), CXXVTTDecl(0), CXXVTTValue(0),
    ConditionalBranchLevel(0), TerminateHandler(0), TrapBB(0) {
    ConditionalBranchLevel(0), TerminateLandingPad(0), TerminateHandler(0),
    TrapBB(0) {

  // Get some frequently used types.
  LLVMPointerWidth = Target.getPointerWidth(0);
@ -52,14 +55,6 @@ ASTContext &CodeGenFunction::getContext() const {
}


llvm::BasicBlock *CodeGenFunction::getBasicBlockForLabel(const LabelStmt *S) {
  llvm::BasicBlock *&BB = LabelMap[S];
  if (BB) return BB;

  // Create, but don't insert, the new block.
  return BB = createBasicBlock(S->getName());
}

llvm::Value *CodeGenFunction::GetAddrOfLocalVar(const VarDecl *VD) {
  llvm::Value *Res = LocalDeclMap[VD];
  assert(Res && "Invalid argument to GetAddrOfLocalVar(), no decl!");
@ -94,25 +89,26 @@ void CodeGenFunction::EmitReturnBlock() {

    // We have a valid insert point, reuse it if it is empty or there are no
    // explicit jumps to the return block.
    if (CurBB->empty() || ReturnBlock->use_empty()) {
      ReturnBlock->replaceAllUsesWith(CurBB);
      delete ReturnBlock;
    if (CurBB->empty() || ReturnBlock.Block->use_empty()) {
      ReturnBlock.Block->replaceAllUsesWith(CurBB);
      delete ReturnBlock.Block;
    } else
      EmitBlock(ReturnBlock);
      EmitBlock(ReturnBlock.Block);
    return;
  }

  // Otherwise, if the return block is the target of a single direct
  // branch then we can just put the code in that block instead. This
  // cleans up functions which started with a unified return block.
  if (ReturnBlock->hasOneUse()) {
  if (ReturnBlock.Block->hasOneUse()) {
    llvm::BranchInst *BI =
      dyn_cast<llvm::BranchInst>(*ReturnBlock->use_begin());
    if (BI && BI->isUnconditional() && BI->getSuccessor(0) == ReturnBlock) {
      dyn_cast<llvm::BranchInst>(*ReturnBlock.Block->use_begin());
    if (BI && BI->isUnconditional() &&
        BI->getSuccessor(0) == ReturnBlock.Block) {
      // Reset insertion point and delete the branch.
      Builder.SetInsertPoint(BI->getParent());
      BI->eraseFromParent();
      delete ReturnBlock;
      delete ReturnBlock.Block;
      return;
    }
  }

@ -121,16 +117,19 @@ void CodeGenFunction::EmitReturnBlock() {
  // unless it has uses. However, we still need a place to put the debug
  // region.end for now.

  EmitBlock(ReturnBlock);
  EmitBlock(ReturnBlock.Block);
}

static void EmitIfUsed(CodeGenFunction &CGF, llvm::BasicBlock *BB) {
  if (!BB) return;
  if (!BB->use_empty())
    return CGF.CurFn->getBasicBlockList().push_back(BB);
  delete BB;
}

void CodeGenFunction::FinishFunction(SourceLocation EndLoc) {
  assert(BreakContinueStack.empty() &&
         "mismatched push/pop in break/continue stack!");
  assert(BlockScopes.empty() &&
         "did not remove all blocks from block scope map!");
  assert(CleanupEntries.empty() &&
         "mismatched push/pop in cleanup stack!");

  // Emit function epilog (to return).
  EmitReturnBlock();
@ -146,6 +145,9 @@ void CodeGenFunction::FinishFunction(SourceLocation EndLoc) {
  EmitFunctionEpilog(*CurFnInfo);
  EmitEndEHSpec(CurCodeDecl);

  assert(EHStack.empty() &&
         "did not remove all scopes from cleanup stack!");

  // If someone did an indirect goto, emit the indirect goto block at the end of
  // the function.
  if (IndirectBranch) {
@ -167,6 +169,10 @@ void CodeGenFunction::FinishFunction(SourceLocation EndLoc) {
      PN->eraseFromParent();
    }
  }

  EmitIfUsed(*this, TerminateLandingPad);
  EmitIfUsed(*this, TerminateHandler);
  EmitIfUsed(*this, UnreachableBlock);
}

/// ShouldInstrumentFunction - Return true if the current function should be
@ -241,7 +247,7 @@ void CodeGenFunction::StartFunction(GlobalDecl GD, QualType RetTy,
  if (Builder.isNamePreserving())
    AllocaInsertPt->setName("allocapt");

  ReturnBlock = createBasicBlock("return");
  ReturnBlock = getJumpDestInCurrentScope("return");

  Builder.SetInsertPoint(EntryBB);

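Since ReturnBlock is now a JumpDest captured at function entry, a return statement anywhere in the function can be lowered without knowing which cleanups are currently active. Illustrative fragment only; the patch's actual return-statement emitter is not shown in this diff:

    // Inside a hypothetical return-statement emitter:
    //   ... store the result into ReturnValue ...
    EmitBranchThroughCleanup(ReturnBlock);
    // Any normal cleanups between here and function scope are threaded
    // in automatically, and the insertion point is cleared.
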
@ -576,7 +582,7 @@ llvm::BlockAddress *CodeGenFunction::GetAddrOfLabel(const LabelStmt *L) {
  if (IndirectBranch == 0)
    GetIndirectGotoBlock();

  llvm::BasicBlock *BB = getBasicBlockForLabel(L);
  llvm::BasicBlock *BB = getJumpDestForLabel(L).Block;

  // Make sure the indirect branch includes all of the address-taken blocks.
  IndirectBranch->addDestination(BB);
@ -653,223 +659,386 @@ llvm::Value* CodeGenFunction::EmitVAListRef(const Expr* E) {
  return EmitLValue(E).getAddress();
}

void CodeGenFunction::PushCleanupBlock(llvm::BasicBlock *CleanupEntryBlock,
                                       llvm::BasicBlock *CleanupExitBlock,
                                       llvm::BasicBlock *PreviousInvokeDest,
                                       bool EHOnly) {
  CleanupEntries.push_back(CleanupEntry(CleanupEntryBlock, CleanupExitBlock,
                                        PreviousInvokeDest, EHOnly));
/// Pops cleanup blocks until the given savepoint is reached.
void CodeGenFunction::PopCleanupBlocks(EHScopeStack::stable_iterator Old) {
  assert(Old.isValid());

  EHScopeStack::iterator E = EHStack.find(Old);
  while (EHStack.begin() != E)
    PopCleanupBlock();
}

void CodeGenFunction::EmitCleanupBlocks(size_t OldCleanupStackSize) {
  assert(CleanupEntries.size() >= OldCleanupStackSize &&
         "Cleanup stack mismatch!");
/// Destroys a cleanup if it was unused.
static void DestroyCleanup(CodeGenFunction &CGF,
                           llvm::BasicBlock *Entry,
                           llvm::BasicBlock *Exit) {
  assert(Entry->use_empty() && "destroying cleanup with uses!");
  assert(Exit->getTerminator() == 0 &&
         "exit has terminator but entry has no predecessors!");

  while (CleanupEntries.size() > OldCleanupStackSize)
    EmitCleanupBlock();
  // This doesn't always remove the entire cleanup, but it's much
  // safer as long as we don't know what blocks belong to the cleanup.
  // A *much* better approach if we care about this inefficiency would
  // be to lazily emit the cleanup.

  // If the exit block is distinct from the entry, give it a branch to
  // an unreachable destination. This preserves the well-formedness
  // of the IR.
  if (Entry != Exit)
    llvm::BranchInst::Create(CGF.getUnreachableBlock(), Exit);

  assert(!Entry->getParent() && "cleanup entry already positioned?");
  delete Entry;
}

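DestroyCleanup parks an orphaned exit edge on getUnreachableBlock(). The accessor itself is defined elsewhere in the patch; the usual lazy-creation pattern would look like this (a sketch under that assumption, not the patch's verbatim code):

    llvm::BasicBlock *CodeGenFunction::getUnreachableBlock() {
      if (!UnreachableBlock) {
        // Create the block once and give it its sole instruction.
        UnreachableBlock = createBasicBlock("unreachable");
        new llvm::UnreachableInst(getLLVMContext(), UnreachableBlock);
      }
      return UnreachableBlock;
    }
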
CodeGenFunction::CleanupBlockInfo CodeGenFunction::PopCleanupBlock() {
  CleanupEntry &CE = CleanupEntries.back();

  llvm::BasicBlock *CleanupEntryBlock = CE.CleanupEntryBlock;

  std::vector<llvm::BasicBlock *> Blocks;
  std::swap(Blocks, CE.Blocks);

  std::vector<llvm::BranchInst *> BranchFixups;
  std::swap(BranchFixups, CE.BranchFixups);

  bool EHOnly = CE.EHOnly;

  setInvokeDest(CE.PreviousInvokeDest);

  CleanupEntries.pop_back();

  // Check if any branch fixups pointed to the scope we just popped. If so,
  // we can remove them.
  for (size_t i = 0, e = BranchFixups.size(); i != e; ++i) {
    llvm::BasicBlock *Dest = BranchFixups[i]->getSuccessor(0);
    BlockScopeMap::iterator I = BlockScopes.find(Dest);

    if (I == BlockScopes.end())
      continue;

    assert(I->second <= CleanupEntries.size() && "Invalid branch fixup!");

    if (I->second == CleanupEntries.size()) {
      // We don't need to do this branch fixup.
      BranchFixups[i] = BranchFixups.back();
      BranchFixups.pop_back();
      i--;
      e--;
      continue;
    }
  }

  llvm::BasicBlock *SwitchBlock = CE.CleanupExitBlock;
  llvm::BasicBlock *EndBlock = 0;
  if (!BranchFixups.empty()) {
    if (!SwitchBlock)
      SwitchBlock = createBasicBlock("cleanup.switch");
    EndBlock = createBasicBlock("cleanup.end");

    llvm::BasicBlock *CurBB = Builder.GetInsertBlock();

    Builder.SetInsertPoint(SwitchBlock);

    llvm::Value *DestCodePtr = CreateTempAlloca(Int32Ty, "cleanup.dst");
    llvm::Value *DestCode = Builder.CreateLoad(DestCodePtr, "tmp");

    // Create a switch instruction to determine where to jump next.
    llvm::SwitchInst *SI = Builder.CreateSwitch(DestCode, EndBlock,
                                                BranchFixups.size());

    // Restore the current basic block (if any)
    if (CurBB) {
      Builder.SetInsertPoint(CurBB);

      // If we had a current basic block, we also need to emit an instruction
      // to initialize the cleanup destination.
      Builder.CreateStore(llvm::Constant::getNullValue(Int32Ty),
                          DestCodePtr);
    } else
      Builder.ClearInsertionPoint();

    for (size_t i = 0, e = BranchFixups.size(); i != e; ++i) {
      llvm::BranchInst *BI = BranchFixups[i];
      llvm::BasicBlock *Dest = BI->getSuccessor(0);

      // Fixup the branch instruction to point to the cleanup block.
      BI->setSuccessor(0, CleanupEntryBlock);

      if (CleanupEntries.empty()) {
        llvm::ConstantInt *ID;

        // Check if we already have a destination for this block.
        if (Dest == SI->getDefaultDest())
          ID = llvm::ConstantInt::get(Int32Ty, 0);
        else {
          ID = SI->findCaseDest(Dest);
          if (!ID) {
            // No code found, get a new unique one by using the number of
            // switch successors.
            ID = llvm::ConstantInt::get(Int32Ty, SI->getNumSuccessors());
            SI->addCase(ID, Dest);
          }
        }

        // Store the jump destination before the branch instruction.
        new llvm::StoreInst(ID, DestCodePtr, BI);
      } else {
        // We need to jump through another cleanup block. Create a pad block
        // with a branch instruction that jumps to the final destination and add
        // it as a branch fixup to the current cleanup scope.

        // Create the pad block.
        llvm::BasicBlock *CleanupPad = createBasicBlock("cleanup.pad", CurFn);

        // Create a unique case ID.
        llvm::ConstantInt *ID
          = llvm::ConstantInt::get(Int32Ty, SI->getNumSuccessors());

        // Store the jump destination before the branch instruction.
        new llvm::StoreInst(ID, DestCodePtr, BI);

        // Add it as the destination.
        SI->addCase(ID, CleanupPad);

        // Create the branch to the final destination.
        llvm::BranchInst *BI = llvm::BranchInst::Create(Dest);
        CleanupPad->getInstList().push_back(BI);

        // And add it as a branch fixup.
        CleanupEntries.back().BranchFixups.push_back(BI);
      }
    }
  }

  // Remove all blocks from the block scope map.
  for (size_t i = 0, e = Blocks.size(); i != e; ++i) {
    assert(BlockScopes.count(Blocks[i]) &&
           "Did not find block in scope map!");

    BlockScopes.erase(Blocks[i]);
  }

  return CleanupBlockInfo(CleanupEntryBlock, SwitchBlock, EndBlock, EHOnly);
}

void CodeGenFunction::EmitCleanupBlock() {
  CleanupBlockInfo Info = PopCleanupBlock();

  if (Info.EHOnly) {
    // FIXME: Add this to the exceptional edge
    if (Info.CleanupBlock->getNumUses() == 0)
      delete Info.CleanupBlock;
/// Creates a switch instruction to thread branches out of the given
/// block (which is the exit block of a cleanup).
static void CreateCleanupSwitch(CodeGenFunction &CGF,
                                llvm::BasicBlock *Block) {
  if (Block->getTerminator()) {
    assert(isa<llvm::SwitchInst>(Block->getTerminator()) &&
           "cleanup block already has a terminator, but it isn't a switch");
    return;
  }

  // Scrub debug location info.
  for (llvm::BasicBlock::iterator LBI = Info.CleanupBlock->begin(),
         LBE = Info.CleanupBlock->end(); LBI != LBE; ++LBI)
    Builder.SetInstDebugLocation(LBI);
  llvm::Value *DestCodePtr
    = CGF.CreateTempAlloca(CGF.Builder.getInt32Ty(), "cleanup.dst");
  CGBuilderTy Builder(Block);
  llvm::Value *DestCode = Builder.CreateLoad(DestCodePtr, "tmp");

  llvm::BasicBlock *CurBB = Builder.GetInsertBlock();
  if (CurBB && !CurBB->getTerminator() &&
      Info.CleanupBlock->getNumUses() == 0) {
    CurBB->getInstList().splice(CurBB->end(), Info.CleanupBlock->getInstList());
    delete Info.CleanupBlock;
  } else
    EmitBlock(Info.CleanupBlock);

  if (Info.SwitchBlock)
    EmitBlock(Info.SwitchBlock);
  if (Info.EndBlock)
    EmitBlock(Info.EndBlock);
  // Create a switch instruction to determine where to jump next.
  Builder.CreateSwitch(DestCode, CGF.getUnreachableBlock());
}

void CodeGenFunction::AddBranchFixup(llvm::BranchInst *BI) {
  assert(!CleanupEntries.empty() &&
         "Trying to add branch fixup without cleanup block!");
/// Attempts to reduce a cleanup's entry block to a fallthrough. This
/// is basically llvm::MergeBlockIntoPredecessor, except
/// simplified/optimized for the tighter constraints on cleanup
/// blocks.
static void SimplifyCleanupEntry(CodeGenFunction &CGF,
                                 llvm::BasicBlock *Entry) {
  llvm::BasicBlock *Pred = Entry->getSinglePredecessor();
  if (!Pred) return;

  // FIXME: We could be more clever here and check if there's already a branch
  // fixup for this destination and recycle it.
  CleanupEntries.back().BranchFixups.push_back(BI);
  llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Pred->getTerminator());
  if (!Br || Br->isConditional()) return;
  assert(Br->getSuccessor(0) == Entry);

  // If we were previously inserting at the end of the cleanup entry
  // block, we'll need to continue inserting at the end of the
  // predecessor.
  bool WasInsertBlock = CGF.Builder.GetInsertBlock() == Entry;
  assert(!WasInsertBlock || CGF.Builder.GetInsertPoint() == Entry->end());

  // Kill the branch.
  Br->eraseFromParent();

  // Merge the blocks.
  Pred->getInstList().splice(Pred->end(), Entry->getInstList());

  // Kill the entry block.
  Entry->eraseFromParent();

  if (WasInsertBlock)
    CGF.Builder.SetInsertPoint(Pred);
}

void CodeGenFunction::EmitBranchThroughCleanup(llvm::BasicBlock *Dest) {
/// Attempts to reduce a cleanup's exit switch to an unconditional
/// branch.
static void SimplifyCleanupExit(llvm::BasicBlock *Exit) {
  llvm::TerminatorInst *Terminator = Exit->getTerminator();
  assert(Terminator && "completed cleanup exit has no terminator");

  llvm::SwitchInst *Switch = dyn_cast<llvm::SwitchInst>(Terminator);
  if (!Switch) return;
  if (Switch->getNumCases() != 2) return; // default + 1

  llvm::LoadInst *Cond = cast<llvm::LoadInst>(Switch->getCondition());
  llvm::AllocaInst *CondVar = cast<llvm::AllocaInst>(Cond->getPointerOperand());

  // Replace the switch instruction with an unconditional branch.
  llvm::BasicBlock *Dest = Switch->getSuccessor(1); // default is 0
  Switch->eraseFromParent();
  llvm::BranchInst::Create(Dest, Exit);

  // Delete all uses of the condition variable.
  Cond->eraseFromParent();
  while (!CondVar->use_empty())
    cast<llvm::StoreInst>(*CondVar->use_begin())->eraseFromParent();

  // Delete the condition variable itself.
  CondVar->eraseFromParent();
}

/// Threads a branch fixup through a cleanup block.
static void ThreadFixupThroughCleanup(CodeGenFunction &CGF,
                                      BranchFixup &Fixup,
                                      llvm::BasicBlock *Entry,
                                      llvm::BasicBlock *Exit) {
  if (!Exit->getTerminator())
    CreateCleanupSwitch(CGF, Exit);

  // Find the switch and its destination index alloca.
  llvm::SwitchInst *Switch = cast<llvm::SwitchInst>(Exit->getTerminator());
  llvm::Value *DestCodePtr =
    cast<llvm::LoadInst>(Switch->getCondition())->getPointerOperand();

  // Compute the index of the new case we're adding to the switch.
  unsigned Index = Switch->getNumCases();

  const llvm::IntegerType *i32 = llvm::Type::getInt32Ty(CGF.getLLVMContext());
  llvm::ConstantInt *IndexV = llvm::ConstantInt::get(i32, Index);

  // Set the index in the origin block.
  new llvm::StoreInst(IndexV, DestCodePtr, Fixup.Origin);

  // Add a case to the switch.
  Switch->addCase(IndexV, Fixup.Destination);

  // Change the last branch to point to the cleanup entry block.
  Fixup.LatestBranch->setSuccessor(Fixup.LatestBranchIndex, Entry);

  // And finally, update the fixup.
  Fixup.LatestBranch = Switch;
  Fixup.LatestBranchIndex = Index;
}

/// Try to simplify both the entry and exit edges of a cleanup.
static void SimplifyCleanupEdges(CodeGenFunction &CGF,
                                 llvm::BasicBlock *Entry,
                                 llvm::BasicBlock *Exit) {

  // Given their current implementations, it's important to run these
  // in this order: SimplifyCleanupEntry will delete Entry if it can
  // be merged into its predecessor, which will then break
  // SimplifyCleanupExit if (as is common) Entry == Exit.

  SimplifyCleanupExit(Exit);
  SimplifyCleanupEntry(CGF, Entry);
}

/// Pops a cleanup block. If the block includes a normal cleanup, the
/// current insertion point is threaded through the cleanup, as are
/// any branch fixups on the cleanup.
void CodeGenFunction::PopCleanupBlock() {
  assert(!EHStack.empty() && "cleanup stack is empty!");
  assert(isa<EHCleanupScope>(*EHStack.begin()) && "top not a cleanup!");
  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
  assert(Scope.getFixupDepth() <= EHStack.getNumBranchFixups());

  // Handle the EH cleanup if (1) there is one and (2) it's different
  // from the normal cleanup.
  if (Scope.isEHCleanup() &&
      Scope.getEHEntry() != Scope.getNormalEntry()) {
    llvm::BasicBlock *EHEntry = Scope.getEHEntry();
    llvm::BasicBlock *EHExit = Scope.getEHExit();

    if (EHEntry->use_empty()) {
      DestroyCleanup(*this, EHEntry, EHExit);
    } else {
      // TODO: this isn't really the ideal location to put this EH
      // cleanup, but lazy emission is a better solution than trying
      // to pick a better spot.
      CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();
      EmitBlock(EHEntry);
      Builder.restoreIP(SavedIP);

      SimplifyCleanupEdges(*this, EHEntry, EHExit);
    }
  }

  // If we only have an EH cleanup, we don't really need to do much
  // here. Branch fixups just naturally drop down to the enclosing
  // cleanup scope.
  if (!Scope.isNormalCleanup()) {
    EHStack.popCleanup();
    assert(EHStack.getNumBranchFixups() == 0 || EHStack.hasNormalCleanups());
    return;
  }

  // Check whether the scope has any fixups that need to be threaded.
  unsigned FixupDepth = Scope.getFixupDepth();
  bool HasFixups = EHStack.getNumBranchFixups() != FixupDepth;

  // Grab the entry and exit blocks.
  llvm::BasicBlock *Entry = Scope.getNormalEntry();
  llvm::BasicBlock *Exit = Scope.getNormalExit();

  // Check whether anything's been threaded through the cleanup already.
  assert((Exit->getTerminator() == 0) == Entry->use_empty() &&
         "cleanup entry/exit mismatch");
  bool HasExistingBranches = !Entry->use_empty();

  // Check whether we need to emit a "fallthrough" branch through the
  // cleanup for the current insertion point.
  llvm::BasicBlock *FallThrough = Builder.GetInsertBlock();
  if (FallThrough && FallThrough->getTerminator())
    FallThrough = 0;

  // If *nothing* is using the cleanup, kill it.
  if (!FallThrough && !HasFixups && !HasExistingBranches) {
    EHStack.popCleanup();
    DestroyCleanup(*this, Entry, Exit);
    return;
  }

  // Otherwise, add the block to the function.
  EmitBlock(Entry);

  if (FallThrough)
    Builder.SetInsertPoint(Exit);
  else
    Builder.ClearInsertionPoint();

  // Fast case: if we don't have to add any fixups, and either
  // we don't have a fallthrough or the cleanup wasn't previously
  // used, then the setup above is sufficient.
  if (!HasFixups) {
    if (!FallThrough) {
      assert(HasExistingBranches && "no reason for cleanup but didn't kill before");
      EHStack.popCleanup();
      SimplifyCleanupEdges(*this, Entry, Exit);
      return;
    } else if (!HasExistingBranches) {
      assert(FallThrough && "no reason for cleanup but didn't kill before");
      // We can't simplify the exit edge in this case because we're
      // already inserting at the end of the exit block.
      EHStack.popCleanup();
      SimplifyCleanupEntry(*this, Entry);
      return;
    }
  }

  // Otherwise we're going to have to thread things through the cleanup.
  llvm::SmallVector<BranchFixup*, 8> Fixups;

  // Synthesize a fixup for the current insertion point.
  BranchFixup Cur;
  if (FallThrough) {
    Cur.Destination = createBasicBlock("cleanup.cont");
    Cur.LatestBranch = FallThrough->getTerminator();
    Cur.LatestBranchIndex = 0;
    Cur.Origin = Cur.LatestBranch;

    // Restore fixup invariant. EmitBlock added a branch to the cleanup
    // which we need to redirect to the destination.
    cast<llvm::BranchInst>(Cur.LatestBranch)->setSuccessor(0, Cur.Destination);

    Fixups.push_back(&Cur);
  } else {
    Cur.Destination = 0;
  }

  // Collect any "real" fixups we need to thread.
  for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
       I != E; ++I)
    if (EHStack.getBranchFixup(I).Destination)
      Fixups.push_back(&EHStack.getBranchFixup(I));

  assert(!Fixups.empty() && "no fixups, invariants broken!");

  // If there's only a single fixup to thread through, do so with
  // unconditional branches. This only happens if there's a single
  // branch and no fallthrough.
  if (Fixups.size() == 1 && !HasExistingBranches) {
    Fixups[0]->LatestBranch->setSuccessor(Fixups[0]->LatestBranchIndex, Entry);
    llvm::BranchInst *Br =
      llvm::BranchInst::Create(Fixups[0]->Destination, Exit);
    Fixups[0]->LatestBranch = Br;
    Fixups[0]->LatestBranchIndex = 0;

  // Otherwise, force a switch statement and thread everything through
  // the switch.
  } else {
    CreateCleanupSwitch(*this, Exit);
    for (unsigned I = 0, E = Fixups.size(); I != E; ++I)
      ThreadFixupThroughCleanup(*this, *Fixups[I], Entry, Exit);
  }

  // Emit the fallthrough destination block if necessary.
  if (Cur.Destination)
    EmitBlock(Cur.Destination);

  // We're finally done with the cleanup.
  EHStack.popCleanup();
}

void CodeGenFunction::EmitBranchThroughCleanup(JumpDest Dest) {
  if (!HaveInsertPoint())
    return;

  llvm::BranchInst* BI = Builder.CreateBr(Dest);
  // Create the branch.
  llvm::BranchInst *BI = Builder.CreateBr(Dest.Block);

  // If we're not in a cleanup scope, we don't need to worry about
  // fixups.
  if (!EHStack.hasNormalCleanups()) {
    Builder.ClearInsertionPoint();
    return;
  }

  // Initialize a fixup.
  BranchFixup Fixup;
  Fixup.Destination = Dest.Block;
  Fixup.Origin = BI;
  Fixup.LatestBranch = BI;
  Fixup.LatestBranchIndex = 0;

  // If we can't resolve the destination cleanup scope, just add this
  // to the current cleanup scope.
  if (!Dest.ScopeDepth.isValid()) {
    EHStack.addBranchFixup() = Fixup;
    Builder.ClearInsertionPoint();
    return;
  }

  for (EHScopeStack::iterator I = EHStack.begin(),
         E = EHStack.find(Dest.ScopeDepth); I != E; ++I) {
    if (isa<EHCleanupScope>(*I)) {
      EHCleanupScope &Scope = cast<EHCleanupScope>(*I);
      if (Scope.isNormalCleanup())
        ThreadFixupThroughCleanup(*this, Fixup, Scope.getNormalEntry(),
                                  Scope.getNormalExit());
    }
  }

  Builder.ClearInsertionPoint();
}

void CodeGenFunction::EmitBranchThroughEHCleanup(JumpDest Dest) {
  if (!HaveInsertPoint())
    return;

  // Create the branch.
  llvm::BranchInst *BI = Builder.CreateBr(Dest.Block);

  // If we're not in a cleanup scope, we don't need to worry about
  // fixups.
  if (!EHStack.hasEHCleanups()) {
    Builder.ClearInsertionPoint();
    return;
  }

  // Initialize a fixup.
  BranchFixup Fixup;
  Fixup.Destination = Dest.Block;
  Fixup.Origin = BI;
  Fixup.LatestBranch = BI;
  Fixup.LatestBranchIndex = 0;

  // We should never get invalid scope depths for these: invalid scope
  // depths only arise for as-yet-unemitted labels, and we can't do an
  // EH-unwind to one of those.
  assert(Dest.ScopeDepth.isValid() && "invalid scope depth on EH dest?");

  for (EHScopeStack::iterator I = EHStack.begin(),
         E = EHStack.find(Dest.ScopeDepth); I != E; ++I) {
    if (isa<EHCleanupScope>(*I)) {
      EHCleanupScope &Scope = cast<EHCleanupScope>(*I);
      if (Scope.isEHCleanup())
        ThreadFixupThroughCleanup(*this, Fixup, Scope.getEHEntry(),
                                  Scope.getEHExit());
    }
  }

  Builder.ClearInsertionPoint();

  // The stack is empty, no need to do any cleanup.
  if (CleanupEntries.empty())
    return;

  if (!Dest->getParent()) {
    // We are trying to branch to a block that hasn't been inserted yet.
    AddBranchFixup(BI);
    return;
  }

  BlockScopeMap::iterator I = BlockScopes.find(Dest);
  if (I == BlockScopes.end()) {
    // We are trying to jump to a block that is outside of any cleanup scope.
    AddBranchFixup(BI);
    return;
  }

  assert(I->second < CleanupEntries.size() &&
         "Trying to branch into cleanup region");

  if (I->second == CleanupEntries.size() - 1) {
    // We have a branch to a block in the same scope.
    return;
  }

  AddBranchFixup(BI);
}

@ -37,6 +37,7 @@ namespace llvm {
  class SwitchInst;
  class Twine;
  class Value;
  class CallSite;
}

namespace clang {
@ -69,12 +70,244 @@ namespace CodeGen {
  class CGRecordLayout;
  class CGBlockInfo;

/// A branch fixup. These are required when emitting a goto to a
/// label which hasn't been emitted yet. The goto is optimistically
/// emitted as a branch to the basic block for the label, and (if it
/// occurs in a scope with non-trivial cleanups) a fixup is added to
/// the innermost cleanup. When a (normal) cleanup is popped, any
/// unresolved fixups in that scope are threaded through the cleanup.
struct BranchFixup {
  /// The origin of the branch. Any switch-index stores required by
  /// cleanup threading are added before this instruction.
  llvm::Instruction *Origin;

  /// The destination of the branch.
  ///
  /// This can be set to null to indicate that this fixup was
  /// successfully resolved.
  llvm::BasicBlock *Destination;

  /// The last branch of the fixup. It is an invariant that
  /// LatestBranch->getSuccessor(LatestBranchIndex) == Destination.
  ///
  /// The branch is always either a BranchInst or a SwitchInst.
  llvm::TerminatorInst *LatestBranch;
  unsigned LatestBranchIndex;
};

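The LatestBranch invariant documented above is what ThreadFixupThroughCleanup relies on and re-establishes. A one-line checker makes it concrete (illustrative helper, not part of the patch):

    static void CheckFixupInvariant(const BranchFixup &Fixup) {
      // Must hold between threading steps: the most recent branch in
      // the chain still ultimately targets the fixup's destination.
      assert(Fixup.LatestBranch->getSuccessor(Fixup.LatestBranchIndex)
               == Fixup.Destination && "stale branch fixup");
    }
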
/// A stack of scopes which respond to exceptions, including cleanups
/// and catch blocks.
class EHScopeStack {
public:
  /// A saved depth on the scope stack. This is necessary because
  /// pushing scopes onto the stack invalidates iterators.
  class stable_iterator {
    friend class EHScopeStack;

    /// Offset from StartOfData to EndOfBuffer.
    ptrdiff_t Size;

    stable_iterator(ptrdiff_t Size) : Size(Size) {}

  public:
    static stable_iterator invalid() { return stable_iterator(-1); }
    stable_iterator() : Size(-1) {}

    bool isValid() const { return Size >= 0; }

    friend bool operator==(stable_iterator A, stable_iterator B) {
      return A.Size == B.Size;
    }
    friend bool operator!=(stable_iterator A, stable_iterator B) {
      return A.Size != B.Size;
    }
  };

private:
  // The implementation for this class is in CGException.h and
  // CGException.cpp; the definition is here because it's used as a
  // member of CodeGenFunction.

  /// The start of the scope-stack buffer, i.e. the allocated pointer
  /// for the buffer. All of these pointers are either simultaneously
  /// null or simultaneously valid.
  char *StartOfBuffer;

  /// The end of the buffer.
  char *EndOfBuffer;

  /// The first valid entry in the buffer.
  char *StartOfData;

  /// The innermost normal cleanup on the stack.
  stable_iterator InnermostNormalCleanup;

  /// The innermost EH cleanup on the stack.
  stable_iterator InnermostEHCleanup;

  /// The number of catches on the stack.
  unsigned CatchDepth;

  /// The current set of branch fixups. A branch fixup is a jump to
  /// an as-yet unemitted label, i.e. a label for which we don't yet
  /// know the EH stack depth. Whenever we pop a cleanup, we have
  /// to thread all the current branch fixups through it.
  ///
  /// Fixups are recorded as the Use of the respective branch or
  /// switch statement. The use points to the final destination.
  /// When popping out of a cleanup, these uses are threaded through
  /// the cleanup and adjusted to point to the new cleanup.
  ///
  /// Note that branches are allowed to jump into protected scopes
  /// in certain situations; e.g. the following code is legal:
  ///     struct A { ~A(); }; // trivial ctor, non-trivial dtor
  ///     goto foo;
  ///     A a;
  ///    foo:
  ///     bar();
  llvm::SmallVector<BranchFixup, 8> BranchFixups;

  char *allocate(size_t Size);

  void popNullFixups();

public:
  EHScopeStack() : StartOfBuffer(0), EndOfBuffer(0), StartOfData(0),
                   InnermostNormalCleanup(stable_end()),
                   InnermostEHCleanup(stable_end()),
                   CatchDepth(0) {}
  ~EHScopeStack() { delete[] StartOfBuffer; }

  /// Push a cleanup on the stack.
  void pushCleanup(llvm::BasicBlock *NormalEntry,
                   llvm::BasicBlock *NormalExit,
                   llvm::BasicBlock *EHEntry,
                   llvm::BasicBlock *EHExit);

  /// Pops a cleanup scope off the stack. This should only be called
  /// by CodeGenFunction::PopCleanupBlock.
  void popCleanup();

  /// Push a set of catch handlers on the stack. The catch is
  /// uninitialized and will need to have the given number of handlers
  /// set on it.
  class EHCatchScope *pushCatch(unsigned NumHandlers);

  /// Pops a catch scope off the stack.
  void popCatch();

  /// Push an exceptions filter on the stack.
  class EHFilterScope *pushFilter(unsigned NumFilters);

  /// Pops an exceptions filter off the stack.
  void popFilter();

  /// Push a terminate handler on the stack.
  void pushTerminate();

  /// Pops a terminate handler off the stack.
  void popTerminate();

  /// Determines whether the exception-scopes stack is empty.
  bool empty() const { return StartOfData == EndOfBuffer; }

  bool requiresLandingPad() const {
    return (CatchDepth || hasEHCleanups());
  }

  /// Determines whether there are any normal cleanups on the stack.
  bool hasNormalCleanups() const {
    return InnermostNormalCleanup != stable_end();
  }

  /// Returns the innermost normal cleanup on the stack, or
  /// stable_end() if there are no normal cleanups.
  stable_iterator getInnermostNormalCleanup() const {
    return InnermostNormalCleanup;
  }

  /// Determines whether there are any EH cleanups on the stack.
  bool hasEHCleanups() const {
    return InnermostEHCleanup != stable_end();
  }

  /// Returns the innermost EH cleanup on the stack, or stable_end()
  /// if there are no EH cleanups.
  stable_iterator getInnermostEHCleanup() const {
    return InnermostEHCleanup;
  }

  /// An unstable reference to a scope-stack depth. Invalidated by
  /// pushes but not pops.
  class iterator;

  /// Returns an iterator pointing to the innermost EH scope.
  iterator begin() const;

  /// Returns an iterator pointing to the outermost EH scope.
  iterator end() const;

  /// Create a stable reference to the top of the EH stack. The
  /// returned reference is valid until that scope is popped off the
  /// stack.
  stable_iterator stable_begin() const {
    return stable_iterator(EndOfBuffer - StartOfData);
  }

  /// Create a stable reference to the bottom of the EH stack.
  static stable_iterator stable_end() {
    return stable_iterator(0);
  }

  /// Translates an iterator into a stable_iterator.
  stable_iterator stabilize(iterator it) const;

  /// Finds the nearest cleanup enclosing the given iterator.
  /// Returns stable_iterator::invalid() if there are no such cleanups.
  stable_iterator getEnclosingEHCleanup(iterator it) const;

  /// Turn a stable reference to a scope depth into an unstable pointer
  /// to the EH stack.
  iterator find(stable_iterator save) const;

  /// Removes the cleanup pointed to by the given stable_iterator.
  void removeCleanup(stable_iterator save);

  /// Add a branch fixup to the current cleanup scope.
  BranchFixup &addBranchFixup() {
    assert(hasNormalCleanups() && "adding fixup in scope without cleanups");
    BranchFixups.push_back(BranchFixup());
    return BranchFixups.back();
  }

  unsigned getNumBranchFixups() const { return BranchFixups.size(); }
  BranchFixup &getBranchFixup(unsigned I) {
    assert(I < getNumBranchFixups());
    return BranchFixups[I];
  }

  /// Mark any branch fixups leading to the given block as resolved.
  void resolveBranchFixups(llvm::BasicBlock *Dest);
};

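The stable_iterator design is the heart of this class: a saved depth survives later pushes, so callers can bracket a region and pop back to it. A sketch of the intended usage, mirroring PopCleanupBlocks in the .cpp changes above (EmitGuardedRegion is hypothetical):

    void CodeGenFunction::EmitGuardedRegion(llvm::BasicBlock *Entry,
                                            llvm::BasicBlock *Exit) {
      // Save the depth *before* pushing; pushes invalidate plain
      // iterators but not stable_iterators.
      EHScopeStack::stable_iterator Saved = EHStack.stable_begin();

      // Here the same blocks serve both the normal and the EH edge.
      EHStack.pushCleanup(Entry, Exit, Entry, Exit);

      // ... emit code that may branch or unwind through the cleanup ...

      PopCleanupBlocks(Saved);  // pops everything pushed since Saved
    }
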
/// CodeGenFunction - This class organizes the per-function state that is used
/// while generating LLVM code.
class CodeGenFunction : public BlockFunction {
  CodeGenFunction(const CodeGenFunction&); // DO NOT IMPLEMENT
  void operator=(const CodeGenFunction&);  // DO NOT IMPLEMENT
public:
  /// A jump destination is a pair of a basic block and a cleanup
  /// depth. They are used to implement direct jumps across cleanup
  /// scopes, e.g. goto, break, continue, and return.
  struct JumpDest {
    JumpDest() : Block(0), ScopeDepth() {}
    JumpDest(llvm::BasicBlock *Block, EHScopeStack::stable_iterator Depth)
      : Block(Block), ScopeDepth(Depth) {}

    llvm::BasicBlock *Block;
    EHScopeStack::stable_iterator ScopeDepth;
  };

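A JumpDest is deliberately cheap to construct; getJumpDestInCurrentScope, declared further down, is just this pairing. A sketch for emphasis (MakeDest is a hypothetical free function, not new API):

    CodeGenFunction::JumpDest MakeDest(CodeGenFunction &CGF,
                                       const char *Name) {
      // Pair the block with the EH-stack depth at which it will live,
      // so a later EmitBranchThroughCleanup knows how many cleanups
      // to thread through.
      return CodeGenFunction::JumpDest(CGF.createBasicBlock(Name),
                                       CGF.EHStack.stable_begin());
    }
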
  CodeGenModule &CGM;  // Per-module state.
  const TargetInfo &Target;

@ -94,7 +327,8 @@ public:
  GlobalDecl CurGD;

  /// ReturnBlock - Unified return block.
  llvm::BasicBlock *ReturnBlock;
  JumpDest ReturnBlock;

  /// ReturnValue - The temporary alloca to hold the return value. This is null
  /// iff the function has no return value.
  llvm::Value *ReturnValue;
@ -113,141 +347,99 @@ public:
  /// \brief A mapping from NRVO variables to the flags used to indicate
  /// when the NRVO has been applied to this variable.
  llvm::DenseMap<const VarDecl *, llvm::Value *> NRVOFlags;


  EHScopeStack EHStack;

  /// The exception slot. All landing pads write the current
  /// exception pointer into this alloca.
  llvm::Value *ExceptionSlot;

  /// Emits a landing pad for the current EH stack.
  llvm::BasicBlock *EmitLandingPad();

  llvm::BasicBlock *getInvokeDestImpl();

public:
  /// ObjCEHValueStack - Stack of Objective-C exception values, used for
  /// rethrows.
  llvm::SmallVector<llvm::Value*, 8> ObjCEHValueStack;

  /// PushCleanupBlock - Push a new cleanup entry on the stack and set the
  /// passed in block as the cleanup block.
  void PushCleanupBlock(llvm::BasicBlock *CleanupEntryBlock,
                        llvm::BasicBlock *CleanupExitBlock,
                        llvm::BasicBlock *PreviousInvokeDest,
                        bool EHOnly = false);
  void PushCleanupBlock(llvm::BasicBlock *CleanupEntryBlock) {
    PushCleanupBlock(CleanupEntryBlock, 0, getInvokeDest(), false);
  }

  /// CleanupBlockInfo - A struct representing a popped cleanup block.
  struct CleanupBlockInfo {
    /// CleanupEntryBlock - the cleanup entry block
    llvm::BasicBlock *CleanupBlock;

    /// SwitchBlock - the block (if any) containing the switch instruction used
    /// for jumping to the final destination.
    llvm::BasicBlock *SwitchBlock;

    /// EndBlock - the default destination for the switch instruction.
    llvm::BasicBlock *EndBlock;

    /// EHOnly - True iff this cleanup should only be performed on the
    /// exceptional edge.
    bool EHOnly;

    CleanupBlockInfo(llvm::BasicBlock *cb, llvm::BasicBlock *sb,
                     llvm::BasicBlock *eb, bool ehonly = false)
      : CleanupBlock(cb), SwitchBlock(sb), EndBlock(eb), EHOnly(ehonly) {}
  // A struct holding information about a finally block's IR
  // generation. For now, doesn't actually hold anything.
  struct FinallyInfo {
  };

  /// EHCleanupBlock - RAII object that will create a cleanup block for the
  /// exceptional edge and set the insert point to that block. When destroyed,
  /// it creates the cleanup edge and sets the insert point to the previous
  /// block.
  class EHCleanupBlock {
    CodeGenFunction& CGF;
    llvm::BasicBlock *PreviousInsertionBlock;
    llvm::BasicBlock *CleanupHandler;
    llvm::BasicBlock *PreviousInvokeDest;
  public:
    EHCleanupBlock(CodeGenFunction &cgf)
      : CGF(cgf),
        PreviousInsertionBlock(CGF.Builder.GetInsertBlock()),
        CleanupHandler(CGF.createBasicBlock("ehcleanup", CGF.CurFn)),
        PreviousInvokeDest(CGF.getInvokeDest()) {
      llvm::BasicBlock *TerminateHandler = CGF.getTerminateHandler();
      CGF.Builder.SetInsertPoint(CleanupHandler);
      CGF.setInvokeDest(TerminateHandler);
    }
    ~EHCleanupBlock();
  };
  FinallyInfo EnterFinallyBlock(const Stmt *Stmt,
                                llvm::Constant *BeginCatchFn,
                                llvm::Constant *EndCatchFn,
                                llvm::Constant *RethrowFn);
  void ExitFinallyBlock(FinallyInfo &FinallyInfo);

  /// PopCleanupBlock - Will pop the cleanup entry on the stack, process all
  /// branch fixups and return a block info struct with the switch block and end
  /// block. This will also reset the invoke handler to the previous value
  /// from when the cleanup block was created.
  CleanupBlockInfo PopCleanupBlock();
  enum CleanupKind { NormalAndEHCleanup, EHCleanup, NormalCleanup };

  /// DelayedCleanupBlock - RAII object that will create a cleanup block and set
  /// the insert point to that block. When destructed, it sets the insert point
  /// to the previous block and pushes a new cleanup entry on the stack.
  class DelayedCleanupBlock {
    CodeGenFunction& CGF;
    llvm::BasicBlock *CurBB;
    llvm::BasicBlock *CleanupEntryBB;
    llvm::BasicBlock *CleanupExitBB;
    llvm::BasicBlock *CurInvokeDest;
    bool EHOnly;
  /// PushDestructorCleanup - Push a cleanup to call the
  /// complete-object destructor of an object of the given type at the
  /// given address. Does nothing if T is not a C++ class type with a
  /// non-trivial destructor.
  void PushDestructorCleanup(QualType T, llvm::Value *Addr);

  /// PopCleanupBlock - Will pop the cleanup entry on the stack and
  /// process all branch fixups.
  void PopCleanupBlock();

  /// CleanupBlock - RAII object that will create a cleanup block and
  /// set the insert point to that block. When destructed, it sets the
  /// insert point to the previous block and pushes a new cleanup
  /// entry on the stack.
  class CleanupBlock {
    CodeGenFunction &CGF;
    CGBuilderTy::InsertPoint SavedIP;
    llvm::BasicBlock *NormalCleanupEntryBB;
    llvm::BasicBlock *NormalCleanupExitBB;
    llvm::BasicBlock *EHCleanupEntryBB;

  public:
    DelayedCleanupBlock(CodeGenFunction &cgf, bool ehonly = false)
      : CGF(cgf), CurBB(CGF.Builder.GetInsertBlock()),
        CleanupEntryBB(CGF.createBasicBlock("cleanup")),
        CleanupExitBB(0),
        CurInvokeDest(CGF.getInvokeDest()),
        EHOnly(ehonly) {
      CGF.Builder.SetInsertPoint(CleanupEntryBB);
    }
    CleanupBlock(CodeGenFunction &CGF, CleanupKind Kind);

    llvm::BasicBlock *getCleanupExitBlock() {
      if (!CleanupExitBB)
        CleanupExitBB = CGF.createBasicBlock("cleanup.exit");
      return CleanupExitBB;
    }
    /// If we're currently writing a normal cleanup, tie that off and
    /// start writing an EH cleanup.
    void beginEHCleanup();

    ~DelayedCleanupBlock() {
      CGF.PushCleanupBlock(CleanupEntryBB, CleanupExitBB, CurInvokeDest,
                           EHOnly);
      // FIXME: This is silly, move this into the builder.
      if (CurBB)
        CGF.Builder.SetInsertPoint(CurBB);
      else
        CGF.Builder.ClearInsertionPoint();
    }
    ~CleanupBlock();
  };

  /// \brief Enters a new scope for capturing cleanups, all of which will be
  /// executed once the scope is exited.
  class CleanupScope {
  /// \brief Enters a new scope for capturing cleanups, all of which
  /// will be executed once the scope is exited.
  class RunCleanupsScope {
    CodeGenFunction& CGF;
    size_t CleanupStackDepth;
    EHScopeStack::stable_iterator CleanupStackDepth;
    bool OldDidCallStackSave;
    bool PerformCleanup;

    CleanupScope(const CleanupScope &); // DO NOT IMPLEMENT
    CleanupScope &operator=(const CleanupScope &); // DO NOT IMPLEMENT
    RunCleanupsScope(const RunCleanupsScope &); // DO NOT IMPLEMENT
    RunCleanupsScope &operator=(const RunCleanupsScope &); // DO NOT IMPLEMENT

  public:
    /// \brief Enter a new cleanup scope.
    explicit CleanupScope(CodeGenFunction &CGF)
    explicit RunCleanupsScope(CodeGenFunction &CGF)
      : CGF(CGF), PerformCleanup(true)
    {
      CleanupStackDepth = CGF.CleanupEntries.size();
      CleanupStackDepth = CGF.EHStack.stable_begin();
      OldDidCallStackSave = CGF.DidCallStackSave;
    }

    /// \brief Exit this cleanup scope, emitting any accumulated
    /// cleanups.
    ~CleanupScope() {
    ~RunCleanupsScope() {
      if (PerformCleanup) {
        CGF.DidCallStackSave = OldDidCallStackSave;
        CGF.EmitCleanupBlocks(CleanupStackDepth);
        CGF.PopCleanupBlocks(CleanupStackDepth);
      }
    }

    /// \brief Determine whether this scope requires any cleanups.
    bool requiresCleanups() const {
      return CGF.CleanupEntries.size() > CleanupStackDepth;
      return CGF.EHStack.stable_begin() != CleanupStackDepth;
    }

    /// \brief Force the emission of cleanups now, instead of waiting

@ -255,42 +447,39 @@ public:
    void ForceCleanup() {
      assert(PerformCleanup && "Already forced cleanup");
      CGF.DidCallStackSave = OldDidCallStackSave;
      CGF.EmitCleanupBlocks(CleanupStackDepth);
      CGF.PopCleanupBlocks(CleanupStackDepth);
      PerformCleanup = false;
    }
  };

  /// CXXTemporariesCleanupScope - Enters a new scope for catching live
  /// temporaries, all of which will be popped once the scope is exited.
  class CXXTemporariesCleanupScope {
    CodeGenFunction &CGF;
    size_t NumLiveTemporaries;

    // DO NOT IMPLEMENT
    CXXTemporariesCleanupScope(const CXXTemporariesCleanupScope &);
    CXXTemporariesCleanupScope &operator=(const CXXTemporariesCleanupScope &);

  public:
    explicit CXXTemporariesCleanupScope(CodeGenFunction &CGF)
      : CGF(CGF), NumLiveTemporaries(CGF.LiveTemporaries.size()) { }

    ~CXXTemporariesCleanupScope() {
      while (CGF.LiveTemporaries.size() > NumLiveTemporaries)
        CGF.PopCXXTemporary();
    }
  };

  /// PopCleanupBlocks - Takes the old cleanup stack size and emits
  /// the cleanup blocks that have been added.
  void PopCleanupBlocks(EHScopeStack::stable_iterator OldCleanupStackSize);

  /// EmitCleanupBlocks - Takes the old cleanup stack size and emits the cleanup
  /// blocks that have been added.
  void EmitCleanupBlocks(size_t OldCleanupStackSize);
  /// The given basic block lies in the current EH scope, but may be a
  /// target of a potentially scope-crossing jump; get a stable handle
  /// to which we can perform this jump later.
  JumpDest getJumpDestInCurrentScope(llvm::BasicBlock *Target) const {
    return JumpDest(Target, EHStack.stable_begin());
  }

  /// EmitBranchThroughCleanup - Emit a branch from the current insert block
  /// through the cleanup handling code (if any) and then on to \arg Dest.
  ///
  /// FIXME: Maybe this should really be in EmitBranch? Don't we always want
  /// this behavior for branches?
  void EmitBranchThroughCleanup(llvm::BasicBlock *Dest);
  /// The given basic block lies in the current EH scope, but may be a
  /// target of a potentially scope-crossing jump; get a stable handle
  /// to which we can perform this jump later.
  JumpDest getJumpDestInCurrentScope(const char *Name = 0) {
    return JumpDest(createBasicBlock(Name), EHStack.stable_begin());
  }

  /// EmitBranchThroughCleanup - Emit a branch from the current insert
  /// block through the normal cleanup handling code (if any) and then
  /// on to \arg Dest.
  void EmitBranchThroughCleanup(JumpDest Dest);

  /// EmitBranchThroughEHCleanup - Emit a branch from the current
  /// insert block through the EH cleanup handling code (if any) and
  /// then on to \arg Dest.
  void EmitBranchThroughEHCleanup(JumpDest Dest);

  /// BeginConditionalBranch - Should be called before a conditional part of an
  /// expression is emitted. For example, before the RHS of the expression below
@ -327,16 +516,16 @@ private:
|
|||
llvm::DenseMap<const Decl*, llvm::Value*> LocalDeclMap;
|
||||
|
||||
/// LabelMap - This keeps track of the LLVM basic block for each C label.
|
||||
llvm::DenseMap<const LabelStmt*, llvm::BasicBlock*> LabelMap;
|
||||
llvm::DenseMap<const LabelStmt*, JumpDest> LabelMap;
|
||||
|
||||
// BreakContinueStack - This keeps track of where break and continue
|
||||
// statements should jump to.
|
||||
struct BreakContinue {
|
||||
BreakContinue(llvm::BasicBlock *bb, llvm::BasicBlock *cb)
|
||||
: BreakBlock(bb), ContinueBlock(cb) {}
|
||||
BreakContinue(JumpDest Break, JumpDest Continue)
|
||||
: BreakBlock(Break), ContinueBlock(Continue) {}
|
||||
|
||||
llvm::BasicBlock *BreakBlock;
|
||||
llvm::BasicBlock *ContinueBlock;
|
||||
JumpDest BreakBlock;
|
||||
JumpDest ContinueBlock;
|
||||
};
|
||||
llvm::SmallVector<BreakContinue, 8> BreakContinueStack;
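
With JumpDests stored here, break and continue can cross cleanup scopes
without branch fixups. A sketch of loop entry under the new scheme
(hypothetical emitter code inside CodeGenFunction, using the helpers
declared earlier):

  // On entering a loop body, save scope-aware destinations so that
  // break/continue can branch through any cleanups pushed inside.
  JumpDest LoopExit = getJumpDestInCurrentScope("loop.end");
  JumpDest LoopCond = getJumpDestInCurrentScope("loop.cond");
  BreakContinueStack.push_back(BreakContinue(LoopExit, LoopCond));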

@@ -364,44 +553,9 @@ private:
/// calling llvm.stacksave for multiple VLAs in the same scope.
bool DidCallStackSave;

struct CleanupEntry {
  /// CleanupEntryBlock - The block of code that does the actual cleanup.
  llvm::BasicBlock *CleanupEntryBlock;

  /// CleanupExitBlock - The cleanup exit block.
  llvm::BasicBlock *CleanupExitBlock;

  /// Blocks - Basic blocks that were emitted in the current cleanup scope.
  std::vector<llvm::BasicBlock *> Blocks;

  /// BranchFixups - Branch instructions to basic blocks that haven't been
  /// inserted into the current function yet.
  std::vector<llvm::BranchInst *> BranchFixups;

  /// PreviousInvokeDest - The invoke handler from the start of the cleanup
  /// region.
  llvm::BasicBlock *PreviousInvokeDest;

  /// EHOnly - Perform this only on the exceptional edge, not the main edge.
  bool EHOnly;

  explicit CleanupEntry(llvm::BasicBlock *CleanupEntryBlock,
                        llvm::BasicBlock *CleanupExitBlock,
                        llvm::BasicBlock *PreviousInvokeDest,
                        bool ehonly)
    : CleanupEntryBlock(CleanupEntryBlock),
      CleanupExitBlock(CleanupExitBlock),
      PreviousInvokeDest(PreviousInvokeDest),
      EHOnly(ehonly) {}
};

/// CleanupEntries - Stack of cleanup entries.
llvm::SmallVector<CleanupEntry, 8> CleanupEntries;

typedef llvm::DenseMap<llvm::BasicBlock*, size_t> BlockScopeMap;

/// BlockScopes - Map of which "cleanup scope" each basic block belongs to.
BlockScopeMap BlockScopes;
/// A block containing a single 'unreachable' instruction. Created
/// lazily by getUnreachableBlock().
llvm::BasicBlock *UnreachableBlock;

/// CXXThisDecl - When generating code for a C++ member function,
/// this will hold the implicit 'this' declaration.

@@ -414,31 +568,6 @@ private:
ImplicitParamDecl *CXXVTTDecl;
llvm::Value *CXXVTTValue;

/// CXXLiveTemporaryInfo - Holds information about a live C++ temporary.
struct CXXLiveTemporaryInfo {
  /// Temporary - The live temporary.
  const CXXTemporary *Temporary;

  /// ThisPtr - The pointer to the temporary.
  llvm::Value *ThisPtr;

  /// DtorBlock - The destructor block.
  llvm::BasicBlock *DtorBlock;

  /// CondPtr - If this is a conditional temporary, this is the pointer to the
  /// condition variable that states whether the destructor should be called
  /// or not.
  llvm::Value *CondPtr;

  CXXLiveTemporaryInfo(const CXXTemporary *temporary,
                       llvm::Value *thisptr, llvm::BasicBlock *dtorblock,
                       llvm::Value *condptr)
    : Temporary(temporary), ThisPtr(thisptr), DtorBlock(dtorblock),
      CondPtr(condptr) { }
};

llvm::SmallVector<CXXLiveTemporaryInfo, 4> LiveTemporaries;

/// ConditionalBranchLevel - Contains the nesting level of the current
/// conditional branch. This is used so that we know if a temporary should be
/// destroyed conditionally.

@@ -454,6 +583,7 @@ private:
/// number that holds the value.
unsigned getByRefValueLLVMField(const ValueDecl *VD) const;

llvm::BasicBlock *TerminateLandingPad;
llvm::BasicBlock *TerminateHandler;
llvm::BasicBlock *TrapBB;

@@ -463,8 +593,22 @@ public:
ASTContext &getContext() const;
CGDebugInfo *getDebugInfo() { return DebugInfo; }

llvm::BasicBlock *getInvokeDest() { return InvokeDest; }
void setInvokeDest(llvm::BasicBlock *B) { InvokeDest = B; }
/// Returns a pointer to the function's exception object slot, which
/// is assigned in every landing pad.
llvm::Value *getExceptionSlot();

llvm::BasicBlock *getUnreachableBlock() {
  if (!UnreachableBlock) {
    UnreachableBlock = createBasicBlock("unreachable");
    new llvm::UnreachableInst(getLLVMContext(), UnreachableBlock);
  }
  return UnreachableBlock;
}

llvm::BasicBlock *getInvokeDest() {
  if (!EHStack.requiresLandingPad()) return 0;
  return getInvokeDestImpl();
}
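
getInvokeDest now hands out a landing pad only when the EH stack actually
requires one; callers can treat a null result as permission to emit a plain
call. A rough sketch of a call site consuming it (editor's illustration;
'Builder', 'Callee', and 'Args' are assumed, not from this patch):

  // If no cleanups/handlers are active, a plain call suffices; otherwise
  // emit an invoke that unwinds to the lazily-built landing pad.
  if (llvm::BasicBlock *LP = CGF.getInvokeDest()) {
    llvm::BasicBlock *Cont = CGF.createBasicBlock("invoke.cont");
    Builder.CreateInvoke(Callee, Cont, LP, Args.begin(), Args.end());
    CGF.EmitBlock(Cont);
  } else {
    Builder.CreateCall(Callee, Args.begin(), Args.end());
  }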

llvm::LLVMContext &getLLVMContext() { return VMContext; }

@@ -594,7 +738,12 @@ public:
/// EmitEndEHSpec - Emit the end of the exception spec.
void EmitEndEHSpec(const Decl *D);

/// getTerminateHandler - Return a handler that just calls terminate.
/// getTerminateLandingPad - Return a landing pad that just calls terminate.
llvm::BasicBlock *getTerminateLandingPad();

/// getTerminateHandler - Return a handler (not a landing pad, just
/// a catch handler) that just calls terminate. This is used when
/// a terminate scope encloses a try.
llvm::BasicBlock *getTerminateHandler();

const llvm::Type *ConvertTypeForMem(QualType T);

@@ -627,7 +776,7 @@ public:
/// getBasicBlockForLabel - Return the LLVM basic block that the specified
/// label maps to.
llvm::BasicBlock *getBasicBlockForLabel(const LabelStmt *S);
JumpDest getJumpDestForLabel(const LabelStmt *S);

/// SimplifyForwardingBlocks - If the given basic block is only a branch to
/// another basic block, simplify it. This assumes that no other code could

@@ -855,8 +1004,7 @@ public:
void EmitNewArrayInitializer(const CXXNewExpr *E, llvm::Value *NewPtr,
                             llvm::Value *NumElements);

void PushCXXTemporary(const CXXTemporary *Temporary, llvm::Value *Ptr);
void PopCXXTemporary();
void EmitCXXTemporary(const CXXTemporary *Temporary, llvm::Value *Ptr);

llvm::Value *EmitCXXNewExpr(const CXXNewExpr *E);
void EmitCXXDeleteExpr(const CXXDeleteExpr *E);

@@ -887,10 +1035,13 @@ public:
/// This function can be called with a null (unreachable) insert point.
void EmitBlockVarDecl(const VarDecl &D);

typedef void SpecialInitFn(CodeGenFunction &Init, const VarDecl &D,
                           llvm::Value *Address);

/// EmitLocalBlockVarDecl - Emit a local block variable declaration.
///
/// This function can be called with a null (unreachable) insert point.
void EmitLocalBlockVarDecl(const VarDecl &D);
void EmitLocalBlockVarDecl(const VarDecl &D, SpecialInitFn *SpecialInit = 0);

void EmitStaticBlockVarDecl(const VarDecl &D,
                            llvm::GlobalValue::LinkageTypes Linkage);
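
The SpecialInitFn hook lets a caller substitute custom initialization for
the variable's normal initializer path. A hypothetical callback matching
the typedef (the name, the body, and the EmitNullInitialization helper are
assumptions for illustration, not part of this patch):

  // A custom initializer with the SpecialInitFn signature: zero the
  // variable's storage instead of emitting its initializer expression.
  static void ZeroInit(CodeGenFunction &CGF, const VarDecl &D,
                       llvm::Value *Address) {
    CGF.EmitNullInitialization(Address, D.getType());
  }
  // ... at the declaration site:
  // CGF.EmitLocalBlockVarDecl(D, &ZeroInit);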

@@ -951,11 +1102,7 @@ public:
void EmitObjCAtSynchronizedStmt(const ObjCAtSynchronizedStmt &S);

llvm::Constant *getUnwindResumeOrRethrowFn();
struct CXXTryStmtInfo {
  llvm::BasicBlock *SavedLandingPad;
  llvm::BasicBlock *HandlerBlock;
  llvm::BasicBlock *FinallyBlock;
};
struct CXXTryStmtInfo {};
CXXTryStmtInfo EnterCXXTryStmt(const CXXTryStmt &S);
void ExitCXXTryStmt(const CXXTryStmt &S, CXXTryStmtInfo Info);

@@ -1128,6 +1275,11 @@ public:
RValue EmitCallExpr(const CallExpr *E,
                    ReturnValueSlot ReturnValue = ReturnValueSlot());

llvm::CallSite EmitCallOrInvoke(llvm::Value *Callee,
                                llvm::Value * const *ArgBegin,
                                llvm::Value * const *ArgEnd,
                                const llvm::Twine &Name = "");
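
EmitCallOrInvoke centralizes the call-versus-invoke decision, so individual
emitters no longer track an invoke destination themselves. A hedged usage
sketch ('RethrowFn' and 'ExnObj' are placeholders, not from this patch):

  // Emit a runtime call that may throw; whether this becomes 'call' or
  // 'invoke' now depends solely on the current state of the EH stack.
  llvm::Value *Args[] = { ExnObj };
  CGF.EmitCallOrInvoke(RethrowFn, Args, Args + 1);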

llvm::Value *BuildVirtualCall(const CXXMethodDecl *MD, llvm::Value *This,
                              const llvm::Type *Ty);
llvm::Value *BuildVirtualCall(const CXXDestructorDecl *DD, CXXDtorType Type,

@@ -1331,7 +1483,6 @@ public:
RValue EmitDelegateCallArg(const VarDecl *Param);

private:

void EmitReturnOfRValue(RValue RV, QualType Ty);

/// ExpandTypeFromArgs - Reconstruct a structure of type \arg Ty

@@ -1354,13 +1505,6 @@ private:
                         const TargetInfo::ConstraintInfo &Info,
                         const Expr *InputExpr, std::string &ConstraintStr);

/// EmitCleanupBlock - emits a single cleanup block.
void EmitCleanupBlock();

/// AddBranchFixup - adds a branch instruction to the list of fixups for the
/// current cleanup scope.
void AddBranchFixup(llvm::BranchInst *BI);

/// EmitCallArgs - Emit call arguments for a function.
/// The CallArgTypeInfo parameter is used for iterating over the known
/// argument types of the function being called.

@@ -27,16 +27,18 @@ struct X5 : X0, X4 { };

void test(X2 x2, X3 x3, X5 x5) {
  // CHECK: define linkonce_odr void @_ZN2X2C1ERKS_
  // CHECK-NOT: define
  // CHECK: call void @__cxa_call_unexpected
  // CHECK-NOT: define
  // CHECK-NOT: }
  // CHECK: ret void
  // CHECK-NOT: }
  // CHECK: call void @__cxa_call_unexpected
  // CHECK: }
  X2 x2a(x2);
  // CHECK: define linkonce_odr void @_ZN2X3C1ERKS_
  // CHECK-NOT: define
  // CHECK: call void @__cxa_call_unexpected
  // CHECK-NOT: define
  // CHECK-NOT: }
  // CHECK: ret void
  // CHECK-NOT: }
  // CHECK: call void @__cxa_call_unexpected
  // CHECK: }
  X3 x3a(x3);
  // CHECK: define linkonce_odr void @_ZN2X5C1ERS_
  // CHECK-NOT: call void @__cxa_call_unexpected

@@ -58,10 +60,11 @@ struct X9 : X6, X7 { };

void test() {
  // CHECK: define linkonce_odr void @_ZN2X8C1Ev
  // CHECK-NOT: define
  // CHECK: call void @__cxa_call_unexpected
  // CHECK-NOT: define
  // CHECK-NOT: }
  // CHECK: ret void
  // CHECK-NOT: }
  // CHECK: call void @__cxa_call_unexpected
  // CHECK: }
  X8();
  // CHECK: define linkonce_odr void @_ZN2X9C1Ev
  // CHECK-NOT: call void @__cxa_call_unexpected

@@ -96,66 +96,132 @@ void switch_destruct(int z) {

int foo();

// CHECK: define void @_Z14while_destructi
void while_destruct(int z) {
  // CHECK: define void @_Z14while_destructi
  // CHECK: {{while.cond:|:3}}
  // CHECK: [[Z:%.*]] = alloca i32
  // CHECK: [[CLEANUPDEST:%.*]] = alloca i32
  while (X x = X()) {
    // CHECK: call void @_ZN1XC1Ev
    // CHECK-NEXT: [[COND:%.*]] = call zeroext i1 @_ZN1XcvbEv
    // CHECK-NEXT: br i1 [[COND]]

    // CHECK: {{while.body:|:5}}
    // CHECK: store i32 21
    // Loop-exit staging block.
    // CHECK: store i32 1, i32* [[CLEANUPDEST]]
    // CHECK-NEXT: br

    // While body.
    // CHECK: store i32 21, i32* [[Z]]
    // CHECK: store i32 2, i32* [[CLEANUPDEST]]
    // CHECK-NEXT: br
    z = 21;

    // CHECK: {{while.cleanup:|:6}}
    // Cleanup.
    // CHECK: call void @_ZN1XD1Ev
    // CHECK-NEXT: [[DEST:%.*]] = load i32* [[CLEANUPDEST]]
    // CHECK-NEXT: switch i32 [[DEST]]
  }
  // CHECK: {{while.end|:8}}
  // CHECK: store i32 22

  // CHECK: store i32 22, i32* [[Z]]
  z = 22;

  // CHECK: call void @_Z4getXv
  // CHECK: call zeroext i1 @_ZN1XcvbEv
  // CHECK: call void @_ZN1XD1Ev
  // CHECK: br
  // CHECK-NEXT: call zeroext i1 @_ZN1XcvbEv
  // CHECK-NEXT: call void @_ZN1XD1Ev
  // CHECK-NEXT: br
  while(getX()) { }

  // CHECK: store i32 25
  // CHECK: store i32 25, i32* [[Z]]
  z = 25;

  // CHECK: ret
}
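
The CLEANUPDEST pattern these checks pin down amounts to a destination
index plus one shared cleanup and a switch. A rough C++ analogy (editor's
sketch reusing the test's X; it mimics, not reproduces, the emitted IR):

  // Each exit path records an index, one shared cleanup runs the
  // destructor, then a switch forwards control to the real destination.
  int analogy(bool exit_loop) {
    int cleanup_dest;
    {
      X x;                        // has a non-trivial destructor
      cleanup_dest = exit_loop ? 1 : 2;
    }                             // shared cleanup: x.~X() runs once here
    switch (cleanup_dest) {       // then control fans back out
    case 1:  return 22;           // like %while.end
    default: return 21;           // like re-entering %while.cond
    }
  }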

// CHECK: define void @_Z12for_destructi(
void for_destruct(int z) {
  // CHECK: define void @_Z12for_destruct
  // CHECK: [[Z:%.*]] = alloca i32
  // CHECK: [[XDEST:%.*]] = alloca i32
  // CHECK: [[I:%.*]] = alloca i32
  // CHECK: call void @_ZN1YC1Ev
  for(Y y = Y(); X x = X(); ++z)
  // CHECK: {{for.cond:|:4}}
  // CHECK-NEXT: br
  //   -> %for.cond

  for(Y y = Y(); X x = X(); ++z) {
    // %for.cond: The loop condition.
    // CHECK: call void @_ZN1XC1Ev
    // CHECK: {{for.body:|:6}}
    // CHECK: store i32 23
    // CHECK-NEXT: [[COND:%.*]] = call zeroext i1 @_ZN1XcvbEv(
    // CHECK-NEXT: br i1 [[COND]]
    //   -> %for.body, %for.cond.cleanup

    // %for.cond.cleanup: Exit cleanup staging.
    // CHECK: store i32 1, i32* [[XDEST]]
    // CHECK-NEXT: br
    //   -> %cleanup

    // %for.body:
    // CHECK: store i32 23, i32* [[Z]]
    // CHECK-NEXT: br
    //   -> %for.inc
    z = 23;
    // CHECK: {{for.inc:|:7}}
    // CHECK: br label %{{for.cond.cleanup|10}}
    // CHECK: {{for.cond.cleanup:|:10}}

    // %for.inc:
    // CHECK: [[TMP:%.*]] = load i32* [[Z]]
    // CHECK-NEXT: [[INC:%.*]] = add nsw i32 [[TMP]], 1
    // CHECK-NEXT: store i32 [[INC]], i32* [[Z]]
    // CHECK-NEXT: store i32 2, i32* [[XDEST]]
    // CHECK-NEXT: br
    //   -> %cleanup

    // %cleanup: Destroys X.
    // CHECK: call void @_ZN1XD1Ev
    // CHECK: {{for.end:|:12}}
    // CHECK: call void @_ZN1YD1Ev
    // CHECK-NEXT: [[YDESTTMP:%.*]] = load i32* [[XDEST]]
    // CHECK-NEXT: switch i32 [[YDESTTMP]]
    //   1 -> %cleanup4, 2 -> %cleanup.cont

    // %cleanup.cont: (eliminable)
    // CHECK: br
    //   -> %for.cond

    // %cleanup4: Destroys Y.
    // CHECK: call void @_ZN1YD1Ev(
    // CHECK-NEXT: br
    //   -> %for.end
  }

  // %for.end:
  // CHECK: store i32 24
  z = 24;

  // CHECK-NEXT: store i32 0, i32* [[I]]
  // CHECK-NEXT: br
  //   -> %for.cond6

  // %for.cond6:
  // CHECK: call void @_Z4getXv
  // CHECK: call zeroext i1 @_ZN1XcvbEv
  // CHECK: call void @_ZN1XD1Ev
  // CHECK-NEXT: call zeroext i1 @_ZN1XcvbEv
  // CHECK-NEXT: call void @_ZN1XD1Ev
  // CHECK-NEXT: br
  //   -> %for.body10, %for.end16

  // %for.body10:
  // CHECK: br
  //   -> %for.inc11

  // %for.inc11:
  // CHECK: call void @_Z4getXv
  // CHECK: load
  // CHECK: add
  // CHECK: call void @_ZN1XD1Ev
  // CHECK-NEXT: load i32* [[I]]
  // CHECK-NEXT: add
  // CHECK-NEXT: store
  // CHECK-NEXT: call void @_ZN1XD1Ev
  // CHECK-NEXT: br
  //   -> %for.cond6
  int i = 0;
  for(; getX(); getX(), ++i) { }
  z = 26;

  // %for.end16
  // CHECK: store i32 26
  // CHECK: ret
  z = 26;

  // CHECK-NEXT: ret void
}

void do_destruct(int z) {

@@ -187,15 +187,63 @@ namespace test3 {
  // Checked at top of file:
  // @_ZN5test312_GLOBAL__N_11CD1Ev = alias internal {{.*}} @_ZN5test312_GLOBAL__N_11CD2Ev

  // More checks at end of file.
}

namespace test4 {
  struct A { ~A(); };

  // CHECK: define void @_ZN5test43fooEv()
  // CHECK: call void @_ZN5test41AD1Ev
  // CHECK: ret void
  void foo() {
    {
      A a;
      goto failure;
    }

  failure:
    return;
  }

  // CHECK: define void @_ZN5test43barEi(
  // CHECK: [[X:%.*]] = alloca i32
  // CHECK-NEXT: [[A:%.*]] = alloca
  // CHECK: br label
  // CHECK: [[TMP:%.*]] = load i32* [[X]]
  // CHECK-NEXT: [[CMP:%.*]] = icmp ne i32 [[TMP]], 0
  // CHECK-NEXT: br i1
  // CHECK: call void @_ZN5test41AD1Ev(
  // CHECK: br label
  // CHECK: [[TMP:%.*]] = load i32* [[X]]
  // CHECK: [[TMP2:%.*]] = add nsw i32 [[TMP]], -1
  // CHECK: store i32 [[TMP2]], i32* [[X]]
  // CHECK: br label
  // CHECK: ret void
  void bar(int x) {
    for (A a; x; ) {
      x--;
    }
  }
}

// Checks from test3:

// CHECK: define internal void @_ZN5test312_GLOBAL__N_11CD2Ev(
// CHECK: call void @_ZN5test31BD2Ev(
// CHECK: call void @_ZN5test31AD2Ev(
// CHECK: ret void

// CHECK: define internal void @_ZN5test312_GLOBAL__N_11DD0Ev(
// CHECK: call void @_ZN5test312_GLOBAL__N_11DD1Ev(
// CHECK: invoke void @_ZN5test312_GLOBAL__N_11DD1Ev(
// CHECK: call void @_ZdlPv(
// CHECK: ret void
// CHECK: call i8* @llvm.eh.exception(
// CHECK: invoke void @_ZdlPv
// CHECK: call void @_Unwind_Resume_or_Rethrow
// CHECK: call i8* @llvm.eh.exception(
// CHECK: call void @_ZSt9terminatev(

// Checked at top of file:
// @_ZN5test312_GLOBAL__N_11DD1Ev = alias internal {{.*}} @_ZN5test312_GLOBAL__N_11DD2Ev

@@ -215,9 +263,14 @@ namespace test3 {
// CHECK: declare void @_ZN5test31AD2Ev(

// CHECK: define internal void @_ZN5test312_GLOBAL__N_11CD0Ev(
// CHECK: call void @_ZN5test312_GLOBAL__N_11CD1Ev(
// CHECK: invoke void @_ZN5test312_GLOBAL__N_11CD1Ev(
// CHECK: call void @_ZdlPv(
// CHECK: ret void
// CHECK: call i8* @llvm.eh.exception()
// CHECK: invoke void @_ZdlPv(
// CHECK: call void @_Unwind_Resume_or_Rethrow(
// CHECK: call i8* @llvm.eh.exception()
// CHECK: call void @_ZSt9terminatev()

// CHECK: define internal void @_ZThn8_N5test312_GLOBAL__N_11CD1Ev(
// CHECK: getelementptr inbounds i8* {{.*}}, i64 -8

@@ -228,4 +281,3 @@ namespace test3 {
// CHECK: getelementptr inbounds i8* {{.*}}, i64 -8
// CHECK: call void @_ZN5test312_GLOBAL__N_11CD0Ev(
// CHECK: ret void
}

@@ -38,6 +38,7 @@ void test2() {
// CHECK: define void @_Z5test2v()
// CHECK: [[FREEVAR:%.*]] = alloca i1
// CHECK-NEXT: [[EXNOBJVAR:%.*]] = alloca i8*
// CHECK-NEXT: [[EXNSLOTVAR:%.*]] = alloca i8*
// CHECK-NEXT: store i1 false, i1* [[FREEVAR]]
// CHECK-NEXT: [[EXNOBJ:%.*]] = call i8* @__cxa_allocate_exception(i64 16)
// CHECK-NEXT: store i8* [[EXNOBJ]], i8** [[EXNOBJVAR]]

@@ -104,3 +105,91 @@ namespace test5 {
//   : [[HANDLER]]: (can't check this in Release-Asserts builds)
// CHECK: {{%.*}} = call i32 @llvm.eh.typeid.for(i8* bitcast ({{%.*}}* @_ZTIN5test51AE to i8*))
}

namespace test6 {
  template <class T> struct allocator {
    ~allocator() throw() { }
  };

  void foo() {
    allocator<int> a;
  }
}

// PR7127
namespace test7 {
// CHECK: define i32 @_ZN5test73fooEv()
  int foo() {
    // CHECK: [[FREEEXNOBJ:%.*]] = alloca i1
    // CHECK-NEXT: [[EXNALLOCVAR:%.*]] = alloca i8*
    // CHECK-NEXT: [[CAUGHTEXNVAR:%.*]] = alloca i8*
    // CHECK-NEXT: [[INTCATCHVAR:%.*]] = alloca i32
    // CHECK-NEXT: store i1 false, i1* [[FREEEXNOBJ]]
    try {
      try {
        // CHECK-NEXT: [[EXNALLOC:%.*]] = call i8* @__cxa_allocate_exception
        // CHECK-NEXT: store i8* [[EXNALLOC]], i8** [[EXNALLOCVAR]]
        // CHECK-NEXT: store i1 true, i1* [[FREEEXNOBJ]]
        // CHECK-NEXT: bitcast i8* [[EXNALLOC]] to i32*
        // CHECK-NEXT: store i32 1, i32*
        // CHECK-NEXT: store i1 false, i1* [[FREEEXNOBJ]]
        // CHECK-NEXT: invoke void @__cxa_throw(i8* [[EXNALLOC]], i8* bitcast (i8** @_ZTIi to i8*), i8* null
        throw 1;
      }
      // This cleanup ends up here for no good reason. It's actually unused.
      // CHECK: load i8** [[EXNALLOCVAR]]
      // CHECK-NEXT: call void @__cxa_free_exception(

      // CHECK: [[CAUGHTEXN:%.*]] = call i8* @llvm.eh.exception()
      // CHECK-NEXT: store i8* [[CAUGHTEXN]], i8** [[CAUGHTEXNVAR]]
      // CHECK-NEXT: call i32 (i8*, i8*, ...)* @llvm.eh.selector(i8* [[CAUGHTEXN]], i8* bitcast (i32 (...)* @__gxx_personality_v0 to i8*), i8* bitcast (i8** @_ZTIi to i8*), i8* null)
      // CHECK-NEXT: call i32 @llvm.eh.typeid.for(i8* bitcast (i8** @_ZTIi to i8*))
      // CHECK-NEXT: icmp eq
      // CHECK-NEXT: br i1
      // CHECK: load i8** [[CAUGHTEXNVAR]]
      // CHECK-NEXT: call i8* @__cxa_begin_catch
      // CHECK: invoke void @__cxa_rethrow
      catch (int) {
        throw;
      }
    }
    // CHECK: [[CAUGHTEXN:%.*]] = call i8* @llvm.eh.exception()
    // CHECK-NEXT: store i8* [[CAUGHTEXN]], i8** [[CAUGHTEXNVAR]]
    // CHECK-NEXT: call i32 (i8*, i8*, ...)* @llvm.eh.selector(i8* [[CAUGHTEXN]], i8* bitcast (i32 (...)* @__gxx_personality_v0 to i8*), i8* null)
    // CHECK-NEXT: call void @__cxa_end_catch()
    // CHECK-NEXT: br label
    // CHECK: load i8** [[CAUGHTEXNVAR]]
    // CHECK-NEXT: call i8* @__cxa_begin_catch
    // CHECK-NEXT: call void @__cxa_end_catch
    catch (...) {
    }
    // CHECK: ret i32 0
    return 0;
  }
}

// Ordering of destructors in a catch handler.
namespace test8 {
  struct A { A(const A&); ~A(); };
  void bar();

  // CHECK: define void @_ZN5test83fooEv()
  void foo() {
    try {
      // CHECK: invoke void @_ZN5test83barEv()
      bar();
    } catch (A a) {
      // CHECK: call i8* @__cxa_get_exception_ptr
      // CHECK-NEXT: bitcast
      // CHECK-NEXT: invoke void @_ZN5test81AC1ERKS0_(
      // CHECK: call i8* @__cxa_begin_catch
      // CHECK-NEXT: invoke void @_ZN5test81AD1Ev(

      // CHECK: call void @__cxa_end_catch()
      // CHECK-NEXT: load
      // CHECK-NEXT: switch

      // CHECK: ret void
    }
  }
}

@@ -1,5 +1,5 @@
// RUN: %clang_cc1 -emit-llvm -O1 -o - %s | FileCheck %s
// RUN: %clang_cc1 -emit-llvm -fexceptions -o - %s | FileCheck --check-prefix=CHECK-EH %s
// RUN: %clang_cc1 -emit-llvm -O1 -fexceptions -o - %s | FileCheck --check-prefix=CHECK-EH %s

// Test code generation for the named return value optimization.
class X {

@@ -13,48 +13,97 @@ public:
// CHECK-EH: define void @_Z5test0v
X test0() {
  X x;
  // CHECK-NOT: call void @_ZN1XD1Ev
  // CHECK: ret void
  // CHECK-EH: br label
  // CHECK-EH: call void @_ZN1XD1Ev
  // CHECK-EH: br label
  // CHECK-EH: invoke void @_ZN1XD1Ev
  // CHECK-EH: ret void
  // CHECK: call void @_ZN1XC1Ev
  // CHECK-NEXT: ret void

  // CHECK-EH: call void @_ZN1XC1Ev
  // CHECK-EH-NEXT: ret void
  return x;
}

// CHECK: define void @_Z5test1b(
// CHECK-EH: define void @_Z5test1b(
X test1(bool B) {
  // CHECK: call void @_ZN1XC1Ev
  // CHECK: tail call void @_ZN1XC1Ev
  // CHECK-NEXT: ret void
  X x;
  // CHECK-NOT: call void @_ZN1XD1Ev
  // CHECK: ret void
  if (B)
    return (x);
  return x;
  // CHECK-EH: invoke void @_ZN1XD1Ev
  // CHECK-EH: tail call void @_ZN1XC1Ev
  // CHECK-EH-NEXT: ret void
}

// CHECK: define void @_Z5test2b
// CHECK-EH: define void @_Z5test2b
X test2(bool B) {
  // No NRVO
  // CHECK: call void @_ZN1XC1Ev
  // No NRVO.

  X x;
  // CHECK: call void @_ZN1XC1Ev
  X y;
  // CHECK: call void @_ZN1XC1ERKS_
  // CHECK-EH: invoke void @_ZN1XC1ERKS_
  if (B)
    return y;
  // CHECK: call void @_ZN1XC1ERKS_
  // CHECK-EH: invoke void @_ZN1XC1ERKS_
  return x;

  // CHECK: call void @_ZN1XC1Ev
  // CHECK-NEXT: call void @_ZN1XC1Ev
  // CHECK: call void @_ZN1XC1ERKS_
  // CHECK: call void @_ZN1XC1ERKS_
  // CHECK: call void @_ZN1XD1Ev
  // CHECK: call void @_ZN1XD1Ev
  // CHECK: ret void

  // The block ordering in the -fexceptions IR is unfortunate.

  // CHECK-EH: call void @_ZN1XC1Ev
  // CHECK-EH-NEXT: invoke void @_ZN1XC1Ev
  //   -> %invoke.cont1, %lpad

  // %invoke.cont1:
  // CHECK-EH: br i1
  //   -> %if.then, %if.end

  // %if.then: returning 'x'
  // CHECK-EH: invoke void @_ZN1XC1ERKS_
  //   -> %cleanup, %lpad5

  // %invoke.cont: rethrow block for %eh.cleanup.
  // This really should be elsewhere in the function.
  // CHECK-EH: call void @_Unwind_Resume_or_Rethrow
  // CHECK-EH-NEXT: unreachable

  // %lpad: landing pad for ctor of 'y', dtor of 'y'
  // CHECK-EH: call i8* @llvm.eh.exception()
  // CHECK-EH: call i32 (i8*, i8*, ...)* @llvm.eh.selector
  // CHECK-EH-NEXT: br label
  //   -> %eh.cleanup

  // %invoke.cont2: normal cleanup for 'x'
  // CHECK-EH: call void @_ZN1XD1Ev
  // CHECK-EH-NEXT: ret void

  // %lpad5: landing pad for return copy ctors, EH cleanup for 'y'
  // CHECK-EH: invoke void @_ZN1XD1Ev
  //   -> %eh.cleanup, %terminate.lpad

  // %if.end: returning 'y'
  // CHECK-EH: invoke void @_ZN1XC1ERKS_
  //   -> %cleanup, %lpad5

  // %cleanup: normal cleanup for 'y'
  // CHECK-EH: invoke void @_ZN1XD1Ev
  //   -> %invoke.cont2, %lpad

  // %eh.cleanup: EH cleanup for 'x'
  // CHECK-EH: invoke void @_ZN1XD1Ev
  //   -> %invoke.cont, %terminate.lpad

  // %terminate.lpad: terminate landing pad.
  // CHECK-EH: call i8* @llvm.eh.exception()
  // CHECK-EH-NEXT: call i32 (i8*, i8*, ...)* @llvm.eh.selector
  // CHECK-EH-NEXT: call void @_ZSt9terminatev()
  // CHECK-EH-NEXT: unreachable
}

X test3(bool B) {

@@ -12,15 +12,18 @@ void f() {
  // CHECK: call i32 @__cxa_guard_acquire(i64* @_ZGVZ1fvE1x)
  // CHECK: invoke void @_ZN1XC1Ev
  // CHECK: call void @__cxa_guard_release(i64* @_ZGVZ1fvE1x)
  // CHECK: call i32 @__cxa_atexit
  // CHECK-NEXT: call i32 @__cxa_atexit
  // CHECK: br
  static X x;

  // CHECK: call i8* @__cxa_allocate_exception
  // CHECK: invoke void @__cxa_throw
  throw Y();

  // Finally, the landing pad.
  // CHECK: call i8* @llvm.eh.exception()
  // CHECK: call i32 (i8*, i8*, ...)* @llvm.eh.selector
  // CHECK: call void @__cxa_guard_abort(i64* @_ZGVZ1fvE1x)
  // CHECK: call void @_Unwind_Resume_or_Rethrow
  // CHECK: unreachable

  // CHECK: call i8* @__cxa_allocate_exception
  throw Y();
}

@@ -1,11 +1,8 @@
// RUN: %clang_cc1 -triple x86_64-apple-darwin10 -emit-llvm -o %t %s
// RUN: %clang_cc1 -triple x86_64-apple-darwin10 -emit-llvm -fexceptions -O2 -o - %s | FileCheck %s
//
// <rdar://problem/7471679> [irgen] [eh] Exception code built with clang (x86_64) crashes
// Just check that we don't emit any dead blocks.
//
// RUN: grep 'No predecessors' %t | count 0

@interface NSArray @end
void f0() {
  @try {

@@ -16,3 +13,27 @@ void f0() {
  } @catch (id e) {
  }
}

// CHECK: define void @f1()
void f1() {
  extern void foo(void);

  while (1) {
    // CHECK: call void @objc_exception_try_enter
    // CHECK-NEXT: getelementptr
    // CHECK-NEXT: call i32 @_setjmp(
    // CHECK-NEXT: icmp
    // CHECK-NEXT: br i1
    @try {
      // CHECK: call void @foo()
      foo();
      // CHECK: call void @objc_exception_try_exit
      // CHECK-NEXT: ret void

      // CHECK: call i8* @objc_exception_extract
      // CHECK-NEXT: ret void
    } @finally {
      break;
    }
  }
}